Compare commits
170 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| e29335e7e1 | |||
| defb8d080a | |||
| c6151931b9 | |||
| ce5e368f40 | |||
| 884d4b5d0e | |||
| f54fee0f72 | |||
| e8b436ef61 | |||
| 0e99594b70 | |||
| bd669b352d | |||
| 7437fdf535 | |||
| ee8e2213b0 | |||
| 15175a4780 | |||
| ee64d91eac | |||
| 91bf3cfc28 | |||
| 03559ade48 | |||
| 15730d40fc | |||
| 295869bef1 | |||
| 2fe1353f7a | |||
| f3d950d917 | |||
| ef51e7d07a | |||
| 5119db3cd2 | |||
| 9f3b2cddb1 | |||
| a53a5682d5 | |||
| 038e23b82c | |||
| 104aa19014 | |||
| 0faee8e913 | |||
| 3fa438f80e | |||
| 275602ce61 | |||
| 5887867e9b | |||
| 41de4ad91a | |||
| 9bef5de30d | |||
| 1c8ee5cb88 | |||
| 91d4f8b59b | |||
| 6b990bda52 | |||
| df738abccd | |||
| cc4127405b | |||
| 82945cfb16 | |||
| 408b09a1f2 | |||
| 03d285db26 | |||
| ade3fce0f6 | |||
| 6c99d5eca2 | |||
| 09e2daf282 | |||
| 9861931df1 | |||
| 24dddd56c5 | |||
| 05b037bdea | |||
| 3699ff6b48 | |||
| e0b1536075 | |||
| 8ac54534f4 | |||
| 3dd3859ebf | |||
| 9f229370e7 | |||
| 06db58771f | |||
| 9c6be5bfe7 | |||
| 152103bf78 | |||
| 9158e46bda | |||
| 5c8176b7ff | |||
| 3d779c2093 | |||
| a10ca93c42 | |||
| 4a373ee6af | |||
| 847058c56c | |||
| 5ab72cce63 | |||
| fb86841071 | |||
| 51b0767429 | |||
| a8f87c2625 | |||
| 5dc57da6b4 | |||
| 952c69f412 | |||
| 299a54a99b | |||
| d9f95f2285 | |||
| dd85d1fdd8 | |||
| 2ded9d6a73 | |||
| 97cb19eeb5 | |||
| caeff49aba | |||
| 6213b30f10 | |||
| 5d66f1f027 | |||
| 8f2394c256 | |||
| ed90ecf00e | |||
| f31e6e09ba | |||
| 8942b50872 | |||
| c84ab728a8 | |||
| 9935da86c6 | |||
| 4b8d73994f | |||
| e5d7f43c51 | |||
| 55be304e53 | |||
| 79736154db | |||
| 6c5b741bed | |||
| fa82f841c7 | |||
| 5807fe01e4 | |||
| e2076beb9f | |||
| f1d3f31c10 | |||
| 46e242e444 | |||
| 544f15523c | |||
| 037e6f06f5 | |||
| 8274251fe1 | |||
| 63631b5b2b | |||
| d0f434b672 | |||
| be22c38161 | |||
| 65335bc7d4 | |||
| 0e0f0925e4 | |||
| 057d7dacdc | |||
| 6b72ef8b18 | |||
| 0c2eca94f5 | |||
| 05f8f67ced | |||
| 156f985fb0 | |||
| 09f69ef74f | |||
| 83c4611293 | |||
| 23a44d10ac | |||
| 1783ee13ab | |||
| 33fbb174d5 | |||
| 840afb225b | |||
| 3f49e28308 | |||
| 5fdc4185c7 | |||
| a7c9e623fe | |||
| dd99d40ad6 | |||
| 815e58e872 | |||
| b60ae0a0c4 | |||
| d17dd5d787 | |||
| c1357d4e27 | |||
| 2ec9be2203 | |||
| b48e367c58 | |||
| bb5884467d | |||
| a360785199 | |||
| 2b4485575c | |||
| e1619daacc | |||
| 2880582cc9 | |||
| 091b5c88d6 | |||
| 039fa89481 | |||
| ffcb7a1693 | |||
| 4d1003eace | |||
| 0d2ccacacd | |||
| c517aabe43 | |||
| 1f71c2c129 | |||
| d93629bcde | |||
| d63bb2ce2f | |||
| d2c286d9b0 | |||
| e5c8c9bdaf | |||
| 76d6e13185 | |||
| f84a789b40 | |||
| 41420aea3c | |||
| 29b4cfc55b | |||
| 79f5cb8b9a | |||
| 4e53d8b9c2 | |||
| 230df3eadf | |||
| 94c76b97bd | |||
| ebcfae27a2 | |||
| 0993dcfef7 | |||
| f05ccfe525 | |||
| b68c648476 | |||
| e89c7cdcef | |||
| b92a0f428c | |||
| cf8d7665e3 | |||
| 4cabb9528e | |||
| 29aa1cc572 | |||
| 30f81edbce | |||
| a191ff9aa2 | |||
| f68495aa6e | |||
| 0765970d49 | |||
| f7388f3dfc | |||
| 7328fa5c08 | |||
| c6c9fa1e39 | |||
| 3eb6e1cc97 | |||
| b05033897a | |||
| 7c3e24f3be | |||
| 3afab824f5 | |||
| b3f1492f14 | |||
| 6b61c8a32a | |||
| 718f712c18 | |||
| f23a6a1140 | |||
| 0fbe21e574 | |||
| 3a56ea56a7 | |||
| 257fb76882 | |||
| 98262d8fb2 |
@@ -10,8 +10,12 @@ install_devcontainer_cli() {
|
||||
|
||||
install_ssh_config() {
|
||||
echo "🔑 Installing SSH configuration..."
|
||||
rsync -a /mnt/home/coder/.ssh/ ~/.ssh/
|
||||
chmod 0700 ~/.ssh
|
||||
if [ -d /mnt/home/coder/.ssh ]; then
|
||||
rsync -a /mnt/home/coder/.ssh/ ~/.ssh/
|
||||
chmod 0700 ~/.ssh
|
||||
else
|
||||
echo "⚠️ SSH directory not found."
|
||||
fi
|
||||
}
|
||||
|
||||
install_git_config() {
|
||||
|
||||
@@ -26,5 +26,6 @@ ignorePatterns:
|
||||
- pattern: "claude.ai"
|
||||
- pattern: "splunk.com"
|
||||
- pattern: "stackoverflow.com/questions"
|
||||
- pattern: "developer.hashicorp.com/terraform/language"
|
||||
aliveStatusCodes:
|
||||
- 200
|
||||
|
||||
@@ -0,0 +1,3 @@
|
||||
node_modules/
|
||||
*.log
|
||||
.DS_Store
|
||||
@@ -0,0 +1,86 @@
|
||||
name: "Create Coder Task"
|
||||
description: "Create a Coder task for a GitHub user, with support for issue commenting"
|
||||
|
||||
inputs:
|
||||
# Required: Coder configuration
|
||||
coder-url:
|
||||
description: "Coder deployment URL"
|
||||
required: true
|
||||
|
||||
coder-token:
|
||||
description: "Coder session token for authentication"
|
||||
required: true
|
||||
|
||||
# Required: Task configuration
|
||||
template-name:
|
||||
description: "Coder template to use for workspace"
|
||||
required: true
|
||||
|
||||
task-prompt:
|
||||
description: "Prompt/instructions to send to the task"
|
||||
required: true
|
||||
|
||||
# Optional: User identification
|
||||
github-user-id:
|
||||
description: "GitHub user ID (defaults to event sender)"
|
||||
required: false
|
||||
|
||||
github-username:
|
||||
description: "GitHub username (defaults to event sender)"
|
||||
required: false
|
||||
|
||||
# Optional: Task configuration
|
||||
template-preset:
|
||||
description: "Template preset to use"
|
||||
required: false
|
||||
default: "Default"
|
||||
|
||||
task-name-prefix:
|
||||
description: "Prefix for task name"
|
||||
required: false
|
||||
default: "task"
|
||||
|
||||
task-name:
|
||||
description: "Full task name (overrides auto-generation)"
|
||||
required: false
|
||||
|
||||
organization:
|
||||
description: "Coder organization name"
|
||||
required: false
|
||||
default: "coder"
|
||||
|
||||
# Optional: Issue integration
|
||||
issue-url:
|
||||
description: "GitHub issue URL to comment on"
|
||||
required: false
|
||||
|
||||
comment-on-issue:
|
||||
description: "Whether to comment on the issue"
|
||||
required: false
|
||||
default: "true"
|
||||
|
||||
coder-web-url:
|
||||
description: "Coder web UI URL for task links (defaults to coder-url)"
|
||||
required: false
|
||||
|
||||
# GitHub token for API operations
|
||||
github-token:
|
||||
description: "GitHub token for commenting on issues"
|
||||
required: true
|
||||
|
||||
outputs:
|
||||
coder-username:
|
||||
description: "The Coder username resolved from GitHub user"
|
||||
|
||||
task-name:
|
||||
description: "The full task name (username/task-name)"
|
||||
|
||||
task-url:
|
||||
description: "The URL to view the task in Coder"
|
||||
|
||||
task-exists:
|
||||
description: "Whether the task already existed (true/false)"
|
||||
|
||||
runs:
|
||||
using: "node20"
|
||||
main: "dist/index.js"
|
||||
@@ -0,0 +1,158 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "coder-task-action",
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.10.1",
|
||||
"@actions/github": "^6.0.0",
|
||||
"@octokit/rest": "^21.1.1",
|
||||
"zod": "^3.24.2",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "2.2.4",
|
||||
"@types/bun": "latest",
|
||||
"typescript": "^5.0.0",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"@actions/core": ["@actions/core@1.11.1", "", { "dependencies": { "@actions/exec": "^1.1.1", "@actions/http-client": "^2.0.1" } }, "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A=="],
|
||||
|
||||
"@actions/exec": ["@actions/exec@1.1.1", "", { "dependencies": { "@actions/io": "^1.0.1" } }, "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w=="],
|
||||
|
||||
"@actions/github": ["@actions/github@6.0.1", "", { "dependencies": { "@actions/http-client": "^2.2.0", "@octokit/core": "^5.0.1", "@octokit/plugin-paginate-rest": "^9.2.2", "@octokit/plugin-rest-endpoint-methods": "^10.4.0", "@octokit/request": "^8.4.1", "@octokit/request-error": "^5.1.1", "undici": "^5.28.5" } }, "sha512-xbZVcaqD4XnQAe35qSQqskb3SqIAfRyLBrHMd/8TuL7hJSz2QtbDwnNM8zWx4zO5l2fnGtseNE3MbEvD7BxVMw=="],
|
||||
|
||||
"@actions/http-client": ["@actions/http-client@2.2.3", "", { "dependencies": { "tunnel": "^0.0.6", "undici": "^5.25.4" } }, "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA=="],
|
||||
|
||||
"@actions/io": ["@actions/io@1.1.3", "", {}, "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="],
|
||||
|
||||
"@biomejs/biome": ["@biomejs/biome@2.2.4", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "2.2.4", "@biomejs/cli-darwin-x64": "2.2.4", "@biomejs/cli-linux-arm64": "2.2.4", "@biomejs/cli-linux-arm64-musl": "2.2.4", "@biomejs/cli-linux-x64": "2.2.4", "@biomejs/cli-linux-x64-musl": "2.2.4", "@biomejs/cli-win32-arm64": "2.2.4", "@biomejs/cli-win32-x64": "2.2.4" }, "bin": { "biome": "bin/biome" } }, "sha512-TBHU5bUy/Ok6m8c0y3pZiuO/BZoY/OcGxoLlrfQof5s8ISVwbVBdFINPQZyFfKwil8XibYWb7JMwnT8wT4WVPg=="],
|
||||
|
||||
"@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@2.2.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-RJe2uiyaloN4hne4d2+qVj3d3gFJFbmrr5PYtkkjei1O9c+BjGXgpUPVbi8Pl8syumhzJjFsSIYkcLt2VlVLMA=="],
|
||||
|
||||
"@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@2.2.4", "", { "os": "darwin", "cpu": "x64" }, "sha512-cFsdB4ePanVWfTnPVaUX+yr8qV8ifxjBKMkZwN7gKb20qXPxd/PmwqUH8mY5wnM9+U0QwM76CxFyBRJhC9tQwg=="],
|
||||
|
||||
"@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@2.2.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-M/Iz48p4NAzMXOuH+tsn5BvG/Jb07KOMTdSVwJpicmhN309BeEyRyQX+n1XDF0JVSlu28+hiTQ2L4rZPvu7nMw=="],
|
||||
|
||||
"@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@2.2.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-7TNPkMQEWfjvJDaZRSkDCPT/2r5ESFPKx+TEev+I2BXDGIjfCZk2+b88FOhnJNHtksbOZv8ZWnxrA5gyTYhSsQ=="],
|
||||
|
||||
"@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@2.2.4", "", { "os": "linux", "cpu": "x64" }, "sha512-orr3nnf2Dpb2ssl6aihQtvcKtLySLta4E2UcXdp7+RTa7mfJjBgIsbS0B9GC8gVu0hjOu021aU8b3/I1tn+pVQ=="],
|
||||
|
||||
"@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@2.2.4", "", { "os": "linux", "cpu": "x64" }, "sha512-m41nFDS0ksXK2gwXL6W6yZTYPMH0LughqbsxInSKetoH6morVj43szqKx79Iudkp8WRT5SxSh7qVb8KCUiewGg=="],
|
||||
|
||||
"@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@2.2.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-NXnfTeKHDFUWfxAefa57DiGmu9VyKi0cDqFpdI+1hJWQjGJhJutHPX0b5m+eXvTKOaf+brU+P0JrQAZMb5yYaQ=="],
|
||||
|
||||
"@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@2.2.4", "", { "os": "win32", "cpu": "x64" }, "sha512-3Y4V4zVRarVh/B/eSHczR4LYoSVyv3Dfuvm3cWs5w/HScccS0+Wt/lHOcDTRYeHjQmMYVC3rIRWqyN2EI52+zg=="],
|
||||
|
||||
"@fastify/busboy": ["@fastify/busboy@2.1.1", "", {}, "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA=="],
|
||||
|
||||
"@octokit/auth-token": ["@octokit/auth-token@4.0.0", "", {}, "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA=="],
|
||||
|
||||
"@octokit/core": ["@octokit/core@5.2.2", "", { "dependencies": { "@octokit/auth-token": "^4.0.0", "@octokit/graphql": "^7.1.0", "@octokit/request": "^8.4.1", "@octokit/request-error": "^5.1.1", "@octokit/types": "^13.0.0", "before-after-hook": "^2.2.0", "universal-user-agent": "^6.0.0" } }, "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg=="],
|
||||
|
||||
"@octokit/endpoint": ["@octokit/endpoint@9.0.6", "", { "dependencies": { "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" } }, "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw=="],
|
||||
|
||||
"@octokit/graphql": ["@octokit/graphql@7.1.1", "", { "dependencies": { "@octokit/request": "^8.4.1", "@octokit/types": "^13.0.0", "universal-user-agent": "^6.0.0" } }, "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g=="],
|
||||
|
||||
"@octokit/openapi-types": ["@octokit/openapi-types@24.2.0", "", {}, "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg=="],
|
||||
|
||||
"@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@9.2.2", "", { "dependencies": { "@octokit/types": "^12.6.0" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-u3KYkGF7GcZnSD/3UP0S7K5XUFT2FkOQdcfXZGZQPGv3lm4F2Xbf71lvjldr8c1H3nNbF+33cLEkWYbokGWqiQ=="],
|
||||
|
||||
"@octokit/plugin-request-log": ["@octokit/plugin-request-log@5.3.1", "", { "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-n/lNeCtq+9ofhC15xzmJCNKP2BWTv8Ih2TTy+jatNCCq/gQP/V7rK3fjIfuz0pDWDALO/o/4QY4hyOF6TQQFUw=="],
|
||||
|
||||
"@octokit/plugin-rest-endpoint-methods": ["@octokit/plugin-rest-endpoint-methods@10.4.1", "", { "dependencies": { "@octokit/types": "^12.6.0" }, "peerDependencies": { "@octokit/core": "5" } }, "sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg=="],
|
||||
|
||||
"@octokit/request": ["@octokit/request@8.4.1", "", { "dependencies": { "@octokit/endpoint": "^9.0.6", "@octokit/request-error": "^5.1.1", "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" } }, "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw=="],
|
||||
|
||||
"@octokit/request-error": ["@octokit/request-error@5.1.1", "", { "dependencies": { "@octokit/types": "^13.1.0", "deprecation": "^2.0.0", "once": "^1.4.0" } }, "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g=="],
|
||||
|
||||
"@octokit/rest": ["@octokit/rest@21.1.1", "", { "dependencies": { "@octokit/core": "^6.1.4", "@octokit/plugin-paginate-rest": "^11.4.2", "@octokit/plugin-request-log": "^5.3.1", "@octokit/plugin-rest-endpoint-methods": "^13.3.0" } }, "sha512-sTQV7va0IUVZcntzy1q3QqPm/r8rWtDCqpRAmb8eXXnKkjoQEtFe3Nt5GTVsHft+R6jJoHeSiVLcgcvhtue/rg=="],
|
||||
|
||||
"@octokit/types": ["@octokit/types@13.10.0", "", { "dependencies": { "@octokit/openapi-types": "^24.2.0" } }, "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA=="],
|
||||
|
||||
"@types/bun": ["@types/bun@1.3.0", "", { "dependencies": { "bun-types": "1.3.0" } }, "sha512-+lAGCYjXjip2qY375xX/scJeVRmZ5cY0wyHYyCYxNcdEXrQ4AOe3gACgd4iQ8ksOslJtW4VNxBJ8llUwc3a6AA=="],
|
||||
|
||||
"@types/node": ["@types/node@24.8.1", "", { "dependencies": { "undici-types": "~7.14.0" } }, "sha512-alv65KGRadQVfVcG69MuB4IzdYVpRwMG/mq8KWOaoOdyY617P5ivaDiMCGOFDWD2sAn5Q0mR3mRtUOgm99hL9Q=="],
|
||||
|
||||
"@types/react": ["@types/react@19.2.2", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-6mDvHUFSjyT2B2yeNx2nUgMxh9LtOWvkhIU3uePn2I2oyNymUAX1NIsdgviM4CH+JSrp2D2hsMvJOkxY+0wNRA=="],
|
||||
|
||||
"before-after-hook": ["before-after-hook@2.2.3", "", {}, "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ=="],
|
||||
|
||||
"bun-types": ["bun-types@1.3.0", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-u8X0thhx+yJ0KmkxuEo9HAtdfgCBaM/aI9K90VQcQioAmkVp3SG3FkwWGibUFz3WdXAdcsqOcbU40lK7tbHdkQ=="],
|
||||
|
||||
"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],
|
||||
|
||||
"deprecation": ["deprecation@2.3.1", "", {}, "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="],
|
||||
|
||||
"fast-content-type-parse": ["fast-content-type-parse@2.0.1", "", {}, "sha512-nGqtvLrj5w0naR6tDPfB4cUmYCqouzyQiz6C5y/LtcDllJdrcc6WaWW6iXyIIOErTa/XRybj28aasdn4LkVk6Q=="],
|
||||
|
||||
"once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="],
|
||||
|
||||
"tunnel": ["tunnel@0.0.6", "", {}, "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="],
|
||||
|
||||
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
|
||||
|
||||
"undici": ["undici@5.29.0", "", { "dependencies": { "@fastify/busboy": "^2.0.0" } }, "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg=="],
|
||||
|
||||
"undici-types": ["undici-types@7.14.0", "", {}, "sha512-QQiYxHuyZ9gQUIrmPo3IA+hUl4KYk8uSA7cHrcKd/l3p1OTpZcM0Tbp9x7FAtXdAYhlasd60ncPpgu6ihG6TOA=="],
|
||||
|
||||
"universal-user-agent": ["universal-user-agent@6.0.1", "", {}, "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ=="],
|
||||
|
||||
"wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],
|
||||
|
||||
"zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="],
|
||||
|
||||
"@octokit/plugin-paginate-rest/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="],
|
||||
|
||||
"@octokit/plugin-request-log/@octokit/core": ["@octokit/core@6.1.6", "", { "dependencies": { "@octokit/auth-token": "^5.0.0", "@octokit/graphql": "^8.2.2", "@octokit/request": "^9.2.3", "@octokit/request-error": "^6.1.8", "@octokit/types": "^14.0.0", "before-after-hook": "^3.0.2", "universal-user-agent": "^7.0.0" } }, "sha512-kIU8SLQkYWGp3pVKiYzA5OSaNF5EE03P/R8zEmmrG6XwOg5oBjXyQVVIauQ0dgau4zYhpZEhJrvIYt6oM+zZZA=="],
|
||||
|
||||
"@octokit/plugin-rest-endpoint-methods/@octokit/types": ["@octokit/types@12.6.0", "", { "dependencies": { "@octokit/openapi-types": "^20.0.0" } }, "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw=="],
|
||||
|
||||
"@octokit/rest/@octokit/core": ["@octokit/core@6.1.6", "", { "dependencies": { "@octokit/auth-token": "^5.0.0", "@octokit/graphql": "^8.2.2", "@octokit/request": "^9.2.3", "@octokit/request-error": "^6.1.8", "@octokit/types": "^14.0.0", "before-after-hook": "^3.0.2", "universal-user-agent": "^7.0.0" } }, "sha512-kIU8SLQkYWGp3pVKiYzA5OSaNF5EE03P/R8zEmmrG6XwOg5oBjXyQVVIauQ0dgau4zYhpZEhJrvIYt6oM+zZZA=="],
|
||||
|
||||
"@octokit/rest/@octokit/plugin-paginate-rest": ["@octokit/plugin-paginate-rest@11.6.0", "", { "dependencies": { "@octokit/types": "^13.10.0" }, "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-n5KPteiF7pWKgBIBJSk8qzoZWcUkza2O6A0za97pMGVrGfPdltxrfmfF5GucHYvHGZD8BdaZmmHGz5cX/3gdpw=="],
|
||||
|
||||
"@octokit/rest/@octokit/plugin-rest-endpoint-methods": ["@octokit/plugin-rest-endpoint-methods@13.5.0", "", { "dependencies": { "@octokit/types": "^13.10.0" }, "peerDependencies": { "@octokit/core": ">=6" } }, "sha512-9Pas60Iv9ejO3WlAX3maE1+38c5nqbJXV5GrncEfkndIpZrJ/WPMRd2xYDcPPEt5yzpxcjw9fWNoPhsSGzqKqw=="],
|
||||
|
||||
"@octokit/plugin-paginate-rest/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="],
|
||||
|
||||
"@octokit/plugin-request-log/@octokit/core/@octokit/auth-token": ["@octokit/auth-token@5.1.2", "", {}, "sha512-JcQDsBdg49Yky2w2ld20IHAlwr8d/d8N6NiOXbtuoPCqzbsiJgF633mVUw3x4mo0H5ypataQIX7SFu3yy44Mpw=="],
|
||||
|
||||
"@octokit/plugin-request-log/@octokit/core/@octokit/graphql": ["@octokit/graphql@8.2.2", "", { "dependencies": { "@octokit/request": "^9.2.3", "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-Yi8hcoqsrXGdt0yObxbebHXFOiUA+2v3n53epuOg1QUgOB6c4XzvisBNVXJSl8RYA5KrDuSL2yq9Qmqe5N0ryA=="],
|
||||
|
||||
"@octokit/plugin-request-log/@octokit/core/@octokit/request": ["@octokit/request@9.2.4", "", { "dependencies": { "@octokit/endpoint": "^10.1.4", "@octokit/request-error": "^6.1.8", "@octokit/types": "^14.0.0", "fast-content-type-parse": "^2.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-q8ybdytBmxa6KogWlNa818r0k1wlqzNC+yNkcQDECHvQo8Vmstrg18JwqJHdJdUiHD2sjlwBgSm9kHkOKe2iyA=="],
|
||||
|
||||
"@octokit/plugin-request-log/@octokit/core/@octokit/request-error": ["@octokit/request-error@6.1.8", "", { "dependencies": { "@octokit/types": "^14.0.0" } }, "sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ=="],
|
||||
|
||||
"@octokit/plugin-request-log/@octokit/core/@octokit/types": ["@octokit/types@14.1.0", "", { "dependencies": { "@octokit/openapi-types": "^25.1.0" } }, "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g=="],
|
||||
|
||||
"@octokit/plugin-request-log/@octokit/core/before-after-hook": ["before-after-hook@3.0.2", "", {}, "sha512-Nik3Sc0ncrMK4UUdXQmAnRtzmNQTAAXmXIopizwZ1W1t8QmfJj+zL4OA2I7XPTPW5z5TDqv4hRo/JzouDJnX3A=="],
|
||||
|
||||
"@octokit/plugin-request-log/@octokit/core/universal-user-agent": ["universal-user-agent@7.0.3", "", {}, "sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A=="],
|
||||
|
||||
"@octokit/plugin-rest-endpoint-methods/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@20.0.0", "", {}, "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA=="],
|
||||
|
||||
"@octokit/rest/@octokit/core/@octokit/auth-token": ["@octokit/auth-token@5.1.2", "", {}, "sha512-JcQDsBdg49Yky2w2ld20IHAlwr8d/d8N6NiOXbtuoPCqzbsiJgF633mVUw3x4mo0H5ypataQIX7SFu3yy44Mpw=="],
|
||||
|
||||
"@octokit/rest/@octokit/core/@octokit/graphql": ["@octokit/graphql@8.2.2", "", { "dependencies": { "@octokit/request": "^9.2.3", "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.0" } }, "sha512-Yi8hcoqsrXGdt0yObxbebHXFOiUA+2v3n53epuOg1QUgOB6c4XzvisBNVXJSl8RYA5KrDuSL2yq9Qmqe5N0ryA=="],
|
||||
|
||||
"@octokit/rest/@octokit/core/@octokit/request": ["@octokit/request@9.2.4", "", { "dependencies": { "@octokit/endpoint": "^10.1.4", "@octokit/request-error": "^6.1.8", "@octokit/types": "^14.0.0", "fast-content-type-parse": "^2.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-q8ybdytBmxa6KogWlNa818r0k1wlqzNC+yNkcQDECHvQo8Vmstrg18JwqJHdJdUiHD2sjlwBgSm9kHkOKe2iyA=="],
|
||||
|
||||
"@octokit/rest/@octokit/core/@octokit/request-error": ["@octokit/request-error@6.1.8", "", { "dependencies": { "@octokit/types": "^14.0.0" } }, "sha512-WEi/R0Jmq+IJKydWlKDmryPcmdYSVjL3ekaiEL1L9eo1sUnqMJ+grqmC9cjk7CA7+b2/T397tO5d8YLOH3qYpQ=="],
|
||||
|
||||
"@octokit/rest/@octokit/core/@octokit/types": ["@octokit/types@14.1.0", "", { "dependencies": { "@octokit/openapi-types": "^25.1.0" } }, "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g=="],
|
||||
|
||||
"@octokit/rest/@octokit/core/before-after-hook": ["before-after-hook@3.0.2", "", {}, "sha512-Nik3Sc0ncrMK4UUdXQmAnRtzmNQTAAXmXIopizwZ1W1t8QmfJj+zL4OA2I7XPTPW5z5TDqv4hRo/JzouDJnX3A=="],
|
||||
|
||||
"@octokit/rest/@octokit/core/universal-user-agent": ["universal-user-agent@7.0.3", "", {}, "sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A=="],
|
||||
|
||||
"@octokit/plugin-request-log/@octokit/core/@octokit/request/@octokit/endpoint": ["@octokit/endpoint@10.1.4", "", { "dependencies": { "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-OlYOlZIsfEVZm5HCSR8aSg02T2lbUWOsCQoPKfTXJwDzcHQBrVBGdGXb89dv2Kw2ToZaRtudp8O3ZIYoaOjKlA=="],
|
||||
|
||||
"@octokit/plugin-request-log/@octokit/core/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@25.1.0", "", {}, "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA=="],
|
||||
|
||||
"@octokit/rest/@octokit/core/@octokit/request/@octokit/endpoint": ["@octokit/endpoint@10.1.4", "", { "dependencies": { "@octokit/types": "^14.0.0", "universal-user-agent": "^7.0.2" } }, "sha512-OlYOlZIsfEVZm5HCSR8aSg02T2lbUWOsCQoPKfTXJwDzcHQBrVBGdGXb89dv2Kw2ToZaRtudp8O3ZIYoaOjKlA=="],
|
||||
|
||||
"@octokit/rest/@octokit/core/@octokit/types/@octokit/openapi-types": ["@octokit/openapi-types@25.1.0", "", {}, "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA=="],
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"name": "coder-task-action",
|
||||
"version": "1.0.0",
|
||||
"description": "GitHub Action to create and manage Coder tasks",
|
||||
"main": "dist/index.js",
|
||||
"scripts": {
|
||||
"build": "bun build src/index.ts --outfile dist/index.js --target node",
|
||||
"dev": "bun run --watch src/index.ts",
|
||||
"format": "biome format --write .",
|
||||
"format:check": "biome format .",
|
||||
"lint": "biome lint --error-on-warnings .",
|
||||
"typecheck": "tsc --noEmit"
|
||||
},
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.10.1",
|
||||
"@actions/github": "^6.0.0",
|
||||
"@octokit/rest": "^21.1.1",
|
||||
"zod": "^3.24.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "2.2.4",
|
||||
"@types/bun": "latest",
|
||||
"typescript": "^5.0.0"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,682 @@
|
||||
import { describe, expect, test, beforeEach } from "bun:test";
|
||||
import { CoderTaskAction } from "./action";
|
||||
import type { Octokit } from "./action";
|
||||
import {
|
||||
MockCoderClient,
|
||||
createMockOctokit,
|
||||
createMockInputs,
|
||||
mockUser,
|
||||
mockTask,
|
||||
mockTemplate,
|
||||
} from "./test-helpers";
|
||||
|
||||
describe("CoderTaskAction", () => {
|
||||
let coderClient: MockCoderClient;
|
||||
let octokit: ReturnType<typeof createMockOctokit>;
|
||||
|
||||
beforeEach(() => {
|
||||
coderClient = new MockCoderClient();
|
||||
octokit = createMockOctokit();
|
||||
});
|
||||
|
||||
describe("parseGithubIssueUrl", () => {
|
||||
test("parses valid GitHub issue URL", () => {
|
||||
const inputs = createMockInputs({
|
||||
githubIssueURL: "https://github.com/owner/repo/issues/123",
|
||||
});
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
const result = (
|
||||
action as unknown as CoderTaskAction
|
||||
).parseGithubIssueURL();
|
||||
|
||||
expect(result).toEqual({
|
||||
githubOrg: "owner",
|
||||
githubRepo: "repo",
|
||||
githubIssueNumber: 123,
|
||||
});
|
||||
});
|
||||
|
||||
test("throws when no issue URL provided", () => {
|
||||
const inputs = createMockInputs({ githubIssueURL: undefined });
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
const result = (
|
||||
action as unknown as CoderTaskAction
|
||||
).parseGithubIssueURL();
|
||||
|
||||
expect(result).toThrowError("Missing issue URL");
|
||||
});
|
||||
|
||||
test("throws for invalid URL format", () => {
|
||||
const inputs = createMockInputs({ githubIssueURL: "not-a-url" });
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
const result = (
|
||||
action as unknown as CoderTaskAction
|
||||
).parseGithubIssueURL();
|
||||
|
||||
expect(result).toThrowError("Invalid issue URL: not-a-url");
|
||||
});
|
||||
|
||||
test("handled non-github.com URL", () => {
|
||||
const inputs = createMockInputs({
|
||||
githubIssueURL: "https://code.acme.com/owner/repo/issues/123",
|
||||
});
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
const result = (
|
||||
action as unknown as CoderTaskAction
|
||||
).parseGithubIssueURL();
|
||||
|
||||
expect(result).toEqual({
|
||||
githubOrg: "owner",
|
||||
githubRepo: "repo",
|
||||
githubIssueNumber: 123,
|
||||
});
|
||||
});
|
||||
|
||||
test("handles URL with trailing junk", () => {
|
||||
const inputs = createMockInputs({
|
||||
githubIssueURL:
|
||||
"https://github.com/owner/repo/issues/123/?param=value#anchor",
|
||||
});
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
const result = (
|
||||
action as unknown as CoderTaskAction
|
||||
).parseGithubIssueURL();
|
||||
|
||||
// Should still parse correctly
|
||||
expect(result).toEqual({
|
||||
githubOrg: "owner",
|
||||
githubRepo: "repo",
|
||||
githubIssueNumber: 123,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("generateTaskUrl", () => {
|
||||
test("generates correct task URL", () => {
|
||||
const inputs = createMockInputs();
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
const result = (action as unknown as CoderTaskAction).generateTaskUrl(
|
||||
"testuser",
|
||||
"task-123",
|
||||
);
|
||||
|
||||
expect(result).toBe("https://coder.test/tasks/testuser/task-123");
|
||||
});
|
||||
|
||||
test("handles URL with trailing junk", () => {
|
||||
const inputs = createMockInputs({
|
||||
coderURL: "https://coder.test/?param=value#anchor",
|
||||
});
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
const result = (action as unknown as CoderTaskAction).generateTaskUrl(
|
||||
"testuser",
|
||||
"task-123",
|
||||
);
|
||||
|
||||
// Should not have double slash
|
||||
expect(result).toBe("https://coder.test//tasks/testuser/task-123");
|
||||
});
|
||||
});
|
||||
|
||||
describe("commentOnIssue", () => {
|
||||
describe("Success Cases", () => {
|
||||
test("creates new comment when none exists", async () => {
|
||||
octokit.rest.issues.listComments.mockResolvedValue({
|
||||
data: [],
|
||||
} as ReturnType<typeof octokit.rest.issues.listComments>);
|
||||
octokit.rest.issues.createComment.mockResolvedValue(
|
||||
{} as ReturnType<typeof octokit.rest.issues.createComment>,
|
||||
);
|
||||
|
||||
const inputs = createMockInputs();
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
await (action as unknown as CoderTaskAction).commentOnIssue(
|
||||
"https://coder.test/tasks/testuser/task-123",
|
||||
"owner",
|
||||
"repo",
|
||||
123,
|
||||
);
|
||||
|
||||
expect(octokit.rest.issues.createComment).toHaveBeenCalledWith({
|
||||
owner: "owner",
|
||||
repo: "repo",
|
||||
issue_number: 123,
|
||||
body: "Task created: https://coder.test/tasks/testuser/task-123",
|
||||
});
|
||||
});
|
||||
|
||||
test("updates existing Task created comment", async () => {
|
||||
octokit.rest.issues.listComments.mockResolvedValue({
|
||||
data: [
|
||||
{ id: 1, body: "Task created: old-url" },
|
||||
{ id: 2, body: "Other comment" },
|
||||
{ id: 3, body: "Task created: another-old-url" },
|
||||
],
|
||||
} as ReturnType<typeof octokit.rest.issues.listComments>);
|
||||
octokit.rest.issues.updateComment.mockResolvedValue(
|
||||
{} as ReturnType<typeof octokit.rest.issues.updateComment>,
|
||||
);
|
||||
|
||||
const inputs = createMockInputs();
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
await (action as unknown as CoderTaskAction).commentOnIssue(
|
||||
"https://coder.test/tasks/testuser/task-123",
|
||||
"owner",
|
||||
"repo",
|
||||
123,
|
||||
);
|
||||
|
||||
// Should update the last "Task created:" comment
|
||||
expect(octokit.rest.issues.updateComment).toHaveBeenCalledWith({
|
||||
owner: "owner",
|
||||
repo: "repo",
|
||||
comment_id: 3,
|
||||
body: "Task created: https://coder.test/tasks/testuser/task-123",
|
||||
});
|
||||
});
|
||||
|
||||
test("parses owner/repo/issue from URL correctly", async () => {
|
||||
octokit.rest.issues.listComments.mockResolvedValue({
|
||||
data: [],
|
||||
} as ReturnType<typeof octokit.rest.issues.listComments>);
|
||||
octokit.rest.issues.createComment.mockResolvedValue(
|
||||
{} as ReturnType<typeof octokit.rest.issues.createComment>,
|
||||
);
|
||||
|
||||
const inputs = createMockInputs();
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
await (action as unknown as CoderTaskAction).commentOnIssue(
|
||||
"https://coder.test/tasks/testuser/task-123",
|
||||
"different-owner",
|
||||
"different-repo",
|
||||
456,
|
||||
);
|
||||
|
||||
expect(octokit.rest.issues.createComment).toHaveBeenCalledWith({
|
||||
owner: "different-owner",
|
||||
repo: "different-repo",
|
||||
issue_number: 456,
|
||||
body: "Task created: https://coder.test/tasks/testuser/task-123",
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("Error Cases", () => {
|
||||
test("warns but doesn't fail on GitHub API error", async () => {
|
||||
octokit.rest.issues.listComments.mockRejectedValue(
|
||||
new Error("API Error"),
|
||||
);
|
||||
|
||||
const inputs = createMockInputs();
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
// Should not throw
|
||||
expect(
|
||||
(action as unknown as CoderTaskAction).commentOnIssue(
|
||||
"https://coder.test/tasks/testuser/task-123",
|
||||
"owner",
|
||||
"repo",
|
||||
123,
|
||||
),
|
||||
).resolves.toBeUndefined();
|
||||
});
|
||||
|
||||
test("warns but doesn't fail on permission error", async () => {
|
||||
octokit.rest.issues.listComments.mockResolvedValue({
|
||||
data: [],
|
||||
} as ReturnType<typeof octokit.rest.issues.listComments>);
|
||||
octokit.rest.issues.createComment.mockRejectedValue(
|
||||
new Error("Permission denied"),
|
||||
);
|
||||
|
||||
const inputs = createMockInputs();
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
// Should not throw
|
||||
expect(
|
||||
(action as unknown as CoderTaskAction).commentOnIssue(
|
||||
"https://coder.test/tasks/testuser/task-123",
|
||||
"owner",
|
||||
"repo",
|
||||
123,
|
||||
),
|
||||
).resolves.toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe("Edge Cases", () => {
|
||||
test("updates last comment when multiple Task created comments exist", async () => {
|
||||
octokit.rest.issues.listComments.mockResolvedValue({
|
||||
data: [
|
||||
{ id: 1, body: "Task created: url1" },
|
||||
{ id: 2, body: "Other comment" },
|
||||
{ id: 3, body: "Task created: url2" },
|
||||
{ id: 4, body: "Another comment" },
|
||||
{ id: 5, body: "Task created: url3" },
|
||||
],
|
||||
} as ReturnType<typeof octokit.rest.issues.listComments>);
|
||||
octokit.rest.issues.updateComment.mockResolvedValue(
|
||||
{} as ReturnType<typeof octokit.rest.issues.updateComment>,
|
||||
);
|
||||
|
||||
const inputs = createMockInputs();
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
await (action as unknown as CoderTaskAction).commentOnIssue(
|
||||
"https://coder.test/tasks/testuser/task-123",
|
||||
"owner",
|
||||
"repo",
|
||||
123,
|
||||
);
|
||||
|
||||
// Should update comment 5 (last Task created comment)
|
||||
expect(octokit.rest.issues.updateComment).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
comment_id: 5,
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test("creates new task successfully", async () => {
  // Setup: user resolves, no existing task, creation succeeds.
  coderClient.mockGetCoderUserByGithubID.mockResolvedValue(mockUser);
  coderClient.mockGetTask.mockResolvedValue(null);
  coderClient.mockCreateTask.mockResolvedValue(mockTask);

  const inputs = createMockInputs({
    githubUserID: 12345,
  });
  const action = new CoderTaskAction(
    coderClient,
    octokit as unknown as Octokit,
    inputs,
  );

  // Execute
  const result = await action.run();

  // Verify
  expect(coderClient.mockGetCoderUserByGithubID).toHaveBeenCalledWith(12345);
  expect(coderClient.mockGetTask).toHaveBeenCalledWith(
    mockUser.username,
    mockTask.name,
  );
  // NOTE(review): this expects a single-object argument with `username` and
  // `template_id`, and a placeholder prompt `"idk"` — that shape conflicts
  // with the CoderClient.createTask(owner, request) signature declared in
  // coder-client. Confirm which contract the mock actually implements.
  expect(coderClient.mockCreateTask).toHaveBeenCalledWith({
    username: mockUser.username,
    name: mockTask.name,
    template_id: mockTemplate.id,
    input: "idk",
  });
  expect(result.coderUsername).toBe("testuser");
  // NOTE(review): asserting taskCreated === false contradicts the test name
  // ("creates new task successfully") — verify the intended expectation.
  expect(result.taskCreated).toBe(false);
  expect(result.taskUrl).toContain("/tasks/testuser/");
});
|
||||
|
||||
test("sends prompt to existing task", async () => {
  // Setup: getTask returns an existing task, so run() should forward the
  // prompt instead of creating a new task.
  coderClient.mockGetCoderUserByGithubID.mockResolvedValue(mockUser);
  coderClient.mockGetTemplateByOrganizationAndName.mockResolvedValue(
    mockTemplate,
  );
  coderClient.mockGetTemplateVersionPresets.mockResolvedValue([]);
  coderClient.mockGetTask.mockResolvedValue(mockTask);
  coderClient.mockSendTaskInput.mockResolvedValue(undefined);

  const inputs = createMockInputs({
    githubUserID: 12345,
  });
  const action = new CoderTaskAction(
    coderClient,
    octokit as unknown as Octokit,
    inputs,
  );

  // Execute
  const result = await action.run();

  // Verify
  expect(coderClient.mockGetTask).toHaveBeenCalledWith(
    mockUser.username,
    mockTask.name,
  );
  // NOTE(review): this expects sendTaskInput(taskId, { prompt }), while the
  // CoderClient interface declares sendTaskInput(owner, taskName, input) —
  // confirm which signature the mock implements.
  expect(coderClient.mockSendTaskInput).toHaveBeenCalledWith(mockTask.id, {
    prompt: "test prompt",
  });
  // No new task must be created when one already exists.
  expect(coderClient.mockCreateTask).not.toHaveBeenCalled();
  expect(result.taskCreated).toBe(false);
});
|
||||
|
||||
test("errors without issue URL", async () => {
|
||||
// Setup
|
||||
coderClient.mockGetCoderUserByGithubID.mockResolvedValue(mockUser);
|
||||
coderClient.mockGetTask.mockResolvedValue(null);
|
||||
coderClient.mockCreateTask.mockResolvedValue(mockTask);
|
||||
|
||||
const inputs = createMockInputs({
|
||||
githubUserID: 12345,
|
||||
githubIssueURL: undefined,
|
||||
});
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
// Execute
|
||||
expect(action.run()).rejects.toThrowError("Missing issue URL");
|
||||
});
|
||||
|
||||
test("comments on issue", async () => {
|
||||
// Setup
|
||||
coderClient.mockGetCoderUserByGithubID.mockResolvedValue(mockUser);
|
||||
coderClient.mockGetTask.mockResolvedValue(null);
|
||||
coderClient.mockGetTemplateByOrganizationAndName.mockResolvedValue(
|
||||
mockTemplate,
|
||||
);
|
||||
coderClient.mockGetTemplateVersionPresets.mockResolvedValue([]);
|
||||
coderClient.mockCreateTask.mockResolvedValue(mockTask);
|
||||
octokit.rest.issues.listComments.mockResolvedValue({
|
||||
data: [],
|
||||
} as ReturnType<typeof octokit.rest.issues.listComments>);
|
||||
octokit.rest.issues.createComment.mockResolvedValue(
|
||||
{} as ReturnType<typeof octokit.rest.issues.updateComment>,
|
||||
);
|
||||
|
||||
const inputs = createMockInputs({
|
||||
githubUserID: 12345,
|
||||
githubIssueURL: "https://github.com/owner/repo/issues/123",
|
||||
});
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
// Execute
|
||||
await action.run();
|
||||
|
||||
// Verify
|
||||
expect(octokit.rest.issues.createComment).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
owner: "owner",
|
||||
repo: "repo",
|
||||
issue_number: 123,
|
||||
body: "Task created: https://coder.test/tasks/testuser/task-123",
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
test("updates existing comment on issue", async () => {
  // Setup
  coderClient.mockGetCoderUserByGithubID.mockResolvedValue(mockUser);
  coderClient.mockGetTask.mockResolvedValue(null);
  coderClient.mockGetTemplateByOrganizationAndName.mockResolvedValue(
    mockTemplate,
  );
  coderClient.mockGetTemplateVersionPresets.mockResolvedValue([]);
  coderClient.mockCreateTask.mockResolvedValue(mockTask);
  // Only comment 23456 starts with "Task created:", so it is the one that
  // must be updated; the surrounding comments must be left alone.
  octokit.rest.issues.listComments.mockResolvedValue({
    data: [
      {
        id: 23455,
        body: "An unrelated comment",
      },
      {
        id: 23456,
        body: "Task created:",
      },
      {
        id: 23457,
        body: "Another unrelated comment",
      },
    ],
  } as ReturnType<typeof octokit.rest.issues.listComments>);
  octokit.rest.issues.updateComment.mockResolvedValue(
    {} as ReturnType<typeof octokit.rest.issues.updateComment>,
  );

  const inputs = createMockInputs({
    githubUserID: 12345,
    githubIssueURL: "https://github.com/owner/repo/issues/123",
  });
  const action = new CoderTaskAction(
    coderClient,
    octokit as unknown as Octokit,
    inputs,
  );

  // Execute
  await action.run();

  // Verify: the existing bot comment is rewritten with the new task URL.
  expect(octokit.rest.issues.updateComment).toHaveBeenCalledWith(
    expect.objectContaining({
      owner: "owner",
      repo: "repo",
      comment_id: 23456,
      body: "Task created: https://coder.test/tasks/testuser/task-123",
    }),
  );
});
|
||||
|
||||
test("handles error when comment on issue fails", async () => {
  // Setup
  coderClient.mockGetCoderUserByGithubID.mockResolvedValue(mockUser);
  coderClient.mockGetTask.mockResolvedValue(null);
  coderClient.mockGetTemplateByOrganizationAndName.mockResolvedValue(
    mockTemplate,
  );
  coderClient.mockGetTemplateVersionPresets.mockResolvedValue([]);
  coderClient.mockCreateTask.mockResolvedValue(mockTask);
  octokit.rest.issues.listComments.mockResolvedValue({
    data: [],
  } as ReturnType<typeof octokit.rest.issues.listComments>);
  octokit.rest.issues.createComment.mockRejectedValue(
    new Error("Failed to comment on issue"),
  );

  const inputs = createMockInputs({
    githubUserID: 12345,
    githubIssueURL: "https://github.com/owner/repo/issues/123",
  });
  const action = new CoderTaskAction(
    coderClient,
    octokit as unknown as Octokit,
    inputs,
  );

  // run() must not throw even though createComment rejects: commenting is
  // best-effort and the failure is swallowed inside commentOnIssue.
  await action.run();
  expect(octokit.rest.issues.createComment).toHaveBeenCalledWith(
    expect.objectContaining({
      owner: "owner",
      repo: "repo",
      issue_number: 123,
    }),
  );
});
|
||||
|
||||
describe("run - Error Scenarios", () => {
|
||||
test("throws error when Coder user not found", async () => {
|
||||
coderClient.mockGetCoderUserByGithubID.mockRejectedValue(
|
||||
new Error("No Coder user found with GitHub user ID 12345"),
|
||||
);
|
||||
|
||||
const inputs = createMockInputs({ githubUserID: 12345 });
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
expect(action.run()).rejects.toThrow(
|
||||
"No Coder user found with GitHub user ID 12345",
|
||||
);
|
||||
});
|
||||
|
||||
test("throws error when template not found", async () => {
|
||||
coderClient.mockGetCoderUserByGithubID.mockResolvedValue(mockUser);
|
||||
coderClient.mockGetTask.mockResolvedValue(null);
|
||||
coderClient.mockGetTemplateByOrganizationAndName.mockRejectedValue(
|
||||
new Error("Template not found"),
|
||||
);
|
||||
coderClient.mockCreateTask.mockRejectedValue(
|
||||
new Error("Template not found: nonexistent"),
|
||||
);
|
||||
|
||||
const inputs = createMockInputs({
|
||||
githubUserID: 12345,
|
||||
coderTemplateName: "nonexistent",
|
||||
});
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
expect(action.run()).rejects.toThrow("Template not found");
|
||||
});
|
||||
|
||||
test("throws error when task creation fails", async () => {
|
||||
coderClient.mockGetCoderUserByGithubID.mockResolvedValue(mockUser);
|
||||
coderClient.mockGetTask.mockResolvedValue(null);
|
||||
coderClient.mockGetTemplateByOrganizationAndName.mockResolvedValue(
|
||||
mockTemplate,
|
||||
);
|
||||
coderClient.mockGetTemplateVersionPresets.mockResolvedValue([]);
|
||||
coderClient.mockCreateTask.mockRejectedValue(
|
||||
new Error("Failed to create task"),
|
||||
);
|
||||
|
||||
const inputs = createMockInputs({ githubUserID: 12345 });
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
expect(action.run()).rejects.toThrow("Failed to create task");
|
||||
});
|
||||
|
||||
test("throws error on permission denied", async () => {
|
||||
coderClient.mockGetCoderUserByGithubID.mockResolvedValue(mockUser);
|
||||
coderClient.mockGetTask.mockResolvedValue(null);
|
||||
coderClient.mockGetTemplateByOrganizationAndName.mockResolvedValue(
|
||||
mockTemplate,
|
||||
);
|
||||
coderClient.mockGetTemplateVersionPresets.mockResolvedValue([]);
|
||||
coderClient.mockCreateTask.mockRejectedValue(
|
||||
new Error("Permission denied"),
|
||||
);
|
||||
|
||||
const inputs = createMockInputs({ githubUserID: 12345 });
|
||||
const action = new CoderTaskAction(
|
||||
coderClient,
|
||||
octokit as unknown as Octokit,
|
||||
inputs,
|
||||
);
|
||||
|
||||
expect(action.run()).rejects.toThrow("Permission denied");
|
||||
});
|
||||
});
|
||||
|
||||
// NOTE: this may or may not work in the real world depending on the permissions of the user
test("handles cross-repository issue", async () => {
  // Setup: the issue lives in a different owner/repo than the defaults.
  coderClient.mockGetCoderUserByGithubID.mockResolvedValue(mockUser);
  coderClient.mockGetTask.mockResolvedValue(null);
  coderClient.mockGetTemplateByOrganizationAndName.mockResolvedValue(
    mockTemplate,
  );
  coderClient.mockGetTemplateVersionPresets.mockResolvedValue([]);
  coderClient.mockCreateTask.mockResolvedValue(mockTask);
  octokit.rest.issues.listComments.mockResolvedValue({
    data: [],
  } as ReturnType<typeof octokit.rest.issues.listComments>);
  octokit.rest.issues.createComment.mockResolvedValue(
    {} as ReturnType<typeof octokit.rest.issues.createComment>,
  );

  const inputs = createMockInputs({
    githubIssueURL:
      "https://github.com/different-owner/different-repo/issues/456",
  });
  const action = new CoderTaskAction(
    coderClient,
    octokit as unknown as Octokit,
    inputs,
  );

  await action.run();
  // The owner/repo/number parsed from the URL must be forwarded verbatim.
  expect(octokit.rest.issues.createComment).toHaveBeenCalledWith(
    expect.objectContaining({
      owner: "different-owner",
      repo: "different-repo",
      issue_number: 456,
    }),
  );
});
|
||||
});
|
||||
@@ -0,0 +1,198 @@
|
||||
import * as core from "@actions/core";
|
||||
import {
|
||||
ExperimentalCoderSDKCreateTaskRequest,
|
||||
type CoderClient,
|
||||
} from "./coder-client";
|
||||
import type { ActionInputs, ActionOutputs } from "./schemas";
|
||||
import type { getOctokit } from "@actions/github";
|
||||
|
||||
export type Octokit = ReturnType<typeof getOctokit>;
|
||||
|
||||
export class CoderTaskAction {
|
||||
constructor(
|
||||
private readonly coder: CoderClient,
|
||||
private readonly octokit: Octokit,
|
||||
private readonly inputs: ActionInputs,
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Parse owner and repo from issue URL
|
||||
*/
|
||||
parseGithubIssueURL(): {
|
||||
githubOrg: string;
|
||||
githubRepo: string;
|
||||
githubIssueNumber: number;
|
||||
} {
|
||||
if (!this.inputs.githubIssueURL) {
|
||||
throw new Error(`Missing issue URL`);
|
||||
}
|
||||
|
||||
// Parse: https://github.com/owner/repo/issues/123
|
||||
const match = this.inputs.githubIssueURL.match(
|
||||
/([^/]+)\/([^/]+)\/issues\/(\d+)/,
|
||||
);
|
||||
if (!match) {
|
||||
throw new Error(`Invalid issue URL: ${this.inputs.githubIssueURL}`);
|
||||
}
|
||||
return {
|
||||
githubOrg: match[1],
|
||||
githubRepo: match[2],
|
||||
githubIssueNumber: parseInt(match[3], 10),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate task URL
|
||||
*/
|
||||
generateTaskUrl(coderUsername: string, taskName: string): string {
|
||||
return `${this.inputs.coderURL}/tasks/${coderUsername}/${taskName}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Comment on GitHub issue with task link
|
||||
*/
|
||||
async commentOnIssue(
|
||||
taskUrl: string,
|
||||
owner: string,
|
||||
repo: string,
|
||||
issueNumber: number,
|
||||
): Promise<void> {
|
||||
const body = `Task created: ${taskUrl}`;
|
||||
|
||||
try {
|
||||
// Try to find existing comment from bot
|
||||
const { data: comments } = await this.octokit.rest.issues.listComments({
|
||||
owner,
|
||||
repo,
|
||||
issue_number: issueNumber,
|
||||
});
|
||||
|
||||
// Find the last comment that starts with "Task created:"
|
||||
const existingComment = comments
|
||||
.reverse()
|
||||
.find((comment: { body?: string }) =>
|
||||
comment.body?.startsWith("Task created:"),
|
||||
);
|
||||
|
||||
if (existingComment) {
|
||||
// Update existing comment
|
||||
await this.octokit.rest.issues.updateComment({
|
||||
owner,
|
||||
repo,
|
||||
comment_id: existingComment.id,
|
||||
body,
|
||||
});
|
||||
} else {
|
||||
// Create new comment
|
||||
await this.octokit.rest.issues.createComment({
|
||||
owner,
|
||||
repo,
|
||||
issue_number: issueNumber,
|
||||
body,
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
core.error(`Failed to comment on issue: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Main action execution
|
||||
*/
|
||||
async run(): Promise<ActionOutputs> {
|
||||
core.debug(`GitHub user ID: ${this.inputs.githubUserID}`);
|
||||
const coderUser = await this.coder.getCoderUserByGitHubId(
|
||||
this.inputs.githubUserID,
|
||||
);
|
||||
const { githubOrg, githubRepo, githubIssueNumber } =
|
||||
this.parseGithubIssueURL();
|
||||
core.debug(`GitHub owner: ${githubOrg}`);
|
||||
core.debug(`GitHub repo: ${githubRepo}`);
|
||||
core.debug(`GitHub issue number: ${githubIssueNumber}`);
|
||||
core.debug(`Coder username: ${coderUser.username}`);
|
||||
if (!this.inputs.coderTaskNamePrefix || !this.inputs.githubIssueURL) {
|
||||
throw new Error(
|
||||
"either taskName or both taskNamePrefix and issueURL must be provided",
|
||||
);
|
||||
}
|
||||
const taskName = `${this.inputs.coderTaskNamePrefix}-${githubIssueNumber}`;
|
||||
core.debug(`Coder Task name: ${taskName}`);
|
||||
const template = await this.coder.getTemplateByOrganizationAndName(
|
||||
this.inputs.coderOrganization,
|
||||
this.inputs.coderTemplateName,
|
||||
);
|
||||
core.debug(
|
||||
`Coder Template: ${template.name} (id:${template.id}, active_version_id:${template.active_version_id})`,
|
||||
);
|
||||
const templateVersionPresets = await this.coder.getTemplateVersionPresets(
|
||||
template.active_version_id,
|
||||
);
|
||||
let presetID = undefined;
|
||||
// If no preset specified, use default preset
|
||||
if (!this.inputs.coderTemplatePreset) {
|
||||
for (const preset of templateVersionPresets) {
|
||||
if (preset.Name === this.inputs.coderTemplatePreset) {
|
||||
presetID = preset.ID;
|
||||
core.debug(`Coder Template Preset ID: ${presetID}`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
// User requested a preset that does not exist
|
||||
if (this.inputs.coderTemplatePreset && !presetID) {
|
||||
throw new Error(`Preset ${this.inputs.coderTemplatePreset} not found`);
|
||||
}
|
||||
}
|
||||
|
||||
const existingTask = await this.coder.getTask(coderUser.username, taskName);
|
||||
if (existingTask) {
|
||||
core.debug(`Task already exists: ${existingTask.id}`);
|
||||
core.debug("Sending prompt to existing task...");
|
||||
// Send prompt to existing task
|
||||
await this.coder.sendTaskInput(
|
||||
coderUser.username,
|
||||
taskName,
|
||||
this.inputs.coderTaskPrompt,
|
||||
);
|
||||
core.debug("Prompt sent successfully");
|
||||
return {
|
||||
coderUsername: coderUser.username,
|
||||
taskName: existingTask.name,
|
||||
taskUrl: this.generateTaskUrl(coderUser.username, taskName),
|
||||
taskCreated: false,
|
||||
};
|
||||
}
|
||||
core.debug("Creating Coder task...");
|
||||
|
||||
const req: ExperimentalCoderSDKCreateTaskRequest = {
|
||||
name: taskName,
|
||||
template_version_id: this.inputs.coderTemplateName,
|
||||
template_version_preset_id: presetID,
|
||||
input: this.inputs.coderTaskPrompt,
|
||||
};
|
||||
// Create new task
|
||||
const createdTask = await this.coder.createTask(coderUser.username, req);
|
||||
core.debug("Task created successfully");
|
||||
|
||||
// 5. Generate task URL
|
||||
const taskUrl = this.generateTaskUrl(coderUser.username, createdTask.name);
|
||||
core.debug(`Task URL: ${taskUrl}`);
|
||||
|
||||
// 6. Comment on issue if requested
|
||||
core.debug(
|
||||
`Commenting on issue ${githubOrg}/${githubRepo}#${githubIssueNumber}`,
|
||||
);
|
||||
await this.commentOnIssue(
|
||||
taskUrl,
|
||||
githubOrg,
|
||||
githubRepo,
|
||||
githubIssueNumber,
|
||||
);
|
||||
core.debug(`Comment posted successfully`);
|
||||
return {
|
||||
coderUsername: coderUser.username,
|
||||
taskName: taskName,
|
||||
taskUrl,
|
||||
taskCreated: true,
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,326 @@
|
||||
import { describe, expect, test, beforeEach, mock } from "bun:test";
|
||||
import {
|
||||
RealCoderClient,
|
||||
CoderAPIError,
|
||||
ExperimentalCoderSDKCreateTaskRequestSchema,
|
||||
ExperimentalCoderSDKCreateTaskRequest,
|
||||
} from "./coder-client";
|
||||
import {
|
||||
mockUser,
|
||||
mockUserList,
|
||||
mockUserListEmpty,
|
||||
mockUserListDuplicate,
|
||||
mockTemplate,
|
||||
mockTemplateVersionPresets,
|
||||
mockTask,
|
||||
mockTaskList,
|
||||
mockTaskListEmpty,
|
||||
createMockInputs,
|
||||
createMockResponse,
|
||||
mockTemplateVersionPreset,
|
||||
} from "./test-helpers";
|
||||
|
||||
describe("CoderClient", () => {
|
||||
let client: RealCoderClient;
|
||||
let mockFetch: ReturnType<typeof mock>;
|
||||
|
||||
beforeEach(() => {
|
||||
const mockInputs = createMockInputs();
|
||||
client = new RealCoderClient(mockInputs.coderURL, mockInputs.coderToken);
|
||||
mockFetch = mock(() => Promise.resolve(createMockResponse([])));
|
||||
global.fetch = mockFetch as unknown as typeof fetch;
|
||||
});
|
||||
|
||||
describe("getCoderUserByGitHubId", () => {
|
||||
test("returns the user when found", async () => {
|
||||
mockFetch.mockResolvedValue(createMockResponse(mockUserList));
|
||||
const result = await client.getCoderUserByGitHubId(
|
||||
mockUser.github_com_user_id,
|
||||
);
|
||||
expect(mockFetch).toHaveBeenCalledWith(
|
||||
`https://coder.test/api/v2/users?q=github_com_user_id%3A${mockUser.github_com_user_id!.toString()}`,
|
||||
expect.objectContaining({
|
||||
headers: expect.objectContaining({
|
||||
"Coder-Session-Token": "test-token",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
expect(result.id).toBe(mockUser.id);
|
||||
expect(result.username).toBe(mockUser.username);
|
||||
expect(result.github_com_user_id).toBe(mockUser.github_com_user_id);
|
||||
});
|
||||
|
||||
test("throws an error if multiple Coder users are found with the same GitHub ID", async () => {
|
||||
mockFetch.mockResolvedValue(createMockResponse(mockUserListDuplicate));
|
||||
expect(
|
||||
client.getCoderUserByGitHubId(mockUser.github_com_user_id!),
|
||||
).rejects.toThrow(CoderAPIError);
|
||||
expect(mockFetch).toHaveBeenCalledWith(
|
||||
`https://coder.test/api/v2/users?q=github_com_user_id%3A${mockUser.github_com_user_id!.toString()}`,
|
||||
expect.objectContaining({
|
||||
headers: expect.objectContaining({
|
||||
"Coder-Session-Token": "test-token",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
test("throws an error if no Coder user is found with the given GitHub ID", async () => {
|
||||
mockFetch.mockResolvedValue(createMockResponse(mockUserListEmpty));
|
||||
expect(
|
||||
client.getCoderUserByGitHubId(mockUser.github_com_user_id!),
|
||||
).rejects.toThrow(CoderAPIError);
|
||||
expect(mockFetch).toHaveBeenCalledWith(
|
||||
`https://coder.test/api/v2/users?q=github_com_user_id%3A${mockUser.github_com_user_id}`,
|
||||
expect.objectContaining({
|
||||
headers: expect.objectContaining({
|
||||
"Coder-Session-Token": "test-token",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
test("throws error on 401 unauthorized", async () => {
|
||||
mockFetch.mockResolvedValue(
|
||||
createMockResponse(
|
||||
{ error: "Unauthorized" },
|
||||
{ ok: false, status: 401, statusText: "Unauthorized" },
|
||||
),
|
||||
);
|
||||
expect(
|
||||
client.getCoderUserByGitHubId(mockUser.github_com_user_id!),
|
||||
).rejects.toThrow(CoderAPIError);
|
||||
});
|
||||
|
||||
test("throws error on 500 server error", async () => {
|
||||
mockFetch.mockResolvedValue(
|
||||
createMockResponse(
|
||||
{ error: "Internal Server Error" },
|
||||
{ ok: false, status: 500, statusText: "Internal Server Error" },
|
||||
),
|
||||
);
|
||||
expect(
|
||||
client.getCoderUserByGitHubId(mockUser.github_com_user_id!),
|
||||
).rejects.toThrow(CoderAPIError);
|
||||
});
|
||||
|
||||
test("throws an error when GitHub user ID is 0", async () => {
|
||||
mockFetch.mockResolvedValue(createMockResponse([mockUser]));
|
||||
expect(client.getCoderUserByGitHubId(0)).rejects.toThrow(
|
||||
"GitHub user ID cannot be 0",
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getTemplateByOrganizationAndName", () => {
|
||||
test("the given template is returned successfully if it exists", async () => {
|
||||
mockFetch.mockResolvedValue(createMockResponse(mockTemplate));
|
||||
const mockInputs = createMockInputs();
|
||||
const result = await client.getTemplateByOrganizationAndName(
|
||||
mockInputs.coderOrganization,
|
||||
mockTemplate.name,
|
||||
);
|
||||
expect(mockFetch).toHaveBeenCalledWith(
|
||||
`https://coder.test/api/v2/organizations/${mockInputs.coderOrganization}/templates/${mockTemplate.name}`,
|
||||
expect.objectContaining({
|
||||
headers: expect.objectContaining({
|
||||
"Coder-Session-Token": "test-token",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
expect(result.id).toBe(mockTemplate.id);
|
||||
expect(result.name).toBe(mockTemplate.name);
|
||||
expect(result.active_version_id).toBe(mockTemplate.active_version_id);
|
||||
});
|
||||
|
||||
test("throws an error when the given template is not found", async () => {
|
||||
mockFetch.mockResolvedValue(
|
||||
createMockResponse(
|
||||
{ error: "Not found" },
|
||||
{ ok: false, status: 404, statusText: "Not Found" },
|
||||
),
|
||||
);
|
||||
const mockInputs = createMockInputs();
|
||||
expect(
|
||||
client.getTemplateByOrganizationAndName(
|
||||
mockInputs.coderOrganization,
|
||||
"nonexistent",
|
||||
),
|
||||
).rejects.toThrow(CoderAPIError);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getTemplateVersionPresets", () => {
|
||||
test("returns template version presets", async () => {
|
||||
mockFetch.mockResolvedValue(
|
||||
createMockResponse(mockTemplateVersionPresets),
|
||||
);
|
||||
const result = await client.getTemplateVersionPresets(
|
||||
mockTemplate.active_version_id,
|
||||
);
|
||||
expect(result).not.toBeNull();
|
||||
expect(result).toHaveLength(mockTemplateVersionPresets.length);
|
||||
for (let idx = 0; idx < result.length; idx++) {
|
||||
expect(result[idx].ID).toBe(mockTemplateVersionPresets[idx].ID);
|
||||
expect(result[idx].Name).toBe(mockTemplateVersionPresets[idx].Name);
|
||||
}
|
||||
expect(mockFetch).toHaveBeenCalledWith(
|
||||
`https://coder.test/api/v2/templateversions/${mockTemplate.active_version_id}/presets`,
|
||||
expect.objectContaining({
|
||||
headers: expect.objectContaining({
|
||||
"Coder-Session-Token": "test-token",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getTask", () => {
  test("returns task when task exists", async () => {
    mockFetch.mockResolvedValue(createMockResponse(mockTaskList));
    const result = await client.getTask(mockUser.username, mockTask.name);
    expect(result).not.toBeNull();
    expect(result?.id).toBe(mockTask.id);
    expect(result?.name).toBe(mockTask.name);
    // Tasks are looked up by listing the owner's tasks (the owner filter is
    // URL-encoded into the query string: ":" becomes %3A).
    expect(mockFetch).toHaveBeenCalledWith(
      `https://coder.test/api/experimental/tasks?q=owner%3A${mockUser.username}`,
      expect.objectContaining({
        headers: expect.objectContaining({
          "Coder-Session-Token": "test-token",
        }),
      }),
    );
  });

  // NOTE(review): the title says "(404)" but the mock returns an OK response
  // with an empty task list — confirm whether an actual 404 path also needs
  // coverage.
  test("returns null when task doesn't exist (404)", async () => {
    mockFetch.mockResolvedValue(createMockResponse(mockTaskListEmpty));
    const result = await client.getTask(mockUser.username, mockTask.name);
    expect(result).toBeNull();
    expect(mockFetch).toHaveBeenCalledWith(
      `https://coder.test/api/experimental/tasks?q=owner%3A${mockUser.username}`,
      expect.objectContaining({
        headers: expect.objectContaining({
          "Coder-Session-Token": "test-token",
        }),
      }),
    );
  });
});
|
||||
|
||||
describe("createTask", () => {
  test("creates task successfully given valid input", async () => {
    mockFetch.mockResolvedValueOnce(createMockResponse(mockTask));
    const mockInputs = createMockInputs();
    const result = await client.createTask(mockUser.username, {
      name: mockTask.name,
      template_version_id: mockTemplate.active_version_id,
      input: mockInputs.coderTaskPrompt,
    });
    expect(result.id).toBe(mockTask.id);
    expect(result.name).toBe(mockTask.name);
    // The create call must POST to the owner-scoped experimental endpoint
    // with the request serialized verbatim as the JSON body.
    expect(mockFetch).toHaveBeenNthCalledWith(
      1,
      `https://coder.test/api/experimental/tasks/${mockUser.username}`,
      expect.objectContaining({
        method: "POST",
        headers: expect.objectContaining({
          "Coder-Session-Token": "test-token",
        }),
        body: JSON.stringify({
          name: mockTask.name,
          template_version_id: mockTemplate.active_version_id,
          input: mockInputs.coderTaskPrompt,
        }),
      }),
    );
  });

  test("creates task successfully with a given preset", async () => {
    mockFetch.mockResolvedValueOnce(createMockResponse(mockTask));
    // NOTE(review): `template_version_preset_id` spread into mockInputs is
    // never read back — the preset ID is passed directly in the request
    // below. Confirm whether this extra field on the inputs object is needed.
    const mockInputs = {
      ...createMockInputs(),
      template_version_preset_id: mockTemplateVersionPreset.ID,
    };
    const result = await client.createTask(mockUser.username, {
      name: mockTask.name,
      template_version_id: mockTemplate.active_version_id,
      template_version_preset_id: mockTemplateVersionPreset.ID,
      input: mockInputs.coderTaskPrompt,
    });
    expect(result.id).toBe(mockTask.id);
    expect(result.name).toBe(mockTask.name);
    // The preset ID must be forwarded in the serialized request body.
    expect(mockFetch).toHaveBeenNthCalledWith(
      1,
      `https://coder.test/api/experimental/tasks/${mockUser.username}`,
      expect.objectContaining({
        method: "POST",
        headers: expect.objectContaining({
          "Coder-Session-Token": "test-token",
        }),
        body: JSON.stringify({
          name: mockTask.name,
          template_version_id: mockTemplate.active_version_id,
          template_version_preset_id: mockTemplateVersionPreset.ID,
          input: mockInputs.coderTaskPrompt,
        }),
      }),
    );
  });
});
|
||||
|
||||
describe("sendTaskInput", () => {
|
||||
test("sends input successfully", async () => {
|
||||
mockFetch.mockResolvedValue(createMockResponse({}));
|
||||
|
||||
const testInput = "Test input";
|
||||
await client.sendTaskInput(mockUser.username, mockTask.name, testInput);
|
||||
|
||||
expect(mockFetch).toHaveBeenCalledWith(
|
||||
`https://coder.test/api/v2/users/${mockUser.username}/tasks/${mockTask.name}/send`,
|
||||
expect.objectContaining({
|
||||
method: "POST",
|
||||
body: expect.stringContaining(testInput),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
test("request body contains input field", async () => {
|
||||
mockFetch.mockResolvedValue(createMockResponse({}));
|
||||
|
||||
const testInput = "Test input";
|
||||
await client.sendTaskInput(mockUser.username, mockTask.name, testInput);
|
||||
|
||||
const call = mockFetch.mock.calls[0];
|
||||
const body = JSON.parse(call[1].body);
|
||||
expect(body.input).toBe(testInput);
|
||||
});
|
||||
|
||||
test("throws error when task not found (404)", async () => {
|
||||
mockFetch.mockResolvedValue(
|
||||
createMockResponse(
|
||||
{ error: "Not Found" },
|
||||
{ ok: false, status: 404, statusText: "Not Found" },
|
||||
),
|
||||
);
|
||||
|
||||
const testInput = "Test input";
|
||||
expect(
|
||||
client.sendTaskInput(mockUser.username, mockTask.name, testInput),
|
||||
).rejects.toThrow(CoderAPIError);
|
||||
});
|
||||
|
||||
test("throws error when task not running (400)", async () => {
|
||||
mockFetch.mockResolvedValue(
|
||||
createMockResponse(
|
||||
{ error: "Bad Request" },
|
||||
{ ok: false, status: 400, statusText: "Bad Request" },
|
||||
),
|
||||
);
|
||||
|
||||
const testInput = "Test input";
|
||||
expect(
|
||||
client.sendTaskInput(mockUser.username, mockTask.name, testInput),
|
||||
).rejects.toThrow(CoderAPIError);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,271 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// CoderClient is the minimal surface of the Coder API that this action uses.
// RealCoderClient implements it against a live deployment; tests substitute
// MockCoderClient.
export interface CoderClient {
  // Looks up the Coder user linked to the given GitHub user ID.
  getCoderUserByGitHubId(
    githubUserId: number | undefined,
  ): Promise<CoderSDKUser>;

  // Fetches a template by organization name and template name.
  getTemplateByOrganizationAndName(
    organizationName: string,
    templateName: string,
  ): Promise<CoderSDKTemplate>;

  // Lists the presets available for a template version (UUID).
  getTemplateVersionPresets(
    templateVersionId: string,
  ): Promise<CoderSDKTemplateVersionPreset[]>;

  // Retrieves a task by owner and name; resolves to null when it does not exist.
  getTask(
    owner: string,
    taskName: string,
  ): Promise<ExperimentalCoderSDKTask | null>;

  // Creates a new task owned by `owner`.
  createTask(
    owner: string,
    params: ExperimentalCoderSDKCreateTaskRequest,
  ): Promise<ExperimentalCoderSDKTask>;

  // Sends free-form input to an existing (running) task.
  sendTaskInput(owner: string, taskName: string, input: string): Promise<void>;
}
|
||||
|
||||
// CoderClient provides a minimal set of methods for interacting with the Coder API.
|
||||
export class RealCoderClient implements CoderClient {
|
||||
private readonly headers: Record<string, string>;
|
||||
|
||||
constructor(
|
||||
private readonly serverURL: string,
|
||||
apiToken: string,
|
||||
) {
|
||||
this.headers = {
|
||||
"Coder-Session-Token": apiToken,
|
||||
"Content-Type": "application/json",
|
||||
};
|
||||
}
|
||||
|
||||
private async request<T>(
|
||||
endpoint: string,
|
||||
options?: RequestInit,
|
||||
): Promise<T> {
|
||||
const url = `${this.serverURL}${endpoint}`;
|
||||
const response = await fetch(url, {
|
||||
...options,
|
||||
headers: { ...this.headers, ...options?.headers },
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const body = await response.text().catch(() => "");
|
||||
throw new CoderAPIError(
|
||||
`Coder API error: ${response.statusText}`,
|
||||
response.status,
|
||||
body,
|
||||
);
|
||||
}
|
||||
|
||||
return response.json() as Promise<T>;
|
||||
}
|
||||
|
||||
/**
|
||||
* getCoderUserByGitHubId retrieves an existing Coder user with the given GitHub user ID using Coder's stable API.
|
||||
* Throws an error if more than one user exists with the same GitHub user ID or if a GitHub user ID of 0 is provided.
|
||||
*/
|
||||
async getCoderUserByGitHubId(
|
||||
githubUserId: number | undefined,
|
||||
): Promise<CoderSDKUser> {
|
||||
if (githubUserId === undefined) {
|
||||
throw new CoderAPIError("GitHub user ID cannot be undefined", 400);
|
||||
}
|
||||
if (githubUserId === 0) {
|
||||
throw "GitHub user ID cannot be 0";
|
||||
}
|
||||
const endpoint = `/api/v2/users?q=${encodeURIComponent(`github_com_user_id:${githubUserId}`)}`;
|
||||
const response = await this.request<unknown[]>(endpoint);
|
||||
const userList = CoderSDKGetUsersResponseSchema.parse(response);
|
||||
if (userList.users.length === 0) {
|
||||
throw new CoderAPIError(
|
||||
`No Coder user found with GitHub user ID ${githubUserId}`,
|
||||
404,
|
||||
);
|
||||
}
|
||||
if (userList.users.length > 1) {
|
||||
throw new CoderAPIError(
|
||||
`Multiple Coder users found with GitHub user ID ${githubUserId}`,
|
||||
409,
|
||||
);
|
||||
}
|
||||
return CoderSDKUserSchema.parse(userList.users[0]);
|
||||
}
|
||||
|
||||
/**
|
||||
* getTemplateByOrganizationAndName retrieves a template via Coder's stable API.
|
||||
*/
|
||||
async getTemplateByOrganizationAndName(
|
||||
organizationName: string,
|
||||
templateName: string,
|
||||
): Promise<CoderSDKTemplate> {
|
||||
const endpoint = `/api/v2/organizations/${encodeURIComponent(organizationName)}/templates/${encodeURIComponent(templateName)}`;
|
||||
const response =
|
||||
await this.request<typeof CoderSDKTemplateSchema>(endpoint);
|
||||
return CoderSDKTemplateSchema.parse(response);
|
||||
}
|
||||
|
||||
/**
|
||||
* getTemplateVersionPresets retrieves the presets for a given template version (UUID).
|
||||
*/
|
||||
async getTemplateVersionPresets(
|
||||
templateVersionId: string,
|
||||
): Promise<CoderSDKTemplateVersionPresetsResponse> {
|
||||
const endpoint = `/api/v2/templateversions/${encodeURIComponent(templateVersionId)}/presets`;
|
||||
const response =
|
||||
await this.request<CoderSDKTemplateVersionPresetsResponse>(endpoint);
|
||||
return CoderSDKTemplateVersionPresetsResponseSchema.parse(response);
|
||||
}
|
||||
|
||||
/**
|
||||
* getTask retrieves an existing task via Coder's experimental Tasks API.
|
||||
* Returns null if the task does not exist.
|
||||
*/
|
||||
async getTask(
|
||||
owner: string,
|
||||
taskName: string,
|
||||
): Promise<ExperimentalCoderSDKTask | null> {
|
||||
// TODO: needs taskByOwnerAndName endpoint, fake it for now with the list endpoint.
|
||||
try {
|
||||
const allTasksResponse = await this.request<unknown>(
|
||||
`/api/experimental/tasks?q=${encodeURIComponent(`owner:${owner}`)}`,
|
||||
);
|
||||
const allTasks =
|
||||
ExperimentalCoderSDKTaskListResponseSchema.parse(allTasksResponse);
|
||||
const task = allTasks.tasks.find((t) => t.name === taskName);
|
||||
if (!task) {
|
||||
return null;
|
||||
}
|
||||
return task;
|
||||
} catch (error) {
|
||||
if (error instanceof CoderAPIError && error.statusCode === 404) {
|
||||
return null;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* createTask creates a new task with the given parameters using Coder's experimental Tasks API.
|
||||
*/
|
||||
async createTask(
|
||||
owner: string,
|
||||
params: ExperimentalCoderSDKCreateTaskRequest,
|
||||
): Promise<ExperimentalCoderSDKTask> {
|
||||
const endpoint = `/api/experimental/tasks/${encodeURIComponent(owner)}`;
|
||||
const response = await this.request<unknown>(endpoint, {
|
||||
method: "POST",
|
||||
body: JSON.stringify(params),
|
||||
});
|
||||
return ExperimentalCoderSDKTaskSchema.parse(response);
|
||||
}
|
||||
|
||||
/**
|
||||
* sendTaskInput sends the given input to an existing task via Coder's experimental Tasks API.
|
||||
*/
|
||||
async sendTaskInput(
|
||||
ownerUsername: string,
|
||||
taskName: string,
|
||||
input: string,
|
||||
): Promise<void> {
|
||||
const endpoint = `/api/v2/users/${ownerUsername}/tasks/${taskName}/send`;
|
||||
await this.request<unknown>(endpoint, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ input }),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// CoderSDKUserSchema is the schema for codersdk.User.
export const CoderSDKUserSchema = z.object({
  id: z.string().uuid(),
  username: z.string(),
  email: z.string().email(),
  organization_ids: z.array(z.string().uuid()),
  // Absent for users not linked to a GitHub account.
  github_com_user_id: z.number().optional(),
});
export type CoderSDKUser = z.infer<typeof CoderSDKUserSchema>;

// CoderSDKUserListSchema is the schema for codersdk.GetUsersResponse.
export const CoderSDKGetUsersResponseSchema = z.object({
  users: z.array(CoderSDKUserSchema),
});
export type CoderSDKGetUsersResponse = z.infer<
  typeof CoderSDKGetUsersResponseSchema
>;

// CoderSDKTemplateSchema is the schema for codersdk.Template.
export const CoderSDKTemplateSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  description: z.string().optional(),
  organization_id: z.string().uuid(),
  // Version used when creating tasks from this template.
  active_version_id: z.string().uuid(),
});
export type CoderSDKTemplate = z.infer<typeof CoderSDKTemplateSchema>;

// CoderSDKTemplateVersionPresetSchema is the schema for codersdk.Preset.
// NOTE: fields are PascalCase here because this schema mirrors the JSON
// returned by the presets endpoint, unlike the snake_case schemas above.
export const CoderSDKTemplateVersionPresetSchema = z.object({
  ID: z.string().uuid(),
  Name: z.string(),
  Default: z.boolean(),
});
export type CoderSDKTemplateVersionPreset = z.infer<
  typeof CoderSDKTemplateVersionPresetSchema
>;

// CoderSDKTemplateVersionPresetsResponseSchema is the schema for []codersdk.Preset which is returned by the API.
export const CoderSDKTemplateVersionPresetsResponseSchema = z.array(
  CoderSDKTemplateVersionPresetSchema,
);
export type CoderSDKTemplateVersionPresetsResponse = z.infer<
  typeof CoderSDKTemplateVersionPresetsResponseSchema
>;

// ExperimentalCoderSDKCreateTaskRequestSchema is the schema for experimental codersdk.CreateTaskRequest.
export const ExperimentalCoderSDKCreateTaskRequestSchema = z.object({
  name: z.string().min(1),
  template_version_id: z.string().min(1),
  template_version_preset_id: z.string().min(1).optional(),
  // The task's prompt text.
  input: z.string().min(1),
});
export type ExperimentalCoderSDKCreateTaskRequest = z.infer<
  typeof ExperimentalCoderSDKCreateTaskRequestSchema
>;

// ExperimentalCoderSDKTaskSchema is the schema for experimental codersdk.Task.
export const ExperimentalCoderSDKTaskSchema = z.object({
  id: z.string().uuid(),
  name: z.string(),
  owner_id: z.string().uuid(),
  template_id: z.string().uuid(),
  // RFC 3339 timestamps, kept as strings (not parsed to Date).
  created_at: z.string(),
  updated_at: z.string(),
  status: z.string(),
});
export type ExperimentalCoderSDKTask = z.infer<
  typeof ExperimentalCoderSDKTaskSchema
>;

// ExperimentalCoderSDKTaskListResponseSchema is the schema for Coder's GET /api/experimental/tasks endpoint.
// At the time of writing, this type is not exported by github.com/coder/coder/v2/codersdk.
export const ExperimentalCoderSDKTaskListResponseSchema = z.object({
  tasks: z.array(ExperimentalCoderSDKTaskSchema),
});
export type ExperimentalCoderSDKTaskListResponse = z.infer<
  typeof ExperimentalCoderSDKTaskListResponseSchema
>;
|
||||
|
||||
// CoderAPIError is a custom error class for Coder API errors.
|
||||
export class CoderAPIError extends Error {
|
||||
constructor(
|
||||
message: string,
|
||||
public readonly statusCode: number,
|
||||
public readonly response?: unknown,
|
||||
) {
|
||||
super(message);
|
||||
this.name = "CoderAPIError";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,67 @@
|
||||
import * as core from "@actions/core";
|
||||
import * as github from "@actions/github";
|
||||
import { CoderTaskAction } from "./action";
|
||||
import { RealCoderClient } from "./coder-client";
|
||||
import { ActionInputsSchema } from "./schemas";
|
||||
|
||||
async function main() {
|
||||
try {
|
||||
// Parse and validate inputs
|
||||
const inputs = ActionInputsSchema.parse({
|
||||
coderUrl: core.getInput("coder-url", { required: true }),
|
||||
coderToken: core.getInput("coder-token", { required: true }),
|
||||
templateName: core.getInput("template-name", { required: true }),
|
||||
taskPrompt: core.getInput("task-prompt", { required: true }),
|
||||
githubUserId: core.getInput("github-user-id")
|
||||
? Number.parseInt(core.getInput("github-user-id"), 10)
|
||||
: undefined,
|
||||
githubUsername: core.getInput("github-username") || undefined,
|
||||
templatePreset: core.getInput("template-preset") || "Default",
|
||||
taskNamePrefix: core.getInput("task-name-prefix") || "task",
|
||||
taskName: core.getInput("task-name") || undefined,
|
||||
organization: core.getInput("organization") || "coder",
|
||||
issueUrl: core.getInput("issue-url") || undefined,
|
||||
commentOnIssue: core.getBooleanInput("comment-on-issue") !== false,
|
||||
coderWebUrl: core.getInput("coder-web-url") || undefined,
|
||||
githubToken: core.getInput("github-token", { required: true }),
|
||||
});
|
||||
|
||||
core.debug("Inputs validated successfully");
|
||||
core.debug(`Coder URL: ${inputs.coderURL}`);
|
||||
core.debug(`Template: ${inputs.coderTemplateName}`);
|
||||
core.debug(`Organization: ${inputs.coderOrganization}`);
|
||||
|
||||
// Initialize clients
|
||||
const coder = new RealCoderClient(inputs.coderURL, inputs.coderToken);
|
||||
const octokit = github.getOctokit(inputs.githubToken);
|
||||
|
||||
core.debug("Clients initialized");
|
||||
|
||||
// Execute action
|
||||
const action = new CoderTaskAction(coder, octokit, inputs);
|
||||
const outputs = await action.run();
|
||||
|
||||
// Set outputs
|
||||
core.setOutput("coder-username", outputs.coderUsername);
|
||||
core.setOutput("task-name", outputs.taskName);
|
||||
core.setOutput("task-url", outputs.taskUrl);
|
||||
core.setOutput("task-exists", outputs.taskCreated.toString());
|
||||
|
||||
core.debug("Action completed successfully");
|
||||
core.debug(`Outputs: ${JSON.stringify(outputs, null, 2)}`);
|
||||
} catch (error) {
|
||||
if (error instanceof Error) {
|
||||
core.setFailed(error.message);
|
||||
console.error("Action failed:", error);
|
||||
if (error.stack) {
|
||||
console.error("Stack trace:", error.stack);
|
||||
}
|
||||
} else {
|
||||
core.setFailed("Unknown error occurred");
|
||||
console.error("Unknown error:", error);
|
||||
}
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -0,0 +1,96 @@
|
||||
import { describe, expect, test } from "bun:test";
|
||||
import { ActionInputs, ActionInputsSchema } from "./schemas";
|
||||
|
||||
// A fully-populated, schema-valid fixture; individual tests spread it and
// override single fields to probe one validation rule at a time.
const actionInputValid: ActionInputs = {
  coderURL: "https://coder.test",
  coderToken: "test-token",
  coderOrganization: "my-org",
  coderTaskNamePrefix: "gh",
  coderTaskPrompt: "test prompt",
  coderTemplateName: "test-template",
  githubIssueURL: "https://github.com/owner/repo/issues/123",
  githubToken: "github-token",
  githubUserID: 12345,
  // Empty on purpose: the preset is optional and empty is allowed.
  coderTemplatePreset: "",
};

describe("ActionInputsSchema", () => {
  describe("Valid Input Cases", () => {
    test("accepts minimal required inputs and sets default values correctly", () => {
      const result = ActionInputsSchema.parse(actionInputValid);
      expect(result.coderURL).toBe(actionInputValid.coderURL);
      expect(result.coderToken).toBe(actionInputValid.coderToken);
      expect(result.coderOrganization).toBe(actionInputValid.coderOrganization);
      expect(result.coderTaskNamePrefix).toBe(
        actionInputValid.coderTaskNamePrefix,
      );
      expect(result.coderTaskPrompt).toBe(actionInputValid.coderTaskPrompt);
      expect(result.coderTemplateName).toBe(actionInputValid.coderTemplateName);
      expect(result.githubIssueURL).toBe(actionInputValid.githubIssueURL);
      expect(result.githubToken).toBe(actionInputValid.githubToken);
      expect(result.githubUserID).toBe(actionInputValid.githubUserID);
      expect(result.coderTemplatePreset).toBeEmpty();
    });

    test("accepts all optional inputs", () => {
      const input: ActionInputs = {
        ...actionInputValid,
        coderTemplatePreset: "custom",
      };
      const result = ActionInputsSchema.parse(input);
      expect(result.coderTemplatePreset).toBe(input.coderTemplatePreset);
    });

    test("accepts valid URL formats", () => {
      // Covers hostnames, ports, raw IPv4, localhost, loopback, and IPv6.
      const validUrls = [
        "https://coder.test",
        "https://coder.example.com:8080",
        "http://12.34.56.78",
        "https://12.34.56.78:9000",
        "http://localhost:3000",
        "http://127.0.0.1:3000",
        "http://[::1]:3000",
      ];

      for (const url of validUrls) {
        const input: ActionInputs = {
          ...actionInputValid,
          coderURL: url,
        };
        const result = ActionInputsSchema.parse(input);
        expect(result.coderURL).toBe(url);
      }
    });
  });

  describe("Invalid Input Cases", () => {
    test("rejects missing required fields", () => {
      // Cast needed so the compiler allows an intentionally-empty object.
      const input = {} as ActionInputs;
      expect(() => ActionInputsSchema.parse(input)).toThrow();
    });

    test("rejects invalid URL format for coderUrl", () => {
      const input: ActionInputs = {
        ...actionInputValid,
        coderURL: "not-a-url",
      };
      expect(() => ActionInputsSchema.parse(input)).toThrow();
    });

    test("rejects invalid URL format for issueUrl", () => {
      const input: ActionInputs = {
        ...actionInputValid,
        githubIssueURL: "not-a-url",
      };
      expect(() => ActionInputsSchema.parse(input)).toThrow();
    });

    test("rejects empty strings for required fields", () => {
      const input: ActionInputs = {
        ...actionInputValid,
        coderToken: "",
      };
      expect(() => ActionInputsSchema.parse(input)).toThrow();
    });
  });
});
|
||||
@@ -0,0 +1,27 @@
|
||||
import { z } from "zod";
|
||||
|
||||
export type ActionInputs = z.infer<typeof ActionInputsSchema>;
|
||||
|
||||
export const ActionInputsSchema = z.object({
|
||||
// Required
|
||||
coderTaskPrompt: z.string().min(1),
|
||||
coderToken: z.string().min(1),
|
||||
coderURL: z.string().url(),
|
||||
coderOrganization: z.string().min(1),
|
||||
coderTaskNamePrefix: z.string().min(1),
|
||||
coderTemplateName: z.string().min(1),
|
||||
githubIssueURL: z.string().url(),
|
||||
githubToken: z.string(),
|
||||
githubUserID: z.number().min(1),
|
||||
// Optional
|
||||
coderTemplatePreset: z.string().optional(),
|
||||
});
|
||||
|
||||
export const ActionOutputsSchema = z.object({
|
||||
coderUsername: z.string(),
|
||||
taskName: z.string(),
|
||||
taskUrl: z.string().url(),
|
||||
taskCreated: z.boolean(),
|
||||
});
|
||||
|
||||
export type ActionOutputs = z.infer<typeof ActionOutputsSchema>;
|
||||
@@ -0,0 +1,207 @@
|
||||
import { mock } from "bun:test";
|
||||
import { CoderClient } from "./coder-client";
|
||||
import type {
|
||||
CoderSDKUser,
|
||||
CoderSDKGetUsersResponse,
|
||||
CoderSDKTemplate,
|
||||
CoderSDKTemplateVersionPreset,
|
||||
ExperimentalCoderSDKTask,
|
||||
ExperimentalCoderSDKTaskListResponse,
|
||||
ExperimentalCoderSDKCreateTaskRequest,
|
||||
} from "./coder-client";
|
||||
import type { ActionInputs } from "./schemas";
|
||||
|
||||
/**
 * Mock data for tests. IDs are fixed UUIDs so they cross-reference each other:
 * mockTask.owner_id === mockUser.id, mockTask.template_id === mockTemplate.id,
 * and mockTemplateVersionPreset.ID === mockTemplate.active_version_id.
 */
export const mockUser: CoderSDKUser = {
  id: "550e8400-e29b-41d4-a716-446655440000",
  username: "testuser",
  email: "test@example.com",
  organization_ids: ["660e8400-e29b-41d4-a716-446655440000"],
  github_com_user_id: 12345,
};

export const mockUserList: CoderSDKGetUsersResponse = {
  users: [mockUser],
};

export const mockUserListEmpty: CoderSDKGetUsersResponse = {
  users: [],
};

// Two distinct users sharing the same github_com_user_id — exercises the
// "multiple users for one GitHub ID" conflict path.
export const mockUserListDuplicate: CoderSDKGetUsersResponse = {
  users: [
    mockUser,
    {
      ...mockUser,
      id: "660e8400-e29b-41d4-a716-446655440001",
      username: "testuser2",
    },
  ],
};

export const mockTemplate: CoderSDKTemplate = {
  id: "770e8400-e29b-41d4-a716-446655440000",
  name: "my-template",
  description: "AI triage template",
  organization_id: "660e8400-e29b-41d4-a716-446655440000",
  active_version_id: "880e8400-e29b-41d4-a716-446655440000",
};

export const mockTemplateVersionPreset: CoderSDKTemplateVersionPreset = {
  ID: "880e8400-e29b-41d4-a716-446655440000",
  Name: "default-preset",
  Default: true,
};

export const mockTemplateVersionPreset2: CoderSDKTemplateVersionPreset = {
  ID: "990e8400-e29b-41d4-a716-446655440000",
  Name: "another-preset",
  Default: false,
};

export const mockTemplateVersionPresets = [
  mockTemplateVersionPreset,
  mockTemplateVersionPreset2,
];

export const mockTask: ExperimentalCoderSDKTask = {
  id: "990e8400-e29b-41d4-a716-446655440000",
  name: "task-123",
  owner_id: "550e8400-e29b-41d4-a716-446655440000",
  template_id: "770e8400-e29b-41d4-a716-446655440000",
  created_at: "2024-01-01T00:00:00Z",
  updated_at: "2024-01-01T00:00:00Z",
  status: "running",
};

export const mockTaskList: ExperimentalCoderSDKTaskListResponse = {
  tasks: [mockTask],
};

export const mockTaskListEmpty: ExperimentalCoderSDKTaskListResponse = {
  tasks: [],
};

/**
 * Create mock ActionInputs with defaults. Any field may be overridden; the
 * defaults are self-consistent with the fixtures above (githubUserID matches
 * mockUser.github_com_user_id, coderTemplateName matches mockTemplate.name).
 */
export function createMockInputs(
  overrides?: Partial<ActionInputs>,
): ActionInputs {
  return {
    coderTaskPrompt: "Test prompt",
    coderToken: "test-token",
    coderURL: "https://coder.test",
    coderOrganization: "coder",
    coderTaskNamePrefix: "task",
    coderTemplateName: "my-template",
    githubToken: "github-token",
    githubIssueURL: "https://github.com/test-org/test-repo/issues/12345",
    githubUserID: 12345,
    ...overrides,
  };
}
|
||||
|
||||
/**
|
||||
* Mock CoderClient for testing
|
||||
*/
|
||||
export class MockCoderClient implements CoderClient {
|
||||
private readonly headers: Record<string, string>;
|
||||
public mockGetCoderUserByGithubID = mock();
|
||||
public mockGetTemplateByOrganizationAndName = mock();
|
||||
public mockGetTemplateVersionPresets = mock();
|
||||
public mockGetTask = mock();
|
||||
public mockCreateTask = mock();
|
||||
public mockSendTaskInput = mock();
|
||||
|
||||
constructor() // private readonly serverURL: string,
|
||||
// apiToken: string,
|
||||
{
|
||||
this.headers = {};
|
||||
}
|
||||
|
||||
async getCoderUserByGitHubId(githubUserId: number): Promise<CoderSDKUser> {
|
||||
return this.mockGetCoderUserByGithubID(githubUserId);
|
||||
}
|
||||
|
||||
async getTemplateByOrganizationAndName(
|
||||
organization: string,
|
||||
templateName: string,
|
||||
): Promise<CoderSDKTemplate> {
|
||||
return this.mockGetTemplateByOrganizationAndName(
|
||||
organization,
|
||||
templateName,
|
||||
);
|
||||
}
|
||||
|
||||
async getTemplateVersionPresets(
|
||||
templateVersionId: string,
|
||||
): Promise<CoderSDKTemplateVersionPreset[]> {
|
||||
return this.mockGetTemplateVersionPresets(templateVersionId);
|
||||
}
|
||||
|
||||
async getTask(
|
||||
username: string,
|
||||
taskName: string,
|
||||
): Promise<ExperimentalCoderSDKTask | null> {
|
||||
return this.mockGetTask(username, taskName);
|
||||
}
|
||||
|
||||
async createTask(
|
||||
username: string,
|
||||
params: ExperimentalCoderSDKCreateTaskRequest,
|
||||
): Promise<ExperimentalCoderSDKTask> {
|
||||
return this.mockCreateTask(username, params);
|
||||
}
|
||||
|
||||
async sendTaskInput(
|
||||
username: string,
|
||||
taskName: string,
|
||||
input: string,
|
||||
): Promise<void> {
|
||||
return this.mockSendTaskInput(username, taskName, input);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Mock Octokit for testing. Returns an object shaped like the subset of the
 * Octokit REST surface this action touches (users.getByUsername plus the
 * issue-comment endpoints), with each endpoint backed by a bun `mock()`.
 */
export function createMockOctokit() {
  return {
    rest: {
      users: {
        getByUsername: mock(),
      },
      issues: {
        listComments: mock(),
        createComment: mock(),
        updateComment: mock(),
      },
    },
  };
}
|
||||
|
||||
/**
 * Mock fetch for testing. A bare bun `mock()`; pair with createMockResponse
 * via mockResolvedValue/mockResolvedValueOnce to script responses.
 */
export function createMockFetch() {
  return mock();
}
|
||||
|
||||
/**
|
||||
* Create mock fetch response
|
||||
*/
|
||||
export function createMockResponse(
|
||||
body: unknown,
|
||||
options: { ok?: boolean; status?: number; statusText?: string } = {},
|
||||
) {
|
||||
return {
|
||||
ok: options.ok ?? true,
|
||||
status: options.status ?? 200,
|
||||
statusText: options.statusText ?? "OK",
|
||||
json: async () => body,
|
||||
text: async () => JSON.stringify(body),
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,17 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "ESNext",
|
||||
"moduleResolution": "bundler",
|
||||
"lib": ["ES2022"],
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"resolveJsonModule": true,
|
||||
"isolatedModules": true,
|
||||
"noEmit": true
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"exclude": ["node_modules", "dist"]
|
||||
}
|
||||
@@ -80,6 +80,9 @@ updates:
|
||||
mui:
|
||||
patterns:
|
||||
- "@mui*"
|
||||
radix:
|
||||
patterns:
|
||||
- "@radix-ui/*"
|
||||
react:
|
||||
patterns:
|
||||
- "react"
|
||||
@@ -104,6 +107,7 @@ updates:
|
||||
- dependency-name: "*"
|
||||
update-types:
|
||||
- version-update:semver-major
|
||||
- dependency-name: "@playwright/test"
|
||||
open-pull-requests-limit: 15
|
||||
|
||||
- package-ecosystem: "terraform"
|
||||
|
||||
+39
-115
@@ -4,6 +4,7 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- release/*
|
||||
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
@@ -919,6 +920,7 @@ jobs:
|
||||
required:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- changes
|
||||
- fmt
|
||||
- lint
|
||||
- gen
|
||||
@@ -942,6 +944,7 @@ jobs:
|
||||
- name: Ensure required checks
|
||||
run: | # zizmor: ignore[template-injection] We're just reading needs.x.result here, no risk of injection
|
||||
echo "Checking required checks"
|
||||
echo "- changes: ${{ needs.changes.result }}"
|
||||
echo "- fmt: ${{ needs.fmt.result }}"
|
||||
echo "- lint: ${{ needs.lint.result }}"
|
||||
echo "- gen: ${{ needs.gen.result }}"
|
||||
@@ -967,7 +970,7 @@ jobs:
|
||||
needs: changes
|
||||
# We always build the dylibs on Go changes to verify we're not merging unbuildable code,
|
||||
# but they need only be signed and uploaded on coder/coder main.
|
||||
if: needs.changes.outputs.go == 'true' || needs.changes.outputs.ci == 'true' || github.ref == 'refs/heads/main'
|
||||
if: needs.changes.outputs.go == 'true' || needs.changes.outputs.ci == 'true' || github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/')
|
||||
runs-on: ${{ github.repository_owner == 'coder' && 'depot-macos-latest' || 'macos-latest' }}
|
||||
steps:
|
||||
# Harden Runner doesn't work on macOS
|
||||
@@ -995,7 +998,7 @@ jobs:
|
||||
uses: ./.github/actions/setup-go
|
||||
|
||||
- name: Install rcodesign
|
||||
if: ${{ github.repository_owner == 'coder' && github.ref == 'refs/heads/main' }}
|
||||
if: ${{ github.repository_owner == 'coder' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/')) }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
wget -O /tmp/rcodesign.tar.gz https://github.com/indygreg/apple-platform-rs/releases/download/apple-codesign%2F0.22.0/apple-codesign-0.22.0-macos-universal.tar.gz
|
||||
@@ -1006,7 +1009,7 @@ jobs:
|
||||
rm /tmp/rcodesign.tar.gz
|
||||
|
||||
- name: Setup Apple Developer certificate and API key
|
||||
if: ${{ github.repository_owner == 'coder' && github.ref == 'refs/heads/main' }}
|
||||
if: ${{ github.repository_owner == 'coder' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/')) }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
touch /tmp/{apple_cert.p12,apple_cert_password.txt,apple_apikey.p8}
|
||||
@@ -1027,12 +1030,12 @@ jobs:
|
||||
make gen/mark-fresh
|
||||
make build/coder-dylib
|
||||
env:
|
||||
CODER_SIGN_DARWIN: ${{ github.ref == 'refs/heads/main' && '1' || '0' }}
|
||||
CODER_SIGN_DARWIN: ${{ (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/')) && '1' || '0' }}
|
||||
AC_CERTIFICATE_FILE: /tmp/apple_cert.p12
|
||||
AC_CERTIFICATE_PASSWORD_FILE: /tmp/apple_cert_password.txt
|
||||
|
||||
- name: Upload build artifacts
|
||||
if: ${{ github.repository_owner == 'coder' && github.ref == 'refs/heads/main' }}
|
||||
if: ${{ github.repository_owner == 'coder' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/')) }}
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: dylibs
|
||||
@@ -1042,7 +1045,7 @@ jobs:
|
||||
retention-days: 7
|
||||
|
||||
- name: Delete Apple Developer certificate and API key
|
||||
if: ${{ github.repository_owner == 'coder' && github.ref == 'refs/heads/main' }}
|
||||
if: ${{ github.repository_owner == 'coder' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/')) }}
|
||||
run: rm -f /tmp/{apple_cert.p12,apple_cert_password.txt,apple_apikey.p8}
|
||||
|
||||
check-build:
|
||||
@@ -1092,7 +1095,7 @@ jobs:
|
||||
needs:
|
||||
- changes
|
||||
- build-dylib
|
||||
if: github.ref == 'refs/heads/main' && needs.changes.outputs.docs-only == 'false' && !github.event.pull_request.head.repo.fork
|
||||
if: (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/')) && needs.changes.outputs.docs-only == 'false' && !github.event.pull_request.head.repo.fork
|
||||
runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-22.04' }}
|
||||
permissions:
|
||||
# Necessary to push docker images to ghcr.io.
|
||||
@@ -1245,40 +1248,45 @@ jobs:
|
||||
id: build-docker
|
||||
env:
|
||||
CODER_IMAGE_BASE: ghcr.io/coder/coder-preview
|
||||
CODER_IMAGE_TAG_PREFIX: main
|
||||
DOCKER_CLI_EXPERIMENTAL: "enabled"
|
||||
run: |
|
||||
set -euxo pipefail
|
||||
|
||||
# build Docker images for each architecture
|
||||
version="$(./scripts/version.sh)"
|
||||
tag="main-${version//+/-}"
|
||||
tag="${version//+/-}"
|
||||
echo "tag=$tag" >> "$GITHUB_OUTPUT"
|
||||
|
||||
# build images for each architecture
|
||||
# note: omitting the -j argument to avoid race conditions when pushing
|
||||
make build/coder_"$version"_linux_{amd64,arm64,armv7}.tag
|
||||
|
||||
# only push if we are on main branch
|
||||
if [ "${GITHUB_REF}" == "refs/heads/main" ]; then
|
||||
# only push if we are on main branch or release branch
|
||||
if [[ "${GITHUB_REF}" == "refs/heads/main" || "${GITHUB_REF}" == refs/heads/release/* ]]; then
|
||||
# build and push multi-arch manifest, this depends on the other images
|
||||
# being pushed so will automatically push them
|
||||
# note: omitting the -j argument to avoid race conditions when pushing
|
||||
make push/build/coder_"$version"_linux_{amd64,arm64,armv7}.tag
|
||||
|
||||
# Define specific tags
|
||||
tags=("$tag" "main" "latest")
|
||||
tags=("$tag")
|
||||
if [ "${GITHUB_REF}" == "refs/heads/main" ]; then
|
||||
tags+=("main" "latest")
|
||||
elif [[ "${GITHUB_REF}" == refs/heads/release/* ]]; then
|
||||
tags+=("release-${GITHUB_REF#refs/heads/release/}")
|
||||
fi
|
||||
|
||||
# Create and push a multi-arch manifest for each tag
|
||||
# we are adding `latest` tag and keeping `main` for backward
|
||||
# compatibality
|
||||
for t in "${tags[@]}"; do
|
||||
# shellcheck disable=SC2046
|
||||
./scripts/build_docker_multiarch.sh \
|
||||
--push \
|
||||
--target "ghcr.io/coder/coder-preview:$t" \
|
||||
--version "$version" \
|
||||
$(cat build/coder_"$version"_linux_{amd64,arm64,armv7}.tag)
|
||||
echo "Pushing multi-arch manifest for tag: $t"
|
||||
# shellcheck disable=SC2046
|
||||
./scripts/build_docker_multiarch.sh \
|
||||
--push \
|
||||
--target "ghcr.io/coder/coder-preview:$t" \
|
||||
--version "$version" \
|
||||
$(cat build/coder_"$version"_linux_{amd64,arm64,armv7}.tag)
|
||||
done
|
||||
fi
|
||||
|
||||
@@ -1469,112 +1477,28 @@ jobs:
|
||||
./build/*.deb
|
||||
retention-days: 7
|
||||
|
||||
# Deploy is handled in deploy.yaml so we can apply concurrency limits.
|
||||
deploy:
|
||||
name: "deploy"
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
needs:
|
||||
- changes
|
||||
- build
|
||||
if: |
|
||||
github.ref == 'refs/heads/main' && !github.event.pull_request.head.repo.fork
|
||||
(github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/'))
|
||||
&& needs.changes.outputs.docs-only == 'false'
|
||||
&& !github.event.pull_request.head.repo.fork
|
||||
uses: ./.github/workflows/deploy.yaml
|
||||
with:
|
||||
image: ${{ needs.build.outputs.IMAGE }}
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
steps:
|
||||
- name: Harden Runner
|
||||
uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- name: Authenticate to Google Cloud
|
||||
uses: google-github-actions/auth@7c6bc770dae815cd3e89ee6cdf493a5fab2cc093 # v3.0.0
|
||||
with:
|
||||
workload_identity_provider: ${{ vars.GCP_WORKLOAD_ID_PROVIDER }}
|
||||
service_account: ${{ vars.GCP_SERVICE_ACCOUNT }}
|
||||
|
||||
- name: Set up Google Cloud SDK
|
||||
uses: google-github-actions/setup-gcloud@aa5489c8933f4cc7a4f7d45035b3b1440c9c10db # v3.0.1
|
||||
|
||||
- name: Set up Flux CLI
|
||||
uses: fluxcd/flux2/action@6bf37f6a560fd84982d67f853162e4b3c2235edb # v2.6.4
|
||||
with:
|
||||
# Keep this and the github action up to date with the version of flux installed in dogfood cluster
|
||||
version: "2.5.1"
|
||||
|
||||
- name: Get Cluster Credentials
|
||||
uses: google-github-actions/get-gke-credentials@3da1e46a907576cefaa90c484278bb5b259dd395 # v3.0.0
|
||||
with:
|
||||
cluster_name: dogfood-v2
|
||||
location: us-central1-a
|
||||
project_id: coder-dogfood-v2
|
||||
|
||||
- name: Reconcile Flux
|
||||
run: |
|
||||
set -euxo pipefail
|
||||
flux --namespace flux-system reconcile source git flux-system
|
||||
flux --namespace flux-system reconcile source git coder-main
|
||||
flux --namespace flux-system reconcile kustomization flux-system
|
||||
flux --namespace flux-system reconcile kustomization coder
|
||||
flux --namespace flux-system reconcile source chart coder-coder
|
||||
flux --namespace flux-system reconcile source chart coder-coder-provisioner
|
||||
flux --namespace coder reconcile helmrelease coder
|
||||
flux --namespace coder reconcile helmrelease coder-provisioner
|
||||
|
||||
# Just updating Flux is usually not enough. The Helm release may get
|
||||
# redeployed, but unless something causes the Deployment to update the
|
||||
# pods won't be recreated. It's important that the pods get recreated,
|
||||
# since we use `imagePullPolicy: Always` to ensure we're running the
|
||||
# latest image.
|
||||
- name: Rollout Deployment
|
||||
run: |
|
||||
set -euxo pipefail
|
||||
kubectl --namespace coder rollout restart deployment/coder
|
||||
kubectl --namespace coder rollout status deployment/coder
|
||||
kubectl --namespace coder rollout restart deployment/coder-provisioner
|
||||
kubectl --namespace coder rollout status deployment/coder-provisioner
|
||||
kubectl --namespace coder rollout restart deployment/coder-provisioner-tagged
|
||||
kubectl --namespace coder rollout status deployment/coder-provisioner-tagged
|
||||
|
||||
deploy-wsproxies:
|
||||
runs-on: ubuntu-latest
|
||||
needs: build
|
||||
if: github.ref == 'refs/heads/main' && !github.event.pull_request.head.repo.fork
|
||||
steps:
|
||||
- name: Harden Runner
|
||||
uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup flyctl
|
||||
uses: superfly/flyctl-actions/setup-flyctl@fc53c09e1bc3be6f54706524e3b82c4f462f77be # v1.5
|
||||
|
||||
- name: Deploy workspace proxies
|
||||
run: |
|
||||
flyctl deploy --image "$IMAGE" --app paris-coder --config ./.github/fly-wsproxies/paris-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_PARIS" --yes
|
||||
flyctl deploy --image "$IMAGE" --app sydney-coder --config ./.github/fly-wsproxies/sydney-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_SYDNEY" --yes
|
||||
flyctl deploy --image "$IMAGE" --app sao-paulo-coder --config ./.github/fly-wsproxies/sao-paulo-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_SAO_PAULO" --yes
|
||||
flyctl deploy --image "$IMAGE" --app jnb-coder --config ./.github/fly-wsproxies/jnb-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_JNB" --yes
|
||||
env:
|
||||
FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
|
||||
IMAGE: ${{ needs.build.outputs.IMAGE }}
|
||||
TOKEN_PARIS: ${{ secrets.FLY_PARIS_CODER_PROXY_SESSION_TOKEN }}
|
||||
TOKEN_SYDNEY: ${{ secrets.FLY_SYDNEY_CODER_PROXY_SESSION_TOKEN }}
|
||||
TOKEN_SAO_PAULO: ${{ secrets.FLY_SAO_PAULO_CODER_PROXY_SESSION_TOKEN }}
|
||||
TOKEN_JNB: ${{ secrets.FLY_JNB_CODER_PROXY_SESSION_TOKEN }}
|
||||
packages: write # to retag image as dogfood
|
||||
secrets:
|
||||
FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
|
||||
FLY_PARIS_CODER_PROXY_SESSION_TOKEN: ${{ secrets.FLY_PARIS_CODER_PROXY_SESSION_TOKEN }}
|
||||
FLY_SYDNEY_CODER_PROXY_SESSION_TOKEN: ${{ secrets.FLY_SYDNEY_CODER_PROXY_SESSION_TOKEN }}
|
||||
FLY_SAO_PAULO_CODER_PROXY_SESSION_TOKEN: ${{ secrets.FLY_SAO_PAULO_CODER_PROXY_SESSION_TOKEN }}
|
||||
FLY_JNB_CODER_PROXY_SESSION_TOKEN: ${{ secrets.FLY_JNB_CODER_PROXY_SESSION_TOKEN }}
|
||||
|
||||
# sqlc-vet runs a postgres docker container, runs Coder migrations, and then
|
||||
# runs sqlc-vet to ensure all queries are valid. This catches any mistakes
|
||||
|
||||
@@ -0,0 +1,174 @@
|
||||
name: deploy
|
||||
|
||||
on:
|
||||
# Via workflow_call, called from ci.yaml
|
||||
workflow_call:
|
||||
inputs:
|
||||
image:
|
||||
description: "Image and tag to potentially deploy. Current branch will be validated against should-deploy check."
|
||||
required: true
|
||||
type: string
|
||||
secrets:
|
||||
FLY_API_TOKEN:
|
||||
required: true
|
||||
FLY_PARIS_CODER_PROXY_SESSION_TOKEN:
|
||||
required: true
|
||||
FLY_SYDNEY_CODER_PROXY_SESSION_TOKEN:
|
||||
required: true
|
||||
FLY_SAO_PAULO_CODER_PROXY_SESSION_TOKEN:
|
||||
required: true
|
||||
FLY_JNB_CODER_PROXY_SESSION_TOKEN:
|
||||
required: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }} # no per-branch concurrency
|
||||
cancel-in-progress: false
|
||||
|
||||
jobs:
|
||||
# Determines if the given branch should be deployed to dogfood.
|
||||
should-deploy:
|
||||
name: should-deploy
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
verdict: ${{ steps.check.outputs.verdict }} # DEPLOY or NOOP
|
||||
steps:
|
||||
- name: Harden Runner
|
||||
uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- name: Check if deploy is enabled
|
||||
id: check
|
||||
run: |
|
||||
set -euo pipefail
|
||||
verdict="$(./scripts/should_deploy.sh)"
|
||||
echo "verdict=$verdict" >> "$GITHUB_OUTPUT"
|
||||
|
||||
deploy:
|
||||
name: "deploy"
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
needs: should-deploy
|
||||
if: needs.should-deploy.outputs.verdict == 'DEPLOY'
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
packages: write # to retag image as dogfood
|
||||
steps:
|
||||
- name: Harden Runner
|
||||
uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- name: GHCR Login
|
||||
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Authenticate to Google Cloud
|
||||
uses: google-github-actions/auth@7c6bc770dae815cd3e89ee6cdf493a5fab2cc093 # v3.0.0
|
||||
with:
|
||||
workload_identity_provider: ${{ vars.GCP_WORKLOAD_ID_PROVIDER }}
|
||||
service_account: ${{ vars.GCP_SERVICE_ACCOUNT }}
|
||||
|
||||
- name: Set up Google Cloud SDK
|
||||
uses: google-github-actions/setup-gcloud@aa5489c8933f4cc7a4f7d45035b3b1440c9c10db # v3.0.1
|
||||
|
||||
- name: Set up Flux CLI
|
||||
uses: fluxcd/flux2/action@6bf37f6a560fd84982d67f853162e4b3c2235edb # v2.6.4
|
||||
with:
|
||||
# Keep this and the github action up to date with the version of flux installed in dogfood cluster
|
||||
version: "2.7.0"
|
||||
|
||||
- name: Get Cluster Credentials
|
||||
uses: google-github-actions/get-gke-credentials@3da1e46a907576cefaa90c484278bb5b259dd395 # v3.0.0
|
||||
with:
|
||||
cluster_name: dogfood-v2
|
||||
location: us-central1-a
|
||||
project_id: coder-dogfood-v2
|
||||
|
||||
# Retag image as dogfood while maintaining the multi-arch manifest
|
||||
- name: Tag image as dogfood
|
||||
run: docker buildx imagetools create --tag "ghcr.io/coder/coder-preview:dogfood" "$IMAGE"
|
||||
env:
|
||||
IMAGE: ${{ inputs.image }}
|
||||
|
||||
- name: Reconcile Flux
|
||||
run: |
|
||||
set -euxo pipefail
|
||||
flux --namespace flux-system reconcile source git flux-system
|
||||
flux --namespace flux-system reconcile source git coder-main
|
||||
flux --namespace flux-system reconcile kustomization flux-system
|
||||
flux --namespace flux-system reconcile kustomization coder
|
||||
flux --namespace flux-system reconcile source chart coder-coder
|
||||
flux --namespace flux-system reconcile source chart coder-coder-provisioner
|
||||
flux --namespace coder reconcile helmrelease coder
|
||||
flux --namespace coder reconcile helmrelease coder-provisioner
|
||||
flux --namespace coder reconcile helmrelease coder-provisioner-tagged
|
||||
flux --namespace coder reconcile helmrelease coder-provisioner-tagged-prebuilds
|
||||
|
||||
# Just updating Flux is usually not enough. The Helm release may get
|
||||
# redeployed, but unless something causes the Deployment to update the
|
||||
# pods won't be recreated. It's important that the pods get recreated,
|
||||
# since we use `imagePullPolicy: Always` to ensure we're running the
|
||||
# latest image.
|
||||
- name: Rollout Deployment
|
||||
run: |
|
||||
set -euxo pipefail
|
||||
kubectl --namespace coder rollout restart deployment/coder
|
||||
kubectl --namespace coder rollout status deployment/coder
|
||||
kubectl --namespace coder rollout restart deployment/coder-provisioner
|
||||
kubectl --namespace coder rollout status deployment/coder-provisioner
|
||||
kubectl --namespace coder rollout restart deployment/coder-provisioner-tagged
|
||||
kubectl --namespace coder rollout status deployment/coder-provisioner-tagged
|
||||
kubectl --namespace coder rollout restart deployment/coder-provisioner-tagged-prebuilds
|
||||
kubectl --namespace coder rollout status deployment/coder-provisioner-tagged-prebuilds
|
||||
|
||||
deploy-wsproxies:
|
||||
runs-on: ubuntu-latest
|
||||
needs: deploy
|
||||
steps:
|
||||
- name: Harden Runner
|
||||
uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- name: Setup flyctl
|
||||
uses: superfly/flyctl-actions/setup-flyctl@fc53c09e1bc3be6f54706524e3b82c4f462f77be # v1.5
|
||||
|
||||
- name: Deploy workspace proxies
|
||||
run: |
|
||||
flyctl deploy --image "$IMAGE" --app paris-coder --config ./.github/fly-wsproxies/paris-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_PARIS" --yes
|
||||
flyctl deploy --image "$IMAGE" --app sydney-coder --config ./.github/fly-wsproxies/sydney-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_SYDNEY" --yes
|
||||
flyctl deploy --image "$IMAGE" --app sao-paulo-coder --config ./.github/fly-wsproxies/sao-paulo-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_SAO_PAULO" --yes
|
||||
flyctl deploy --image "$IMAGE" --app jnb-coder --config ./.github/fly-wsproxies/jnb-coder.toml --env "CODER_PROXY_SESSION_TOKEN=$TOKEN_JNB" --yes
|
||||
env:
|
||||
FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
|
||||
IMAGE: ${{ inputs.image }}
|
||||
TOKEN_PARIS: ${{ secrets.FLY_PARIS_CODER_PROXY_SESSION_TOKEN }}
|
||||
TOKEN_SYDNEY: ${{ secrets.FLY_SYDNEY_CODER_PROXY_SESSION_TOKEN }}
|
||||
TOKEN_SAO_PAULO: ${{ secrets.FLY_SAO_PAULO_CODER_PROXY_SESSION_TOKEN }}
|
||||
TOKEN_JNB: ${{ secrets.FLY_JNB_CODER_PROXY_SESSION_TOKEN }}
|
||||
+80
-133
@@ -1,6 +1,9 @@
|
||||
name: AI Triage Automation
|
||||
|
||||
on:
|
||||
issues:
|
||||
types:
|
||||
- labeled
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
issue_url:
|
||||
@@ -22,164 +25,108 @@ on:
|
||||
required: false
|
||||
default: "traiage"
|
||||
type: string
|
||||
cleanup:
|
||||
description: "Cleanup workspace after triage."
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
|
||||
jobs:
|
||||
traiage:
|
||||
name: Triage GitHub Issue with Claude Code
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.label.name == 'traiage' || github.event_name == 'workflow_dispatch'
|
||||
timeout-minutes: 30
|
||||
env:
|
||||
CODER_URL: ${{ secrets.TRAIAGE_CODER_URL }}
|
||||
CODER_SESSION_TOKEN: ${{ secrets.TRAIAGE_CODER_SESSION_TOKEN }}
|
||||
TEMPLATE_NAME: ${{ inputs.template_name }}
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
actions: write
|
||||
|
||||
steps:
|
||||
- name: Determine Inputs
|
||||
id: determine-inputs
|
||||
env:
|
||||
GITHUB_EVENT_ISSUE_HTML_URL: ${{ github.event.issue.html_url }}
|
||||
GITHUB_EVENT_NAME: ${{ github.event_name }}
|
||||
INPUTS_ISSUE_URL: ${{ inputs.issue_url }}
|
||||
INPUTS_TEMPLATE_NAME: ${{ inputs.template_name || 'traiage' }}
|
||||
INPUTS_TEMPLATE_PRESET: ${{ inputs.template_preset || 'Default'}}
|
||||
INPUTS_PREFIX: ${{ inputs.prefix || 'traiage' }}
|
||||
run: |
|
||||
echo "template_name=${INPUTS_TEMPLATE_NAME}" >> "${GITHUB_OUTPUT}"
|
||||
echo "template_preset=${INPUTS_TEMPLATE_PRESET}" >> "${GITHUB_OUTPUT}"
|
||||
echo "prefix=${INPUTS_PREFIX}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
# Determine issue URL based on event type
|
||||
if [[ "${GITHUB_EVENT_NAME}" == "workflow_dispatch" ]]; then
|
||||
echo "issue_url=${INPUTS_ISSUE_URL}" >> "${GITHUB_OUTPUT}"
|
||||
elif [[ "${GITHUB_EVENT_NAME}" == "issues" ]]; then
|
||||
echo "issue_url=${GITHUB_EVENT_ISSUE_HTML_URL}" >> "${GITHUB_OUTPUT}"
|
||||
else
|
||||
echo "::error::Unsupported event type: ${GITHUB_EVENT_NAME}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Verify push access
|
||||
env:
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
GITHUB_ACTOR: ${{ github.actor }}
|
||||
GITHUB_EVENT_USER_LOGIN: ${{ github.event.sender.login }}
|
||||
GITHUB_EVENT_NAME: ${{ github.event_name }}
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
# Determine username based on event type
|
||||
if [[ "${GITHUB_EVENT_NAME}" == "workflow_dispatch" ]]; then
|
||||
USERNAME="${GITHUB_ACTOR}"
|
||||
else
|
||||
USERNAME="${GITHUB_EVENT_USER_LOGIN}"
|
||||
fi
|
||||
|
||||
# Query the user's permission on this repo
|
||||
can_push="$(gh api "/repos/${GITHUB_REPOSITORY}/collaborators/${USERNAME}/permission" --jq '.user.permissions.push')"
|
||||
if [[ "${can_push}" != "true" ]]; then
|
||||
echo "::error title=Access Denied::${USERNAME} does not have push access to ${GITHUB_REPOSITORY}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Extract context key from issue
|
||||
id: extract-context
|
||||
- name: Fetch issue description
|
||||
id: fetch-issue
|
||||
env:
|
||||
ISSUE_URL: ${{ inputs.issue_url }}
|
||||
GITHUB_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
issue_number="$(gh issue view "${ISSUE_URL}" --json number --jq '.number')"
|
||||
context_key="gh-${issue_number}"
|
||||
echo "context_key=${context_key}" >> "${GITHUB_OUTPUT}"
|
||||
echo "CONTEXT_KEY=${context_key}" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Download and install Coder binary
|
||||
shell: bash
|
||||
env:
|
||||
CODER_URL: ${{ secrets.TRAIAGE_CODER_URL }}
|
||||
run: |
|
||||
if [ "${{ runner.arch }}" == "ARM64" ]; then
|
||||
ARCH="arm64"
|
||||
else
|
||||
ARCH="amd64"
|
||||
fi
|
||||
mkdir -p "${HOME}/.local/bin"
|
||||
curl -fsSL --compressed "$CODER_URL/bin/coder-linux-${ARCH}" -o "${HOME}/.local/bin/coder"
|
||||
chmod +x "${HOME}/.local/bin/coder"
|
||||
export PATH="$HOME/.local/bin:$PATH"
|
||||
coder version
|
||||
coder whoami
|
||||
echo "$HOME/.local/bin" >> "${GITHUB_PATH}"
|
||||
|
||||
- name: Get Coder username from GitHub actor
|
||||
id: get-coder-username
|
||||
env:
|
||||
CODER_SESSION_TOKEN: ${{ secrets.TRAIAGE_CODER_SESSION_TOKEN }}
|
||||
GITHUB_USER_ID: ${{
|
||||
(github.event_name == 'workflow_dispatch' && github.actor_id)
|
||||
}}
|
||||
run: |
|
||||
[[ -z "${GITHUB_USER_ID}" || "${GITHUB_USER_ID}" == "null" ]] && echo "No GitHub actor ID found" && exit 1
|
||||
user_json=$(
|
||||
coder users list --github-user-id="${GITHUB_USER_ID}" --output=json
|
||||
)
|
||||
coder_username=$(jq -r 'first | .username' <<< "$user_json")
|
||||
[[ -z "${coder_username}" || "${coder_username}" == "null" ]] && echo "No Coder user with GitHub user ID ${GITHUB_USER_ID} found" && exit 1
|
||||
echo "coder_username=${coder_username}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
# TODO(Cian): this is a good use-case for 'recipes'
|
||||
- name: Create Coder task
|
||||
id: create-task
|
||||
env:
|
||||
CODER_USERNAME: ${{ steps.get-coder-username.outputs.coder_username }}
|
||||
CONTEXT_KEY: ${{ steps.extract-context.outputs.context_key }}
|
||||
GITHUB_TOKEN: ${{ github.token }}
|
||||
ISSUE_URL: ${{ inputs.issue_url }}
|
||||
PREFIX: ${{ inputs.prefix }}
|
||||
RUN_ID: ${{ github.run_id }}
|
||||
TEMPLATE_PARAMETERS: ${{ secrets.TRAIAGE_TEMPLATE_PARAMETERS }}
|
||||
TEMPLATE_PRESET: ${{ inputs.template_preset }}
|
||||
ISSUE_URL: ${{ steps.determine-inputs.outputs.issue_url }}
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
# Fetch issue description using `gh` CLI
|
||||
issue_description=$(gh issue view "${ISSUE_URL}")
|
||||
#shellcheck disable=SC2016 # The template string should not be subject to shell expansion
|
||||
issue_description=$(gh issue view "${ISSUE_URL}" \
|
||||
--json 'title,body,comments' \
|
||||
--template '{{printf "%s\n\n%s\n\nComments:\n" .title .body}}{{range $k, $v := .comments}} - {{index $v.author "login"}}: {{printf "%s\n" $v.body}}{{end}}')
|
||||
|
||||
# Write a prompt to PROMPT_FILE
|
||||
PROMPT=$(cat <<EOF
|
||||
Analyze the below GitHub issue description, understand the root cause, and make appropriate changes to resolve the issue.
|
||||
|
||||
ISSUE URL: ${ISSUE_URL}
|
||||
ISSUE DESCRIPTION BELOW:
|
||||
|
||||
${issue_description}
|
||||
EOF
|
||||
)
|
||||
export PROMPT
|
||||
|
||||
export TASK_NAME="${PREFIX}-${CONTEXT_KEY}-${RUN_ID}"
|
||||
echo "Creating task: $TASK_NAME"
|
||||
./scripts/traiage.sh create
|
||||
coder exp task status "${CODER_USERNAME}/$TASK_NAME" --watch
|
||||
echo "TASK_NAME=${CODER_USERNAME}/${TASK_NAME}" >> "${GITHUB_OUTPUT}"
|
||||
echo "TASK_NAME=${CODER_USERNAME}/${TASK_NAME}" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Create and upload archive
|
||||
id: create-archive
|
||||
if: inputs.cleanup
|
||||
env:
|
||||
BUCKET_PREFIX: "gs://coder-traiage-outputs/traiage"
|
||||
run: |
|
||||
echo "Creating archive for workspace: $TASK_NAME"
|
||||
./scripts/traiage.sh archive
|
||||
echo "archive_url=${BUCKET_PREFIX%%/}/$TASK_NAME.tar.gz" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
- name: Generate a summary of the changes and post a comment on GitHub.
|
||||
id: generate-summary
|
||||
if: inputs.cleanup
|
||||
env:
|
||||
ARCHIVE_URL: ${{ steps.create-archive.outputs.archive_url }}
|
||||
BUCKET_PREFIX: "gs://coder-traiage-outputs/traiage"
|
||||
CONTEXT_KEY: ${{ steps.extract-context.outputs.context_key }}
|
||||
GITHUB_TOKEN: ${{ github.token }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
ISSUE_URL: ${{ inputs.issue_url }}
|
||||
TASK_NAME: ${{ steps.create-task.outputs.TASK_NAME }}
|
||||
run: |
|
||||
SUMMARY_FILE=$(mktemp)
|
||||
trap 'rm -f "${SUMMARY_FILE}"' EXIT
|
||||
AUTO_SUMMARY=$(./scripts/traiage.sh summary)
|
||||
# Create prompt for the task
|
||||
{
|
||||
echo "## TrAIage Results"
|
||||
echo "- **Issue URL:** ${ISSUE_URL}"
|
||||
echo "- **Context Key:** ${CONTEXT_KEY}"
|
||||
echo "- **Workspace:** ${TASK_NAME}"
|
||||
echo "- **Archive URL:** ${ARCHIVE_URL}"
|
||||
echo
|
||||
echo "${AUTO_SUMMARY}"
|
||||
echo
|
||||
echo "To fetch the output to your own workspace:"
|
||||
echo
|
||||
echo '```bash'
|
||||
echo "BUCKET_PREFIX=${BUCKET_PREFIX} TASK_NAME=${TASK_NAME} ./scripts/traiage.sh resume"
|
||||
echo '```'
|
||||
echo
|
||||
} >> "${SUMMARY_FILE}"
|
||||
echo "prompt<<EOF"
|
||||
cat <<PROMPT
|
||||
Fix ${ISSUE_URL}
|
||||
|
||||
if [[ "${ISSUE_URL}" == "https://github.com/${GITHUB_REPOSITORY}"* ]]; then
|
||||
gh issue comment "${ISSUE_URL}" --body-file "${SUMMARY_FILE}" --create-if-none --edit-last
|
||||
else
|
||||
echo "Skipping comment on other repo."
|
||||
fi
|
||||
cat "${SUMMARY_FILE}" >> "${GITHUB_STEP_SUMMARY}"
|
||||
Analyze the below GitHub issue description, understand the root cause, and make appropriate changes to resolve the issue.
|
||||
---
|
||||
${issue_description}
|
||||
PROMPT
|
||||
echo "EOF"
|
||||
} >> "${GITHUB_OUTPUT}"
|
||||
|
||||
- name: Cleanup task
|
||||
if: inputs.cleanup && steps.create-task.outputs.TASK_NAME != '' && steps.create-archive.outputs.archive_url != ''
|
||||
run: |
|
||||
echo "Cleaning up task: $TASK_NAME"
|
||||
./scripts/traiage.sh delete || true
|
||||
- name: Create Coder Task
|
||||
uses: ./.github/actions/coder-task
|
||||
with:
|
||||
coder-url: ${{ secrets.TRAIAGE_CODER_URL }}
|
||||
coder-token: ${{ secrets.TRAIAGE_CODER_SESSION_TOKEN }}
|
||||
template-name: ${{ steps.determine-inputs.outputs.template_name }}
|
||||
template-preset: ${{ steps.determine-inputs.outputs.template_preset }}
|
||||
task-name-prefix: ${{ steps.determine-inputs.outputs.prefix }}
|
||||
task-prompt: ${{ steps.fetch-issue.outputs.prompt }}
|
||||
issue-url: ${{ steps.determine-inputs.outputs.issue_url }}
|
||||
coder-web-url: "https://dev.coder.com"
|
||||
github-token: ${{ github.token }}
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
rules:
|
||||
cache-poisoning:
|
||||
ignore:
|
||||
- "ci.yaml:184"
|
||||
Vendored
+2
-1
@@ -61,5 +61,6 @@
|
||||
"typos.config": ".github/workflows/typos.toml",
|
||||
"[markdown]": {
|
||||
"editor.defaultFormatter": "DavidAnson.vscode-markdownlint"
|
||||
}
|
||||
},
|
||||
"biome.lsp.bin": "site/node_modules/.bin/biome"
|
||||
}
|
||||
|
||||
@@ -1020,19 +1020,11 @@ endif
|
||||
|
||||
TEST_PACKAGES ?= ./...
|
||||
|
||||
warm-go-cache-db-cleaner:
|
||||
# ensure Go's build cache for the cleanercmd is fresh so that tests don't have to build from scratch. This
|
||||
# could take some time and counts against the test's timeout, which can lead to flakes.
|
||||
# c.f. https://github.com/coder/internal/issues/1026
|
||||
mkdir -p build
|
||||
$(GIT_FLAGS) go build -o ./build/cleaner github.com/coder/coder/v2/coderd/database/dbtestutil/cleanercmd
|
||||
.PHONY: warm-go-cache-db-cleaner
|
||||
|
||||
test: warm-go-cache-db-cleaner
|
||||
test:
|
||||
$(GIT_FLAGS) gotestsum --format standard-quiet $(GOTESTSUM_RETRY_FLAGS) --packages="$(TEST_PACKAGES)" -- $(GOTEST_FLAGS)
|
||||
.PHONY: test
|
||||
|
||||
test-cli: warm-go-cache-db-cleaner
|
||||
test-cli:
|
||||
$(MAKE) test TEST_PACKAGES="./cli..."
|
||||
.PHONY: test-cli
|
||||
|
||||
|
||||
+14
-3
@@ -781,11 +781,15 @@ func (a *agent) reportConnectionsLoop(ctx context.Context, aAPI proto.DRPCAgentC
|
||||
logger.Debug(ctx, "reporting connection")
|
||||
_, err := aAPI.ReportConnection(ctx, payload)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("failed to report connection: %w", err)
|
||||
// Do not fail the loop if we fail to report a connection, just
|
||||
// log a warning.
|
||||
// Related to https://github.com/coder/coder/issues/20194
|
||||
logger.Warn(ctx, "failed to report connection to server", slog.Error(err))
|
||||
// keep going, we still need to remove it from the slice
|
||||
} else {
|
||||
logger.Debug(ctx, "successfully reported connection")
|
||||
}
|
||||
|
||||
logger.Debug(ctx, "successfully reported connection")
|
||||
|
||||
// Remove the payload we sent.
|
||||
a.reportConnectionsMu.Lock()
|
||||
a.reportConnections[0] = nil // Release the pointer from the underlying array.
|
||||
@@ -816,6 +820,13 @@ func (a *agent) reportConnection(id uuid.UUID, connectionType proto.Connection_T
|
||||
ip = host
|
||||
}
|
||||
|
||||
// If the IP is "localhost" (which it can be in some cases), set it to
|
||||
// 127.0.0.1 instead.
|
||||
// Related to https://github.com/coder/coder/issues/20194
|
||||
if ip == "localhost" {
|
||||
ip = "127.0.0.1"
|
||||
}
|
||||
|
||||
a.reportConnectionsMu.Lock()
|
||||
defer a.reportConnectionsMu.Unlock()
|
||||
|
||||
|
||||
+4
-2
@@ -1807,11 +1807,12 @@ func TestAgent_ReconnectingPTY(t *testing.T) {
|
||||
|
||||
//nolint:dogsled
|
||||
conn, agentClient, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0)
|
||||
idConnectionReport := uuid.New()
|
||||
id := uuid.New()
|
||||
|
||||
// Test that the connection is reported. This must be tested in the
|
||||
// first connection because we care about verifying all of these.
|
||||
netConn0, err := conn.ReconnectingPTY(ctx, id, 80, 80, "bash --norc")
|
||||
netConn0, err := conn.ReconnectingPTY(ctx, idConnectionReport, 80, 80, "bash --norc")
|
||||
require.NoError(t, err)
|
||||
_ = netConn0.Close()
|
||||
assertConnectionReport(t, agentClient, proto.Connection_RECONNECTING_PTY, 0, "")
|
||||
@@ -2027,7 +2028,8 @@ func runSubAgentMain() int {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
|
||||
defer cancel()
|
||||
req = req.WithContext(ctx)
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
client := &http.Client{}
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
_, _ = fmt.Fprintf(os.Stderr, "agent connection failed: %v\n", err)
|
||||
return 11
|
||||
|
||||
+2
-1
@@ -63,6 +63,7 @@ func NewAppHealthReporterWithClock(
|
||||
// run a ticker for each app health check.
|
||||
var mu sync.RWMutex
|
||||
failures := make(map[uuid.UUID]int, 0)
|
||||
client := &http.Client{}
|
||||
for _, nextApp := range apps {
|
||||
if !shouldStartTicker(nextApp) {
|
||||
continue
|
||||
@@ -91,7 +92,7 @@ func NewAppHealthReporterWithClock(
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
res, err := http.DefaultClient.Do(req)
|
||||
res, err := client.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -25,6 +25,7 @@ import (
|
||||
|
||||
// screenReconnectingPTY provides a reconnectable PTY via `screen`.
|
||||
type screenReconnectingPTY struct {
|
||||
logger slog.Logger
|
||||
execer agentexec.Execer
|
||||
command *pty.Cmd
|
||||
|
||||
@@ -62,6 +63,7 @@ type screenReconnectingPTY struct {
|
||||
// own which causes it to spawn with the specified size.
|
||||
func newScreen(ctx context.Context, logger slog.Logger, execer agentexec.Execer, cmd *pty.Cmd, options *Options) *screenReconnectingPTY {
|
||||
rpty := &screenReconnectingPTY{
|
||||
logger: logger,
|
||||
execer: execer,
|
||||
command: cmd,
|
||||
metrics: options.Metrics,
|
||||
@@ -173,6 +175,7 @@ func (rpty *screenReconnectingPTY) Attach(ctx context.Context, _ string, conn ne
|
||||
|
||||
ptty, process, err := rpty.doAttach(ctx, conn, height, width, logger)
|
||||
if err != nil {
|
||||
logger.Debug(ctx, "unable to attach to screen reconnecting pty", slog.Error(err))
|
||||
if errors.Is(err, context.Canceled) {
|
||||
// Likely the process was too short-lived and canceled the version command.
|
||||
// TODO: Is it worth distinguishing between that and a cancel from the
|
||||
@@ -182,6 +185,7 @@ func (rpty *screenReconnectingPTY) Attach(ctx context.Context, _ string, conn ne
|
||||
}
|
||||
return err
|
||||
}
|
||||
logger.Debug(ctx, "attached to screen reconnecting pty")
|
||||
|
||||
defer func() {
|
||||
// Log only for debugging since the process might have already exited on its
|
||||
@@ -403,6 +407,7 @@ func (rpty *screenReconnectingPTY) Wait() {
|
||||
}
|
||||
|
||||
func (rpty *screenReconnectingPTY) Close(err error) {
|
||||
rpty.logger.Debug(context.Background(), "closing screen reconnecting pty", slog.Error(err))
|
||||
// The closing state change will be handled by the lifecycle.
|
||||
rpty.state.setState(StateClosing, err)
|
||||
}
|
||||
|
||||
+5
-11
@@ -6,10 +6,7 @@
|
||||
"defaultBranch": "main"
|
||||
},
|
||||
"files": {
|
||||
"includes": [
|
||||
"**",
|
||||
"!**/pnpm-lock.yaml"
|
||||
],
|
||||
"includes": ["**", "!**/pnpm-lock.yaml"],
|
||||
"ignoreUnknown": true
|
||||
},
|
||||
"linter": {
|
||||
@@ -48,13 +45,14 @@
|
||||
"options": {
|
||||
"paths": {
|
||||
"@mui/material": "Use @mui/material/<name> instead. See: https://material-ui.com/guides/minimizing-bundle-size/.",
|
||||
"@mui/icons-material": "Use @mui/icons-material/<name> instead. See: https://material-ui.com/guides/minimizing-bundle-size/.",
|
||||
"@mui/material/Avatar": "Use components/Avatar/Avatar instead.",
|
||||
"@mui/material/Alert": "Use components/Alert/Alert instead.",
|
||||
"@mui/material/Popover": "Use components/Popover/Popover instead.",
|
||||
"@mui/material/Typography": "Use native HTML elements instead. Eg: <span>, <p>, <h1>, etc.",
|
||||
"@mui/material/Box": "Use a <div> instead.",
|
||||
"@mui/material/Button": "Use a components/Button/Button instead.",
|
||||
"@mui/material/styles": "Import from @emotion/react instead.",
|
||||
"@mui/material/Table*": "Import from components/Table/Table instead.",
|
||||
"lodash": "Use lodash/<name> instead."
|
||||
}
|
||||
}
|
||||
@@ -69,11 +67,7 @@
|
||||
"noConsole": {
|
||||
"level": "error",
|
||||
"options": {
|
||||
"allow": [
|
||||
"error",
|
||||
"info",
|
||||
"warn"
|
||||
]
|
||||
"allow": ["error", "info", "warn"]
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -82,5 +76,5 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"$schema": "https://biomejs.dev/schemas/2.2.0/schema.json"
|
||||
"$schema": "./node_modules/@biomejs/biome/configuration_schema.json"
|
||||
}
|
||||
|
||||
@@ -29,15 +29,18 @@ import (
|
||||
|
||||
"github.com/coder/coder/v2/cli/cliui"
|
||||
"github.com/coder/coder/v2/coderd/httpapi"
|
||||
notificationsLib "github.com/coder/coder/v2/coderd/notifications"
|
||||
"github.com/coder/coder/v2/coderd/tracing"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
"github.com/coder/coder/v2/codersdk/workspacesdk"
|
||||
"github.com/coder/coder/v2/scaletest/agentconn"
|
||||
"github.com/coder/coder/v2/scaletest/autostart"
|
||||
"github.com/coder/coder/v2/scaletest/createusers"
|
||||
"github.com/coder/coder/v2/scaletest/createworkspaces"
|
||||
"github.com/coder/coder/v2/scaletest/dashboard"
|
||||
"github.com/coder/coder/v2/scaletest/harness"
|
||||
"github.com/coder/coder/v2/scaletest/loadtestutil"
|
||||
"github.com/coder/coder/v2/scaletest/notifications"
|
||||
"github.com/coder/coder/v2/scaletest/reconnectingpty"
|
||||
"github.com/coder/coder/v2/scaletest/workspacebuild"
|
||||
"github.com/coder/coder/v2/scaletest/workspacetraffic"
|
||||
@@ -57,9 +60,12 @@ func (r *RootCmd) scaletestCmd() *serpent.Command {
|
||||
Children: []*serpent.Command{
|
||||
r.scaletestCleanup(),
|
||||
r.scaletestDashboard(),
|
||||
r.scaletestDynamicParameters(),
|
||||
r.scaletestCreateWorkspaces(),
|
||||
r.scaletestWorkspaceUpdates(),
|
||||
r.scaletestWorkspaceTraffic(),
|
||||
r.scaletestAutostart(),
|
||||
r.scaletestNotifications(),
|
||||
},
|
||||
}
|
||||
|
||||
@@ -1682,6 +1688,492 @@ func (r *RootCmd) scaletestDashboard() *serpent.Command {
|
||||
return cmd
|
||||
}
|
||||
|
||||
const (
|
||||
autostartTestName = "autostart"
|
||||
)
|
||||
|
||||
func (r *RootCmd) scaletestAutostart() *serpent.Command {
|
||||
var (
|
||||
workspaceCount int64
|
||||
workspaceJobTimeout time.Duration
|
||||
autostartDelay time.Duration
|
||||
autostartTimeout time.Duration
|
||||
template string
|
||||
noCleanup bool
|
||||
|
||||
parameterFlags workspaceParameterFlags
|
||||
tracingFlags = &scaletestTracingFlags{}
|
||||
timeoutStrategy = &timeoutFlags{}
|
||||
cleanupStrategy = newScaletestCleanupStrategy()
|
||||
output = &scaletestOutputFlags{}
|
||||
prometheusFlags = &scaletestPrometheusFlags{}
|
||||
)
|
||||
|
||||
cmd := &serpent.Command{
|
||||
Use: "autostart",
|
||||
Short: "Replicate a thundering herd of autostarting workspaces",
|
||||
Handler: func(inv *serpent.Invocation) error {
|
||||
ctx := inv.Context()
|
||||
client, err := r.InitClient(inv)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
notifyCtx, stop := signal.NotifyContext(ctx, StopSignals...) // Checked later.
|
||||
defer stop()
|
||||
ctx = notifyCtx
|
||||
|
||||
me, err := requireAdmin(ctx, client)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
client.HTTPClient = &http.Client{
|
||||
Transport: &codersdk.HeaderTransport{
|
||||
Transport: http.DefaultTransport,
|
||||
Header: map[string][]string{
|
||||
codersdk.BypassRatelimitHeader: {"true"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
if workspaceCount <= 0 {
|
||||
return xerrors.Errorf("--workspace-count must be greater than zero")
|
||||
}
|
||||
|
||||
outputs, err := output.parse()
|
||||
if err != nil {
|
||||
return xerrors.Errorf("could not parse --output flags")
|
||||
}
|
||||
|
||||
tpl, err := parseTemplate(ctx, client, me.OrganizationIDs, template)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("parse template: %w", err)
|
||||
}
|
||||
|
||||
cliRichParameters, err := asWorkspaceBuildParameters(parameterFlags.richParameters)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("can't parse given parameter values: %w", err)
|
||||
}
|
||||
|
||||
richParameters, err := prepWorkspaceBuild(inv, client, prepWorkspaceBuildArgs{
|
||||
Action: WorkspaceCreate,
|
||||
TemplateVersionID: tpl.ActiveVersionID,
|
||||
|
||||
RichParameterFile: parameterFlags.richParameterFile,
|
||||
RichParameters: cliRichParameters,
|
||||
})
|
||||
if err != nil {
|
||||
return xerrors.Errorf("prepare build: %w", err)
|
||||
}
|
||||
|
||||
tracerProvider, closeTracing, tracingEnabled, err := tracingFlags.provider(ctx)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("create tracer provider: %w", err)
|
||||
}
|
||||
tracer := tracerProvider.Tracer(scaletestTracerName)
|
||||
|
||||
reg := prometheus.NewRegistry()
|
||||
metrics := autostart.NewMetrics(reg)
|
||||
|
||||
setupBarrier := new(sync.WaitGroup)
|
||||
setupBarrier.Add(int(workspaceCount))
|
||||
|
||||
th := harness.NewTestHarness(timeoutStrategy.wrapStrategy(harness.ConcurrentExecutionStrategy{}), cleanupStrategy.toStrategy())
|
||||
for i := range workspaceCount {
|
||||
id := strconv.Itoa(int(i))
|
||||
config := autostart.Config{
|
||||
User: createusers.Config{
|
||||
OrganizationID: me.OrganizationIDs[0],
|
||||
},
|
||||
Workspace: workspacebuild.Config{
|
||||
OrganizationID: me.OrganizationIDs[0],
|
||||
Request: codersdk.CreateWorkspaceRequest{
|
||||
TemplateID: tpl.ID,
|
||||
RichParameterValues: richParameters,
|
||||
},
|
||||
},
|
||||
WorkspaceJobTimeout: workspaceJobTimeout,
|
||||
AutostartDelay: autostartDelay,
|
||||
AutostartTimeout: autostartTimeout,
|
||||
Metrics: metrics,
|
||||
SetupBarrier: setupBarrier,
|
||||
}
|
||||
if err := config.Validate(); err != nil {
|
||||
return xerrors.Errorf("validate config: %w", err)
|
||||
}
|
||||
var runner harness.Runnable = autostart.NewRunner(client, config)
|
||||
if tracingEnabled {
|
||||
runner = &runnableTraceWrapper{
|
||||
tracer: tracer,
|
||||
spanName: fmt.Sprintf("%s/%s", autostartTestName, id),
|
||||
runner: runner,
|
||||
}
|
||||
}
|
||||
th.AddRun(autostartTestName, id, runner)
|
||||
}
|
||||
|
||||
logger := inv.Logger
|
||||
prometheusSrvClose := ServeHandler(ctx, logger, promhttp.HandlerFor(reg, promhttp.HandlerOpts{}), prometheusFlags.Address, "prometheus")
|
||||
defer prometheusSrvClose()
|
||||
|
||||
defer func() {
|
||||
_, _ = fmt.Fprintln(inv.Stderr, "\nUploading traces...")
|
||||
if err := closeTracing(ctx); err != nil {
|
||||
_, _ = fmt.Fprintf(inv.Stderr, "\nError uploading traces: %+v\n", err)
|
||||
}
|
||||
// Wait for prometheus metrics to be scraped
|
||||
_, _ = fmt.Fprintf(inv.Stderr, "Waiting %s for prometheus metrics to be scraped\n", prometheusFlags.Wait)
|
||||
<-time.After(prometheusFlags.Wait)
|
||||
}()
|
||||
|
||||
_, _ = fmt.Fprintln(inv.Stderr, "Running autostart load test...")
|
||||
testCtx, testCancel := timeoutStrategy.toContext(ctx)
|
||||
defer testCancel()
|
||||
err = th.Run(testCtx)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("run test harness (harness failure, not a test failure): %w", err)
|
||||
}
|
||||
|
||||
// If the command was interrupted, skip stats.
|
||||
if notifyCtx.Err() != nil {
|
||||
return notifyCtx.Err()
|
||||
}
|
||||
|
||||
res := th.Results()
|
||||
for _, o := range outputs {
|
||||
err = o.write(res, inv.Stdout)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("write output %q to %q: %w", o.format, o.path, err)
|
||||
}
|
||||
}
|
||||
|
||||
if !noCleanup {
|
||||
_, _ = fmt.Fprintln(inv.Stderr, "\nCleaning up...")
|
||||
cleanupCtx, cleanupCancel := cleanupStrategy.toContext(ctx)
|
||||
defer cleanupCancel()
|
||||
err = th.Cleanup(cleanupCtx)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("cleanup tests: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
if res.TotalFail > 0 {
|
||||
return xerrors.New("load test failed, see above for more details")
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
cmd.Options = serpent.OptionSet{
|
||||
{
|
||||
Flag: "workspace-count",
|
||||
FlagShorthand: "c",
|
||||
Env: "CODER_SCALETEST_WORKSPACE_COUNT",
|
||||
Description: "Required: Total number of workspaces to create.",
|
||||
Value: serpent.Int64Of(&workspaceCount),
|
||||
Required: true,
|
||||
},
|
||||
{
|
||||
Flag: "workspace-job-timeout",
|
||||
Env: "CODER_SCALETEST_WORKSPACE_JOB_TIMEOUT",
|
||||
Default: "5m",
|
||||
Description: "Timeout for workspace jobs (e.g. build, start).",
|
||||
Value: serpent.DurationOf(&workspaceJobTimeout),
|
||||
},
|
||||
{
|
||||
Flag: "autostart-delay",
|
||||
Env: "CODER_SCALETEST_AUTOSTART_DELAY",
|
||||
Default: "2m",
|
||||
Description: "How long after all the workspaces have been stopped to schedule them to be started again.",
|
||||
Value: serpent.DurationOf(&autostartDelay),
|
||||
},
|
||||
{
|
||||
Flag: "autostart-timeout",
|
||||
Env: "CODER_SCALETEST_AUTOSTART_TIMEOUT",
|
||||
Default: "5m",
|
||||
Description: "Timeout for the autostart build to be initiated after the scheduled start time.",
|
||||
Value: serpent.DurationOf(&autostartTimeout),
|
||||
},
|
||||
{
|
||||
Flag: "template",
|
||||
FlagShorthand: "t",
|
||||
Env: "CODER_SCALETEST_TEMPLATE",
|
||||
Description: "Required: Name or ID of the template to use for workspaces.",
|
||||
Value: serpent.StringOf(&template),
|
||||
Required: true,
|
||||
},
|
||||
{
|
||||
Flag: "no-cleanup",
|
||||
Env: "CODER_SCALETEST_NO_CLEANUP",
|
||||
Description: "Do not clean up resources after the test completes.",
|
||||
Value: serpent.BoolOf(&noCleanup),
|
||||
},
|
||||
}
|
||||
|
||||
cmd.Options = append(cmd.Options, parameterFlags.cliParameters()...)
|
||||
tracingFlags.attach(&cmd.Options)
|
||||
timeoutStrategy.attach(&cmd.Options)
|
||||
cleanupStrategy.attach(&cmd.Options)
|
||||
output.attach(&cmd.Options)
|
||||
prometheusFlags.attach(&cmd.Options)
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (r *RootCmd) scaletestNotifications() *serpent.Command {
|
||||
var (
|
||||
userCount int64
|
||||
ownerUserPercentage float64
|
||||
notificationTimeout time.Duration
|
||||
dialTimeout time.Duration
|
||||
noCleanup bool
|
||||
|
||||
tracingFlags = &scaletestTracingFlags{}
|
||||
|
||||
// This test requires unlimited concurrency.
|
||||
timeoutStrategy = &timeoutFlags{}
|
||||
cleanupStrategy = newScaletestCleanupStrategy()
|
||||
output = &scaletestOutputFlags{}
|
||||
prometheusFlags = &scaletestPrometheusFlags{}
|
||||
)
|
||||
|
||||
cmd := &serpent.Command{
|
||||
Use: "notifications",
|
||||
Short: "Simulate notification delivery by creating many users listening to notifications.",
|
||||
Handler: func(inv *serpent.Invocation) error {
|
||||
ctx := inv.Context()
|
||||
client, err := r.InitClient(inv)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
notifyCtx, stop := signal.NotifyContext(ctx, StopSignals...)
|
||||
defer stop()
|
||||
ctx = notifyCtx
|
||||
|
||||
me, err := requireAdmin(ctx, client)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
client.HTTPClient = &http.Client{
|
||||
Transport: &codersdk.HeaderTransport{
|
||||
Transport: http.DefaultTransport,
|
||||
Header: map[string][]string{
|
||||
codersdk.BypassRatelimitHeader: {"true"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
if userCount <= 0 {
|
||||
return xerrors.Errorf("--user-count must be greater than 0")
|
||||
}
|
||||
|
||||
if ownerUserPercentage < 0 || ownerUserPercentage > 100 {
|
||||
return xerrors.Errorf("--owner-user-percentage must be between 0 and 100")
|
||||
}
|
||||
|
||||
ownerUserCount := int64(float64(userCount) * ownerUserPercentage / 100)
|
||||
if ownerUserCount == 0 && ownerUserPercentage > 0 {
|
||||
ownerUserCount = 1
|
||||
}
|
||||
regularUserCount := userCount - ownerUserCount
|
||||
|
||||
_, _ = fmt.Fprintf(inv.Stderr, "Distribution plan:\n")
|
||||
_, _ = fmt.Fprintf(inv.Stderr, " Total users: %d\n", userCount)
|
||||
_, _ = fmt.Fprintf(inv.Stderr, " Owner users: %d (%.1f%%)\n", ownerUserCount, ownerUserPercentage)
|
||||
_, _ = fmt.Fprintf(inv.Stderr, " Regular users: %d (%.1f%%)\n", regularUserCount, 100.0-ownerUserPercentage)
|
||||
|
||||
outputs, err := output.parse()
|
||||
if err != nil {
|
||||
return xerrors.Errorf("could not parse --output flags")
|
||||
}
|
||||
|
||||
tracerProvider, closeTracing, tracingEnabled, err := tracingFlags.provider(ctx)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("create tracer provider: %w", err)
|
||||
}
|
||||
tracer := tracerProvider.Tracer(scaletestTracerName)
|
||||
|
||||
reg := prometheus.NewRegistry()
|
||||
metrics := notifications.NewMetrics(reg)
|
||||
|
||||
logger := inv.Logger
|
||||
prometheusSrvClose := ServeHandler(ctx, logger, promhttp.HandlerFor(reg, promhttp.HandlerOpts{}), prometheusFlags.Address, "prometheus")
|
||||
defer prometheusSrvClose()
|
||||
|
||||
defer func() {
|
||||
_, _ = fmt.Fprintln(inv.Stderr, "\nUploading traces...")
|
||||
if err := closeTracing(ctx); err != nil {
|
||||
_, _ = fmt.Fprintf(inv.Stderr, "\nError uploading traces: %+v\n", err)
|
||||
}
|
||||
// Wait for prometheus metrics to be scraped
|
||||
_, _ = fmt.Fprintf(inv.Stderr, "Waiting %s for prometheus metrics to be scraped\n", prometheusFlags.Wait)
|
||||
<-time.After(prometheusFlags.Wait)
|
||||
}()
|
||||
|
||||
_, _ = fmt.Fprintln(inv.Stderr, "Creating users...")
|
||||
|
||||
dialBarrier := &sync.WaitGroup{}
|
||||
ownerWatchBarrier := &sync.WaitGroup{}
|
||||
dialBarrier.Add(int(userCount))
|
||||
ownerWatchBarrier.Add(int(ownerUserCount))
|
||||
|
||||
expectedNotifications := map[uuid.UUID]chan time.Time{
|
||||
notificationsLib.TemplateUserAccountCreated: make(chan time.Time, 1),
|
||||
notificationsLib.TemplateUserAccountDeleted: make(chan time.Time, 1),
|
||||
}
|
||||
|
||||
configs := make([]notifications.Config, 0, userCount)
|
||||
for range ownerUserCount {
|
||||
config := notifications.Config{
|
||||
User: createusers.Config{
|
||||
OrganizationID: me.OrganizationIDs[0],
|
||||
},
|
||||
Roles: []string{codersdk.RoleOwner},
|
||||
NotificationTimeout: notificationTimeout,
|
||||
DialTimeout: dialTimeout,
|
||||
DialBarrier: dialBarrier,
|
||||
ReceivingWatchBarrier: ownerWatchBarrier,
|
||||
ExpectedNotifications: expectedNotifications,
|
||||
Metrics: metrics,
|
||||
}
|
||||
if err := config.Validate(); err != nil {
|
||||
return xerrors.Errorf("validate config: %w", err)
|
||||
}
|
||||
configs = append(configs, config)
|
||||
}
|
||||
for range regularUserCount {
|
||||
config := notifications.Config{
|
||||
User: createusers.Config{
|
||||
OrganizationID: me.OrganizationIDs[0],
|
||||
},
|
||||
Roles: []string{},
|
||||
NotificationTimeout: notificationTimeout,
|
||||
DialTimeout: dialTimeout,
|
||||
DialBarrier: dialBarrier,
|
||||
ReceivingWatchBarrier: ownerWatchBarrier,
|
||||
Metrics: metrics,
|
||||
}
|
||||
if err := config.Validate(); err != nil {
|
||||
return xerrors.Errorf("validate config: %w", err)
|
||||
}
|
||||
configs = append(configs, config)
|
||||
}
|
||||
|
||||
go triggerUserNotifications(
|
||||
ctx,
|
||||
logger,
|
||||
client,
|
||||
me.OrganizationIDs[0],
|
||||
dialBarrier,
|
||||
dialTimeout,
|
||||
expectedNotifications,
|
||||
)
|
||||
|
||||
th := harness.NewTestHarness(timeoutStrategy.wrapStrategy(harness.ConcurrentExecutionStrategy{}), cleanupStrategy.toStrategy())
|
||||
|
||||
for i, config := range configs {
|
||||
id := strconv.Itoa(i)
|
||||
name := fmt.Sprintf("notifications-%s", id)
|
||||
var runner harness.Runnable = notifications.NewRunner(client, config)
|
||||
if tracingEnabled {
|
||||
runner = &runnableTraceWrapper{
|
||||
tracer: tracer,
|
||||
spanName: name,
|
||||
runner: runner,
|
||||
}
|
||||
}
|
||||
|
||||
th.AddRun(name, id, runner)
|
||||
}
|
||||
|
||||
_, _ = fmt.Fprintln(inv.Stderr, "Running notification delivery scaletest...")
|
||||
testCtx, testCancel := timeoutStrategy.toContext(ctx)
|
||||
defer testCancel()
|
||||
err = th.Run(testCtx)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("run test harness (harness failure, not a test failure): %w", err)
|
||||
}
|
||||
|
||||
// If the command was interrupted, skip stats.
|
||||
if notifyCtx.Err() != nil {
|
||||
return notifyCtx.Err()
|
||||
}
|
||||
|
||||
res := th.Results()
|
||||
for _, o := range outputs {
|
||||
err = o.write(res, inv.Stdout)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("write output %q to %q: %w", o.format, o.path, err)
|
||||
}
|
||||
}
|
||||
|
||||
if !noCleanup {
|
||||
_, _ = fmt.Fprintln(inv.Stderr, "\nCleaning up...")
|
||||
cleanupCtx, cleanupCancel := cleanupStrategy.toContext(ctx)
|
||||
defer cleanupCancel()
|
||||
err = th.Cleanup(cleanupCtx)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("cleanup tests: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
if res.TotalFail > 0 {
|
||||
return xerrors.New("load test failed, see above for more details")
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
cmd.Options = serpent.OptionSet{
|
||||
{
|
||||
Flag: "user-count",
|
||||
FlagShorthand: "c",
|
||||
Env: "CODER_SCALETEST_NOTIFICATION_USER_COUNT",
|
||||
Description: "Required: Total number of users to create.",
|
||||
Value: serpent.Int64Of(&userCount),
|
||||
Required: true,
|
||||
},
|
||||
{
|
||||
Flag: "owner-user-percentage",
|
||||
Env: "CODER_SCALETEST_NOTIFICATION_OWNER_USER_PERCENTAGE",
|
||||
Default: "20.0",
|
||||
Description: "Percentage of users to assign Owner role to (0-100).",
|
||||
Value: serpent.Float64Of(&ownerUserPercentage),
|
||||
},
|
||||
{
|
||||
Flag: "notification-timeout",
|
||||
Env: "CODER_SCALETEST_NOTIFICATION_TIMEOUT",
|
||||
Default: "5m",
|
||||
Description: "How long to wait for notifications after triggering.",
|
||||
Value: serpent.DurationOf(¬ificationTimeout),
|
||||
},
|
||||
{
|
||||
Flag: "dial-timeout",
|
||||
Env: "CODER_SCALETEST_DIAL_TIMEOUT",
|
||||
Default: "2m",
|
||||
Description: "Timeout for dialing the notification websocket endpoint.",
|
||||
Value: serpent.DurationOf(&dialTimeout),
|
||||
},
|
||||
{
|
||||
Flag: "no-cleanup",
|
||||
Env: "CODER_SCALETEST_NO_CLEANUP",
|
||||
Description: "Do not clean up resources after the test completes.",
|
||||
Value: serpent.BoolOf(&noCleanup),
|
||||
},
|
||||
}
|
||||
|
||||
tracingFlags.attach(&cmd.Options)
|
||||
timeoutStrategy.attach(&cmd.Options)
|
||||
cleanupStrategy.attach(&cmd.Options)
|
||||
output.attach(&cmd.Options)
|
||||
prometheusFlags.attach(&cmd.Options)
|
||||
return cmd
|
||||
}
|
||||
|
||||
type runnableTraceWrapper struct {
|
||||
tracer trace.Tracer
|
||||
spanName string
|
||||
@@ -1882,6 +2374,73 @@ func parseTargetRange(name, targets string) (start, end int, err error) {
|
||||
return start, end, nil
|
||||
}
|
||||
|
||||
// triggerUserNotifications waits for all test users to connect,
|
||||
// then creates and deletes a test user to trigger notification events for testing.
|
||||
func triggerUserNotifications(
|
||||
ctx context.Context,
|
||||
logger slog.Logger,
|
||||
client *codersdk.Client,
|
||||
orgID uuid.UUID,
|
||||
dialBarrier *sync.WaitGroup,
|
||||
dialTimeout time.Duration,
|
||||
expectedNotifications map[uuid.UUID]chan time.Time,
|
||||
) {
|
||||
logger.Info(ctx, "waiting for all users to connect")
|
||||
|
||||
// Wait for all users to connect
|
||||
waitCtx, cancel := context.WithTimeout(ctx, dialTimeout+30*time.Second)
|
||||
defer cancel()
|
||||
|
||||
done := make(chan struct{})
|
||||
go func() {
|
||||
dialBarrier.Wait()
|
||||
close(done)
|
||||
}()
|
||||
|
||||
select {
|
||||
case <-done:
|
||||
logger.Info(ctx, "all users connected")
|
||||
case <-waitCtx.Done():
|
||||
if waitCtx.Err() == context.DeadlineExceeded {
|
||||
logger.Error(ctx, "timeout waiting for users to connect")
|
||||
} else {
|
||||
logger.Info(ctx, "context canceled while waiting for users")
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
const (
|
||||
triggerUsername = "scaletest-trigger-user"
|
||||
triggerEmail = "scaletest-trigger@example.com"
|
||||
)
|
||||
|
||||
logger.Info(ctx, "creating test user to test notifications",
|
||||
slog.F("username", triggerUsername),
|
||||
slog.F("email", triggerEmail),
|
||||
slog.F("org_id", orgID))
|
||||
|
||||
testUser, err := client.CreateUserWithOrgs(ctx, codersdk.CreateUserRequestWithOrgs{
|
||||
OrganizationIDs: []uuid.UUID{orgID},
|
||||
Username: triggerUsername,
|
||||
Email: triggerEmail,
|
||||
Password: "test-password-123",
|
||||
})
|
||||
if err != nil {
|
||||
logger.Error(ctx, "create test user", slog.Error(err))
|
||||
return
|
||||
}
|
||||
expectedNotifications[notificationsLib.TemplateUserAccountCreated] <- time.Now()
|
||||
|
||||
err = client.DeleteUser(ctx, testUser.ID)
|
||||
if err != nil {
|
||||
logger.Error(ctx, "delete test user", slog.Error(err))
|
||||
return
|
||||
}
|
||||
expectedNotifications[notificationsLib.TemplateUserAccountDeleted] <- time.Now()
|
||||
close(expectedNotifications[notificationsLib.TemplateUserAccountCreated])
|
||||
close(expectedNotifications[notificationsLib.TemplateUserAccountDeleted])
|
||||
}
|
||||
|
||||
func createWorkspaceAppConfig(client *codersdk.Client, appHost, app string, workspace codersdk.Workspace, agent codersdk.WorkspaceAgent) (workspacetraffic.AppConfig, error) {
|
||||
if app == "" {
|
||||
return workspacetraffic.AppConfig{}, nil
|
||||
|
||||
@@ -0,0 +1,110 @@
|
||||
//go:build !slim
|
||||
|
||||
package cli
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"cdr.dev/slog"
|
||||
"cdr.dev/slog/sloggers/sloghuman"
|
||||
|
||||
"github.com/coder/coder/v2/scaletest/dynamicparameters"
|
||||
"github.com/coder/coder/v2/scaletest/harness"
|
||||
"github.com/coder/serpent"
|
||||
)
|
||||
|
||||
const (
|
||||
dynamicParametersTestName = "dynamic-parameters"
|
||||
)
|
||||
|
||||
func (r *RootCmd) scaletestDynamicParameters() *serpent.Command {
|
||||
var templateName string
|
||||
var numEvals int64
|
||||
orgContext := NewOrganizationContext()
|
||||
output := &scaletestOutputFlags{}
|
||||
|
||||
cmd := &serpent.Command{
|
||||
Use: "dynamic-parameters",
|
||||
Short: "Generates load on the Coder server evaluating dynamic parameters",
|
||||
Long: `It is recommended that all rate limits are disabled on the server before running this scaletest. This test generates many login events which will be rate limited against the (most likely single) IP.`,
|
||||
Handler: func(inv *serpent.Invocation) error {
|
||||
ctx := inv.Context()
|
||||
|
||||
outputs, err := output.parse()
|
||||
if err != nil {
|
||||
return xerrors.Errorf("could not parse --output flags")
|
||||
}
|
||||
|
||||
client, err := r.InitClient(inv)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if templateName == "" {
|
||||
return xerrors.Errorf("template cannot be empty")
|
||||
}
|
||||
|
||||
org, err := orgContext.Selected(inv, client)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
logger := slog.Make(sloghuman.Sink(inv.Stdout)).Leveled(slog.LevelDebug)
|
||||
partitions, err := dynamicparameters.SetupPartitions(ctx, client, org.ID, templateName, numEvals, logger)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("setup dynamic parameters partitions: %w", err)
|
||||
}
|
||||
|
||||
th := harness.NewTestHarness(harness.ConcurrentExecutionStrategy{}, harness.ConcurrentExecutionStrategy{})
|
||||
reg := prometheus.NewRegistry()
|
||||
metrics := dynamicparameters.NewMetrics(reg, "concurrent_evaluations")
|
||||
|
||||
for i, part := range partitions {
|
||||
for j := range part.ConcurrentEvaluations {
|
||||
cfg := dynamicparameters.Config{
|
||||
TemplateVersion: part.TemplateVersion.ID,
|
||||
Metrics: metrics,
|
||||
MetricLabelValues: []string{fmt.Sprintf("%d", part.ConcurrentEvaluations)},
|
||||
}
|
||||
runner := dynamicparameters.NewRunner(client, cfg)
|
||||
th.AddRun(dynamicParametersTestName, fmt.Sprintf("%d/%d", j, i), runner)
|
||||
}
|
||||
}
|
||||
|
||||
err = th.Run(ctx)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("run test harness: %w", err)
|
||||
}
|
||||
|
||||
res := th.Results()
|
||||
for _, o := range outputs {
|
||||
err = o.write(res, inv.Stdout)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("write output %q to %q: %w", o.format, o.path, err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
cmd.Options = serpent.OptionSet{
|
||||
{
|
||||
Flag: "template",
|
||||
Description: "Name of the template to use. If it does not exist, it will be created.",
|
||||
Default: "scaletest-dynamic-parameters",
|
||||
Value: serpent.StringOf(&templateName),
|
||||
},
|
||||
{
|
||||
Flag: "concurrent-evaluations",
|
||||
Description: "Number of concurrent dynamic parameter evaluations to perform.",
|
||||
Default: "100",
|
||||
Value: serpent.Int64Of(&numEvals),
|
||||
},
|
||||
}
|
||||
orgContext.AttachOptions(cmd)
|
||||
output.attach(&cmd.Options)
|
||||
return cmd
|
||||
}
|
||||
@@ -29,6 +29,28 @@ func (r *RootCmd) taskCreate() *serpent.Command {
|
||||
cmd := &serpent.Command{
|
||||
Use: "create [input]",
|
||||
Short: "Create an experimental task",
|
||||
Long: FormatExamples(
|
||||
Example{
|
||||
Description: "Create a task with direct input",
|
||||
Command: "coder exp task create \"Add authentication to the user service\"",
|
||||
},
|
||||
Example{
|
||||
Description: "Create a task with stdin input",
|
||||
Command: "echo \"Add authentication to the user service\" | coder exp task create",
|
||||
},
|
||||
Example{
|
||||
Description: "Create a task with a specific name",
|
||||
Command: "coder exp task create --name task1 \"Add authentication to the user service\"",
|
||||
},
|
||||
Example{
|
||||
Description: "Create a task from a specific template / preset",
|
||||
Command: "coder exp task create --template backend-dev --preset \"My Preset\" \"Add authentication to the user service\"",
|
||||
},
|
||||
Example{
|
||||
Description: "Create a task for another user (requires appropriate permissions)",
|
||||
Command: "coder exp task create --owner user@example.com \"Add authentication to the user service\"",
|
||||
},
|
||||
),
|
||||
Middleware: serpent.Chain(
|
||||
serpent.RequireRangeArgs(0, 1),
|
||||
),
|
||||
|
||||
@@ -19,6 +19,20 @@ func (r *RootCmd) taskDelete() *serpent.Command {
|
||||
cmd := &serpent.Command{
|
||||
Use: "delete <task> [<task> ...]",
|
||||
Short: "Delete experimental tasks",
|
||||
Long: FormatExamples(
|
||||
Example{
|
||||
Description: "Delete a single task.",
|
||||
Command: "$ coder exp task delete task1",
|
||||
},
|
||||
Example{
|
||||
Description: "Delete multiple tasks.",
|
||||
Command: "$ coder exp task delete task1 task2 task3",
|
||||
},
|
||||
Example{
|
||||
Description: "Delete a task without confirmation.",
|
||||
Command: "$ coder exp task delete task4 --yes",
|
||||
},
|
||||
),
|
||||
Middleware: serpent.Chain(
|
||||
serpent.RequireRangeArgs(1, -1),
|
||||
),
|
||||
|
||||
+24
-2
@@ -67,8 +67,30 @@ func (r *RootCmd) taskList() *serpent.Command {
|
||||
)
|
||||
|
||||
cmd := &serpent.Command{
|
||||
Use: "list",
|
||||
Short: "List experimental tasks",
|
||||
Use: "list",
|
||||
Short: "List experimental tasks",
|
||||
Long: FormatExamples(
|
||||
Example{
|
||||
Description: "List tasks for the current user.",
|
||||
Command: "coder exp task list",
|
||||
},
|
||||
Example{
|
||||
Description: "List tasks for a specific user.",
|
||||
Command: "coder exp task list --user someone-else",
|
||||
},
|
||||
Example{
|
||||
Description: "List all tasks you can view.",
|
||||
Command: "coder exp task list --all",
|
||||
},
|
||||
Example{
|
||||
Description: "List all your running tasks.",
|
||||
Command: "coder exp task list --status running",
|
||||
},
|
||||
Example{
|
||||
Description: "As above, but only show IDs.",
|
||||
Command: "coder exp task list --status running --quiet",
|
||||
},
|
||||
),
|
||||
Aliases: []string{"ls"},
|
||||
Middleware: serpent.Chain(
|
||||
serpent.RequireNArgs(0),
|
||||
|
||||
@@ -26,6 +26,11 @@ func (r *RootCmd) taskLogs() *serpent.Command {
|
||||
cmd := &serpent.Command{
|
||||
Use: "logs <task>",
|
||||
Short: "Show a task's logs",
|
||||
Long: FormatExamples(
|
||||
Example{
|
||||
Description: "Show logs for a given task.",
|
||||
Command: "coder exp task logs task1",
|
||||
}),
|
||||
Middleware: serpent.Chain(
|
||||
serpent.RequireNArgs(1),
|
||||
),
|
||||
|
||||
@@ -14,8 +14,15 @@ func (r *RootCmd) taskSend() *serpent.Command {
|
||||
var stdin bool
|
||||
|
||||
cmd := &serpent.Command{
|
||||
Use: "send <task> [<input> | --stdin]",
|
||||
Short: "Send input to a task",
|
||||
Use: "send <task> [<input> | --stdin]",
|
||||
Short: "Send input to a task",
|
||||
Long: FormatExamples(Example{
|
||||
Description: "Send direct input to a task.",
|
||||
Command: "coder exp task send task1 \"Please also add unit tests\"",
|
||||
}, Example{
|
||||
Description: "Send input from stdin to a task.",
|
||||
Command: "echo \"Please also add unit tests\" | coder exp task send task1 --stdin",
|
||||
}),
|
||||
Middleware: serpent.RequireRangeArgs(1, 2),
|
||||
Options: serpent.OptionSet{
|
||||
{
|
||||
|
||||
+11
-1
@@ -44,7 +44,17 @@ func (r *RootCmd) taskStatus() *serpent.Command {
|
||||
watchIntervalArg time.Duration
|
||||
)
|
||||
cmd := &serpent.Command{
|
||||
Short: "Show the status of a task.",
|
||||
Short: "Show the status of a task.",
|
||||
Long: FormatExamples(
|
||||
Example{
|
||||
Description: "Show the status of a given task.",
|
||||
Command: "coder exp task status task1",
|
||||
},
|
||||
Example{
|
||||
Description: "Watch the status of a given task until it completes (idle or stopped).",
|
||||
Command: "coder exp task status task1 --watch",
|
||||
},
|
||||
),
|
||||
Use: "status",
|
||||
Aliases: []string{"stat"},
|
||||
Options: serpent.OptionSet{
|
||||
|
||||
@@ -193,6 +193,7 @@ STATE CHANGED STATUS HEALTHY STATE MESSAGE
|
||||
"workspace_agent_id": null,
|
||||
"workspace_agent_lifecycle": null,
|
||||
"workspace_agent_health": null,
|
||||
"workspace_app_id": null,
|
||||
"initial_prompt": "",
|
||||
"status": "running",
|
||||
"current_state": {
|
||||
|
||||
+21
-4
@@ -43,8 +43,9 @@ func (r *RootCmd) provisionerJobsList() *serpent.Command {
|
||||
cliui.TableFormat([]provisionerJobRow{}, []string{"created at", "id", "type", "template display name", "status", "queue", "tags"}),
|
||||
cliui.JSONFormat(),
|
||||
)
|
||||
status []string
|
||||
limit int64
|
||||
status []string
|
||||
limit int64
|
||||
initiator string
|
||||
)
|
||||
|
||||
cmd := &serpent.Command{
|
||||
@@ -65,9 +66,18 @@ func (r *RootCmd) provisionerJobsList() *serpent.Command {
|
||||
return xerrors.Errorf("current organization: %w", err)
|
||||
}
|
||||
|
||||
if initiator != "" {
|
||||
user, err := client.User(ctx, initiator)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("initiator not found: %s", initiator)
|
||||
}
|
||||
initiator = user.ID.String()
|
||||
}
|
||||
|
||||
jobs, err := client.OrganizationProvisionerJobs(ctx, org.ID, &codersdk.OrganizationProvisionerJobsOptions{
|
||||
Status: slice.StringEnums[codersdk.ProvisionerJobStatus](status),
|
||||
Limit: int(limit),
|
||||
Status: slice.StringEnums[codersdk.ProvisionerJobStatus](status),
|
||||
Limit: int(limit),
|
||||
Initiator: initiator,
|
||||
})
|
||||
if err != nil {
|
||||
return xerrors.Errorf("list provisioner jobs: %w", err)
|
||||
@@ -122,6 +132,13 @@ func (r *RootCmd) provisionerJobsList() *serpent.Command {
|
||||
Default: "50",
|
||||
Value: serpent.Int64Of(&limit),
|
||||
},
|
||||
{
|
||||
Flag: "initiator",
|
||||
FlagShorthand: "i",
|
||||
Env: "CODER_PROVISIONER_JOB_LIST_INITIATOR",
|
||||
Description: "Filter by initiator (user ID or username).",
|
||||
Value: serpent.StringOf(&initiator),
|
||||
},
|
||||
}...)
|
||||
|
||||
orgContext.AttachOptions(cmd)
|
||||
|
||||
+168
-24
@@ -5,6 +5,7 @@ import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
@@ -26,33 +27,32 @@ import (
|
||||
func TestProvisionerJobs(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
db, ps := dbtestutil.NewDB(t)
|
||||
client, _, coderdAPI := coderdtest.NewWithAPI(t, &coderdtest.Options{
|
||||
IncludeProvisionerDaemon: false,
|
||||
Database: db,
|
||||
Pubsub: ps,
|
||||
})
|
||||
owner := coderdtest.CreateFirstUser(t, client)
|
||||
templateAdminClient, templateAdmin := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.ScopedRoleOrgTemplateAdmin(owner.OrganizationID))
|
||||
memberClient, member := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID)
|
||||
|
||||
// These CLI tests are related to provisioner job CRUD operations and as such
|
||||
// do not require the overhead of starting a provisioner. Other provisioner job
|
||||
// functionalities (acquisition etc.) are tested elsewhere.
|
||||
template := dbgen.Template(t, db, database.Template{
|
||||
OrganizationID: owner.OrganizationID,
|
||||
CreatedBy: owner.UserID,
|
||||
AllowUserCancelWorkspaceJobs: true,
|
||||
})
|
||||
version := dbgen.TemplateVersion(t, db, database.TemplateVersion{
|
||||
OrganizationID: owner.OrganizationID,
|
||||
CreatedBy: owner.UserID,
|
||||
TemplateID: uuid.NullUUID{UUID: template.ID, Valid: true},
|
||||
})
|
||||
|
||||
t.Run("Cancel", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
db, ps := dbtestutil.NewDB(t)
|
||||
client, _, coderdAPI := coderdtest.NewWithAPI(t, &coderdtest.Options{
|
||||
IncludeProvisionerDaemon: false,
|
||||
Database: db,
|
||||
Pubsub: ps,
|
||||
})
|
||||
owner := coderdtest.CreateFirstUser(t, client)
|
||||
templateAdminClient, templateAdmin := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.ScopedRoleOrgTemplateAdmin(owner.OrganizationID))
|
||||
memberClient, member := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID)
|
||||
|
||||
// These CLI tests are related to provisioner job CRUD operations and as such
|
||||
// do not require the overhead of starting a provisioner. Other provisioner job
|
||||
// functionalities (acquisition etc.) are tested elsewhere.
|
||||
template := dbgen.Template(t, db, database.Template{
|
||||
OrganizationID: owner.OrganizationID,
|
||||
CreatedBy: owner.UserID,
|
||||
AllowUserCancelWorkspaceJobs: true,
|
||||
})
|
||||
version := dbgen.TemplateVersion(t, db, database.TemplateVersion{
|
||||
OrganizationID: owner.OrganizationID,
|
||||
CreatedBy: owner.UserID,
|
||||
TemplateID: uuid.NullUUID{UUID: template.ID, Valid: true},
|
||||
})
|
||||
// Test helper to create a provisioner job of a given type with a given input.
|
||||
prepareJob := func(t *testing.T, jobType database.ProvisionerJobType, input json.RawMessage) database.ProvisionerJob {
|
||||
t.Helper()
|
||||
@@ -178,4 +178,148 @@ func TestProvisionerJobs(t *testing.T) {
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("List", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
db, ps := dbtestutil.NewDB(t)
|
||||
client, _, coderdAPI := coderdtest.NewWithAPI(t, &coderdtest.Options{
|
||||
IncludeProvisionerDaemon: false,
|
||||
Database: db,
|
||||
Pubsub: ps,
|
||||
})
|
||||
owner := coderdtest.CreateFirstUser(t, client)
|
||||
_, member := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID)
|
||||
|
||||
// These CLI tests are related to provisioner job CRUD operations and as such
|
||||
// do not require the overhead of starting a provisioner. Other provisioner job
|
||||
// functionalities (acquisition etc.) are tested elsewhere.
|
||||
template := dbgen.Template(t, db, database.Template{
|
||||
OrganizationID: owner.OrganizationID,
|
||||
CreatedBy: owner.UserID,
|
||||
AllowUserCancelWorkspaceJobs: true,
|
||||
})
|
||||
version := dbgen.TemplateVersion(t, db, database.TemplateVersion{
|
||||
OrganizationID: owner.OrganizationID,
|
||||
CreatedBy: owner.UserID,
|
||||
TemplateID: uuid.NullUUID{UUID: template.ID, Valid: true},
|
||||
})
|
||||
// Create some test jobs
|
||||
job1 := dbgen.ProvisionerJob(t, db, coderdAPI.Pubsub, database.ProvisionerJob{
|
||||
OrganizationID: owner.OrganizationID,
|
||||
InitiatorID: owner.UserID,
|
||||
Type: database.ProvisionerJobTypeTemplateVersionImport,
|
||||
Input: []byte(`{"template_version_id":"` + version.ID.String() + `"}`),
|
||||
Tags: database.StringMap{provisionersdk.TagScope: provisionersdk.ScopeOrganization},
|
||||
})
|
||||
|
||||
job2 := dbgen.ProvisionerJob(t, db, coderdAPI.Pubsub, database.ProvisionerJob{
|
||||
OrganizationID: owner.OrganizationID,
|
||||
InitiatorID: member.ID,
|
||||
Type: database.ProvisionerJobTypeWorkspaceBuild,
|
||||
Input: []byte(`{"workspace_build_id":"` + uuid.New().String() + `"}`),
|
||||
Tags: database.StringMap{provisionersdk.TagScope: provisionersdk.ScopeOrganization},
|
||||
})
|
||||
// Test basic list command
|
||||
t.Run("Basic", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
inv, root := clitest.New(t, "provisioner", "jobs", "list")
|
||||
clitest.SetupConfig(t, client, root)
|
||||
var buf bytes.Buffer
|
||||
inv.Stdout = &buf
|
||||
err := inv.Run()
|
||||
require.NoError(t, err)
|
||||
|
||||
// Should contain both jobs
|
||||
output := buf.String()
|
||||
assert.Contains(t, output, job1.ID.String())
|
||||
assert.Contains(t, output, job2.ID.String())
|
||||
})
|
||||
|
||||
// Test list with JSON output
|
||||
t.Run("JSON", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
inv, root := clitest.New(t, "provisioner", "jobs", "list", "--output", "json")
|
||||
clitest.SetupConfig(t, client, root)
|
||||
var buf bytes.Buffer
|
||||
inv.Stdout = &buf
|
||||
err := inv.Run()
|
||||
require.NoError(t, err)
|
||||
|
||||
// Parse JSON output
|
||||
var jobs []codersdk.ProvisionerJob
|
||||
err = json.Unmarshal(buf.Bytes(), &jobs)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Should contain both jobs
|
||||
jobIDs := make([]uuid.UUID, len(jobs))
|
||||
for i, job := range jobs {
|
||||
jobIDs[i] = job.ID
|
||||
}
|
||||
assert.Contains(t, jobIDs, job1.ID)
|
||||
assert.Contains(t, jobIDs, job2.ID)
|
||||
})
|
||||
|
||||
// Test list with limit
|
||||
t.Run("Limit", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
inv, root := clitest.New(t, "provisioner", "jobs", "list", "--limit", "1")
|
||||
clitest.SetupConfig(t, client, root)
|
||||
var buf bytes.Buffer
|
||||
inv.Stdout = &buf
|
||||
err := inv.Run()
|
||||
require.NoError(t, err)
|
||||
|
||||
// Should contain at most 1 job
|
||||
output := buf.String()
|
||||
jobCount := 0
|
||||
if strings.Contains(output, job1.ID.String()) {
|
||||
jobCount++
|
||||
}
|
||||
if strings.Contains(output, job2.ID.String()) {
|
||||
jobCount++
|
||||
}
|
||||
assert.LessOrEqual(t, jobCount, 1)
|
||||
})
|
||||
|
||||
// Test list with initiator filter
|
||||
t.Run("InitiatorFilter", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// Get owner user details to access username
|
||||
ctx := testutil.Context(t, testutil.WaitShort)
|
||||
ownerUser, err := client.User(ctx, owner.UserID.String())
|
||||
require.NoError(t, err)
|
||||
|
||||
// Test filtering by initiator (using username)
|
||||
inv, root := clitest.New(t, "provisioner", "jobs", "list", "--initiator", ownerUser.Username)
|
||||
clitest.SetupConfig(t, client, root)
|
||||
var buf bytes.Buffer
|
||||
inv.Stdout = &buf
|
||||
err = inv.Run()
|
||||
require.NoError(t, err)
|
||||
|
||||
// Should only contain job1 (initiated by owner)
|
||||
output := buf.String()
|
||||
assert.Contains(t, output, job1.ID.String())
|
||||
assert.NotContains(t, output, job2.ID.String())
|
||||
})
|
||||
|
||||
// Test list with invalid user
|
||||
t.Run("InvalidUser", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// Test with non-existent user
|
||||
inv, root := clitest.New(t, "provisioner", "jobs", "list", "--initiator", "nonexistent-user")
|
||||
clitest.SetupConfig(t, client, root)
|
||||
var buf bytes.Buffer
|
||||
inv.Stdout = &buf
|
||||
err := inv.Run()
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "initiator not found: nonexistent-user")
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
+4
-2
@@ -1254,8 +1254,9 @@ func TestServer(t *testing.T) {
|
||||
t.Logf("error creating request: %s", err.Error())
|
||||
return false
|
||||
}
|
||||
client := &http.Client{}
|
||||
// nolint:bodyclose
|
||||
res, err := http.DefaultClient.Do(req)
|
||||
res, err := client.Do(req)
|
||||
if err != nil {
|
||||
t.Logf("error hitting prometheus endpoint: %s", err.Error())
|
||||
return false
|
||||
@@ -1316,8 +1317,9 @@ func TestServer(t *testing.T) {
|
||||
t.Logf("error creating request: %s", err.Error())
|
||||
return false
|
||||
}
|
||||
client := &http.Client{}
|
||||
// nolint:bodyclose
|
||||
res, err := http.DefaultClient.Do(req)
|
||||
res, err := client.Do(req)
|
||||
if err != nil {
|
||||
t.Logf("error hitting prometheus endpoint: %s", err.Error())
|
||||
return false
|
||||
|
||||
+2
-1
@@ -1242,7 +1242,8 @@ func TestSSH(t *testing.T) {
|
||||
// true exits the loop.
|
||||
return true
|
||||
}
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
client := &http.Client{}
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
t.Logf("HTTP GET http://localhost:8222/ %s", err)
|
||||
return false
|
||||
|
||||
+158
-3
@@ -9,6 +9,7 @@ import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
@@ -461,10 +462,14 @@ func createValidTemplateVersion(inv *serpent.Invocation, args createValidTemplat
|
||||
})
|
||||
if err != nil {
|
||||
var jobErr *cliui.ProvisionerJobError
|
||||
if errors.As(err, &jobErr) && !codersdk.JobIsMissingParameterErrorCode(jobErr.Code) {
|
||||
return nil, err
|
||||
if errors.As(err, &jobErr) {
|
||||
if codersdk.JobIsMissingRequiredTemplateVariableErrorCode(jobErr.Code) {
|
||||
return handleMissingTemplateVariables(inv, args, version.ID)
|
||||
}
|
||||
if !codersdk.JobIsMissingParameterErrorCode(jobErr.Code) {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return nil, err
|
||||
}
|
||||
version, err = client.TemplateVersion(inv.Context(), version.ID)
|
||||
@@ -528,3 +533,153 @@ func prettyDirectoryPath(dir string) string {
|
||||
}
|
||||
return prettyDir
|
||||
}
|
||||
|
||||
func handleMissingTemplateVariables(inv *serpent.Invocation, args createValidTemplateVersionArgs, failedVersionID uuid.UUID) (*codersdk.TemplateVersion, error) {
|
||||
client := args.Client
|
||||
|
||||
templateVariables, err := client.TemplateVersionVariables(inv.Context(), failedVersionID)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("fetch template variables: %w", err)
|
||||
}
|
||||
|
||||
existingValues := make(map[string]string)
|
||||
for _, v := range args.UserVariableValues {
|
||||
existingValues[v.Name] = v.Value
|
||||
}
|
||||
|
||||
var missingVariables []codersdk.TemplateVersionVariable
|
||||
for _, variable := range templateVariables {
|
||||
if !variable.Required {
|
||||
continue
|
||||
}
|
||||
|
||||
if existingValue, exists := existingValues[variable.Name]; exists && existingValue != "" {
|
||||
continue
|
||||
}
|
||||
|
||||
// Only prompt for variables that don't have a default value or have a redacted default
|
||||
// Sensitive variables have a default value of "*redacted*"
|
||||
// See: https://github.com/coder/coder/blob/a78790c632974e04babfef6de0e2ddf044787a7a/coderd/provisionerdserver/provisionerdserver.go#L3206
|
||||
if variable.DefaultValue == "" || (variable.Sensitive && variable.DefaultValue == "*redacted*") {
|
||||
missingVariables = append(missingVariables, variable)
|
||||
}
|
||||
}
|
||||
|
||||
if len(missingVariables) == 0 {
|
||||
return nil, xerrors.New("no missing required variables found")
|
||||
}
|
||||
|
||||
_, _ = fmt.Fprintf(inv.Stderr, "Found %d missing required variables:\n", len(missingVariables))
|
||||
for _, v := range missingVariables {
|
||||
_, _ = fmt.Fprintf(inv.Stderr, " - %s (%s): %s\n", v.Name, v.Type, v.Description)
|
||||
}
|
||||
|
||||
_, _ = fmt.Fprintln(inv.Stderr, "\nThe template requires values for the following variables:")
|
||||
|
||||
var promptedValues []codersdk.VariableValue
|
||||
for _, variable := range missingVariables {
|
||||
value, err := promptForTemplateVariable(inv, variable)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("prompt for variable %q: %w", variable.Name, err)
|
||||
}
|
||||
promptedValues = append(promptedValues, codersdk.VariableValue{
|
||||
Name: variable.Name,
|
||||
Value: value,
|
||||
})
|
||||
}
|
||||
|
||||
combinedValues := codersdk.CombineVariableValues(args.UserVariableValues, promptedValues)
|
||||
|
||||
_, _ = fmt.Fprintln(inv.Stderr, "\nRetrying template build with provided variables...")
|
||||
|
||||
retryArgs := args
|
||||
retryArgs.UserVariableValues = combinedValues
|
||||
|
||||
return createValidTemplateVersion(inv, retryArgs)
|
||||
}
|
||||
|
||||
func promptForTemplateVariable(inv *serpent.Invocation, variable codersdk.TemplateVersionVariable) (string, error) {
|
||||
displayVariableInfo(inv, variable)
|
||||
|
||||
switch variable.Type {
|
||||
case "bool":
|
||||
return promptForBoolVariable(inv, variable)
|
||||
case "number":
|
||||
return promptForNumberVariable(inv, variable)
|
||||
default:
|
||||
return promptForStringVariable(inv, variable)
|
||||
}
|
||||
}
|
||||
|
||||
func displayVariableInfo(inv *serpent.Invocation, variable codersdk.TemplateVersionVariable) {
|
||||
_, _ = fmt.Fprintf(inv.Stderr, "var.%s", cliui.Bold(variable.Name))
|
||||
if variable.Required {
|
||||
_, _ = fmt.Fprint(inv.Stderr, pretty.Sprint(cliui.DefaultStyles.Error, " (required)"))
|
||||
}
|
||||
if variable.Sensitive {
|
||||
_, _ = fmt.Fprint(inv.Stderr, pretty.Sprint(cliui.DefaultStyles.Warn, ", sensitive"))
|
||||
}
|
||||
_, _ = fmt.Fprintln(inv.Stderr, "")
|
||||
|
||||
if variable.Description != "" {
|
||||
_, _ = fmt.Fprintf(inv.Stderr, " Description: %s\n", variable.Description)
|
||||
}
|
||||
_, _ = fmt.Fprintf(inv.Stderr, " Type: %s\n", variable.Type)
|
||||
_, _ = fmt.Fprintf(inv.Stderr, " Current value: %s\n", pretty.Sprint(cliui.DefaultStyles.Placeholder, "<empty>"))
|
||||
}
|
||||
|
||||
func promptForBoolVariable(inv *serpent.Invocation, variable codersdk.TemplateVersionVariable) (string, error) {
|
||||
defaultValue := variable.DefaultValue
|
||||
if defaultValue == "" {
|
||||
defaultValue = "false"
|
||||
}
|
||||
|
||||
return cliui.Select(inv, cliui.SelectOptions{
|
||||
Options: []string{"true", "false"},
|
||||
Default: defaultValue,
|
||||
Message: "Select value:",
|
||||
})
|
||||
}
|
||||
|
||||
func promptForNumberVariable(inv *serpent.Invocation, variable codersdk.TemplateVersionVariable) (string, error) {
|
||||
prompt := "Enter value:"
|
||||
if !variable.Required && variable.DefaultValue != "" {
|
||||
prompt = fmt.Sprintf("Enter value (default: %q):", variable.DefaultValue)
|
||||
}
|
||||
|
||||
return cliui.Prompt(inv, cliui.PromptOptions{
|
||||
Text: prompt,
|
||||
Default: variable.DefaultValue,
|
||||
Validate: createVariableValidator(variable),
|
||||
})
|
||||
}
|
||||
|
||||
func promptForStringVariable(inv *serpent.Invocation, variable codersdk.TemplateVersionVariable) (string, error) {
|
||||
prompt := "Enter value:"
|
||||
if !variable.Sensitive {
|
||||
if !variable.Required && variable.DefaultValue != "" {
|
||||
prompt = fmt.Sprintf("Enter value (default: %q):", variable.DefaultValue)
|
||||
}
|
||||
}
|
||||
|
||||
return cliui.Prompt(inv, cliui.PromptOptions{
|
||||
Text: prompt,
|
||||
Default: variable.DefaultValue,
|
||||
Secret: variable.Sensitive,
|
||||
Validate: createVariableValidator(variable),
|
||||
})
|
||||
}
|
||||
|
||||
func createVariableValidator(variable codersdk.TemplateVersionVariable) func(string) error {
|
||||
return func(s string) error {
|
||||
if variable.Required && s == "" && variable.DefaultValue == "" {
|
||||
return xerrors.New("value is required")
|
||||
}
|
||||
if variable.Type == "number" && s != "" {
|
||||
if _, err := strconv.ParseFloat(s, 64); err != nil {
|
||||
return xerrors.Errorf("must be a valid number, got: %q", s)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
+234
-48
@@ -852,54 +852,6 @@ func TestTemplatePush(t *testing.T) {
|
||||
require.Equal(t, "foobar", templateVariables[1].Value)
|
||||
})
|
||||
|
||||
t.Run("VariableIsRequiredButNotProvided", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true})
|
||||
owner := coderdtest.CreateFirstUser(t, client)
|
||||
templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin())
|
||||
|
||||
templateVersion := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, createEchoResponsesWithTemplateVariables(initialTemplateVariables))
|
||||
_ = coderdtest.AwaitTemplateVersionJobCompleted(t, client, templateVersion.ID)
|
||||
template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, templateVersion.ID)
|
||||
|
||||
// Test the cli command.
|
||||
//nolint:gocritic
|
||||
modifiedTemplateVariables := append(initialTemplateVariables,
|
||||
&proto.TemplateVariable{
|
||||
Name: "second_variable",
|
||||
Description: "This is the second variable.",
|
||||
Type: "string",
|
||||
Required: true,
|
||||
},
|
||||
)
|
||||
source := clitest.CreateTemplateVersionSource(t, createEchoResponsesWithTemplateVariables(modifiedTemplateVariables))
|
||||
inv, root := clitest.New(t, "templates", "push", template.Name, "--directory", source, "--test.provisioner", string(database.ProvisionerTypeEcho), "--name", "example")
|
||||
clitest.SetupConfig(t, templateAdmin, root)
|
||||
pty := ptytest.New(t)
|
||||
inv.Stdin = pty.Input()
|
||||
inv.Stdout = pty.Output()
|
||||
|
||||
execDone := make(chan error)
|
||||
go func() {
|
||||
execDone <- inv.Run()
|
||||
}()
|
||||
|
||||
matches := []struct {
|
||||
match string
|
||||
write string
|
||||
}{
|
||||
{match: "Upload", write: "yes"},
|
||||
}
|
||||
for _, m := range matches {
|
||||
pty.ExpectMatch(m.match)
|
||||
pty.WriteLine(m.write)
|
||||
}
|
||||
|
||||
wantErr := <-execDone
|
||||
require.Error(t, wantErr)
|
||||
require.Contains(t, wantErr.Error(), "required template variables need values")
|
||||
})
|
||||
|
||||
t.Run("VariableIsOptionalButNotProvided", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true})
|
||||
@@ -1115,6 +1067,240 @@ func TestTemplatePush(t *testing.T) {
|
||||
require.Len(t, templateVersions, 2)
|
||||
require.Equal(t, "example", templateVersions[1].Name)
|
||||
})
|
||||
|
||||
t.Run("PromptForDifferentRequiredTypes", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true})
|
||||
owner := coderdtest.CreateFirstUser(t, client)
|
||||
templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin())
|
||||
|
||||
templateVariables := []*proto.TemplateVariable{
|
||||
{
|
||||
Name: "string_var",
|
||||
Description: "A string variable",
|
||||
Type: "string",
|
||||
Required: true,
|
||||
},
|
||||
{
|
||||
Name: "number_var",
|
||||
Description: "A number variable",
|
||||
Type: "number",
|
||||
Required: true,
|
||||
},
|
||||
{
|
||||
Name: "bool_var",
|
||||
Description: "A boolean variable",
|
||||
Type: "bool",
|
||||
Required: true,
|
||||
},
|
||||
{
|
||||
Name: "sensitive_var",
|
||||
Description: "A sensitive variable",
|
||||
Type: "string",
|
||||
Required: true,
|
||||
Sensitive: true,
|
||||
},
|
||||
}
|
||||
|
||||
source := clitest.CreateTemplateVersionSource(t, createEchoResponsesWithTemplateVariables(templateVariables))
|
||||
inv, root := clitest.New(t, "templates", "push", "test-template", "--directory", source, "--test.provisioner", string(database.ProvisionerTypeEcho))
|
||||
clitest.SetupConfig(t, templateAdmin, root)
|
||||
pty := ptytest.New(t).Attach(inv)
|
||||
|
||||
execDone := make(chan error)
|
||||
go func() {
|
||||
execDone <- inv.Run()
|
||||
}()
|
||||
|
||||
// Select "Yes" for the "Upload <template_path>" prompt
|
||||
pty.ExpectMatch("Upload")
|
||||
pty.WriteLine("yes")
|
||||
|
||||
pty.ExpectMatch("var.string_var")
|
||||
pty.ExpectMatch("Enter value:")
|
||||
pty.WriteLine("test-string")
|
||||
|
||||
pty.ExpectMatch("var.number_var")
|
||||
pty.ExpectMatch("Enter value:")
|
||||
pty.WriteLine("42")
|
||||
|
||||
// Boolean variable automatically selects the first option ("true")
|
||||
pty.ExpectMatch("var.bool_var")
|
||||
|
||||
pty.ExpectMatch("var.sensitive_var")
|
||||
pty.ExpectMatch("Enter value:")
|
||||
pty.WriteLine("secret-value")
|
||||
|
||||
require.NoError(t, <-execDone)
|
||||
})
|
||||
|
||||
t.Run("ValidateNumberInput", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true})
|
||||
owner := coderdtest.CreateFirstUser(t, client)
|
||||
templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin())
|
||||
|
||||
templateVariables := []*proto.TemplateVariable{
|
||||
{
|
||||
Name: "number_var",
|
||||
Description: "A number that requires validation",
|
||||
Type: "number",
|
||||
Required: true,
|
||||
},
|
||||
}
|
||||
|
||||
source := clitest.CreateTemplateVersionSource(t, createEchoResponsesWithTemplateVariables(templateVariables))
|
||||
inv, root := clitest.New(t, "templates", "push", "test-template", "--directory", source, "--test.provisioner", string(database.ProvisionerTypeEcho))
|
||||
clitest.SetupConfig(t, templateAdmin, root)
|
||||
pty := ptytest.New(t).Attach(inv)
|
||||
|
||||
execDone := make(chan error)
|
||||
go func() {
|
||||
execDone <- inv.Run()
|
||||
}()
|
||||
|
||||
// Select "Yes" for the "Upload <template_path>" prompt
|
||||
pty.ExpectMatch("Upload")
|
||||
pty.WriteLine("yes")
|
||||
|
||||
pty.ExpectMatch("var.number_var")
|
||||
|
||||
pty.WriteLine("not-a-number")
|
||||
pty.ExpectMatch("must be a valid number")
|
||||
|
||||
pty.WriteLine("123.45")
|
||||
|
||||
require.NoError(t, <-execDone)
|
||||
})
|
||||
|
||||
t.Run("DontPromptForDefaultValues", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true})
|
||||
owner := coderdtest.CreateFirstUser(t, client)
|
||||
templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin())
|
||||
|
||||
templateVariables := []*proto.TemplateVariable{
|
||||
{
|
||||
Name: "with_default",
|
||||
Type: "string",
|
||||
Required: true,
|
||||
DefaultValue: "default-value",
|
||||
},
|
||||
{
|
||||
Name: "without_default",
|
||||
Type: "string",
|
||||
Required: true,
|
||||
},
|
||||
}
|
||||
|
||||
source := clitest.CreateTemplateVersionSource(t, createEchoResponsesWithTemplateVariables(templateVariables))
|
||||
inv, root := clitest.New(t, "templates", "push", "test-template", "--directory", source, "--test.provisioner", string(database.ProvisionerTypeEcho))
|
||||
clitest.SetupConfig(t, templateAdmin, root)
|
||||
pty := ptytest.New(t).Attach(inv)
|
||||
|
||||
execDone := make(chan error)
|
||||
go func() {
|
||||
execDone <- inv.Run()
|
||||
}()
|
||||
|
||||
// Select "Yes" for the "Upload <template_path>" prompt
|
||||
pty.ExpectMatch("Upload")
|
||||
pty.WriteLine("yes")
|
||||
|
||||
pty.ExpectMatch("var.without_default")
|
||||
pty.WriteLine("test-value")
|
||||
|
||||
require.NoError(t, <-execDone)
|
||||
})
|
||||
|
||||
t.Run("VariableSourcesPriority", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true})
|
||||
owner := coderdtest.CreateFirstUser(t, client)
|
||||
templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin())
|
||||
|
||||
templateVariables := []*proto.TemplateVariable{
|
||||
{
|
||||
Name: "cli_flag_var",
|
||||
Description: "Variable provided via CLI flag",
|
||||
Type: "string",
|
||||
Required: true,
|
||||
},
|
||||
{
|
||||
Name: "file_var",
|
||||
Description: "Variable provided via file",
|
||||
Type: "string",
|
||||
Required: true,
|
||||
},
|
||||
{
|
||||
Name: "prompt_var",
|
||||
Description: "Variable provided via prompt",
|
||||
Type: "string",
|
||||
Required: true,
|
||||
},
|
||||
{
|
||||
Name: "cli_overrides_file_var",
|
||||
Description: "Variable in both CLI and file",
|
||||
Type: "string",
|
||||
Required: true,
|
||||
},
|
||||
}
|
||||
|
||||
source := clitest.CreateTemplateVersionSource(t, createEchoResponsesWithTemplateVariables(templateVariables))
|
||||
|
||||
// Create a temporary variables file.
|
||||
tempDir := t.TempDir()
|
||||
removeTmpDirUntilSuccessAfterTest(t, tempDir)
|
||||
variablesFile, err := os.CreateTemp(tempDir, "variables*.yaml")
|
||||
require.NoError(t, err)
|
||||
_, err = variablesFile.WriteString(`file_var: from-file
|
||||
cli_overrides_file_var: from-file`)
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, variablesFile.Close())
|
||||
|
||||
inv, root := clitest.New(t, "templates", "push", "test-template",
|
||||
"--directory", source,
|
||||
"--test.provisioner", string(database.ProvisionerTypeEcho),
|
||||
"--variables-file", variablesFile.Name(),
|
||||
"--variable", "cli_flag_var=from-cli-flag",
|
||||
"--variable", "cli_overrides_file_var=from-cli-override",
|
||||
)
|
||||
clitest.SetupConfig(t, templateAdmin, root)
|
||||
pty := ptytest.New(t).Attach(inv)
|
||||
|
||||
execDone := make(chan error)
|
||||
go func() {
|
||||
execDone <- inv.Run()
|
||||
}()
|
||||
|
||||
// Select "Yes" for the "Upload <template_path>" prompt
|
||||
pty.ExpectMatch("Upload")
|
||||
pty.WriteLine("yes")
|
||||
|
||||
// Only check for prompt_var, other variables should not prompt
|
||||
pty.ExpectMatch("var.prompt_var")
|
||||
pty.ExpectMatch("Enter value:")
|
||||
pty.WriteLine("from-prompt")
|
||||
|
||||
require.NoError(t, <-execDone)
|
||||
|
||||
template, err := client.TemplateByName(context.Background(), owner.OrganizationID, "test-template")
|
||||
require.NoError(t, err)
|
||||
|
||||
templateVersionVars, err := client.TemplateVersionVariables(context.Background(), template.ActiveVersionID)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, templateVersionVars, 4)
|
||||
|
||||
varMap := make(map[string]string)
|
||||
for _, tv := range templateVersionVars {
|
||||
varMap[tv.Name] = tv.Value
|
||||
}
|
||||
|
||||
require.Equal(t, "from-cli-flag", varMap["cli_flag_var"])
|
||||
require.Equal(t, "from-file", varMap["file_var"])
|
||||
require.Equal(t, "from-prompt", varMap["prompt_var"])
|
||||
require.Equal(t, "from-cli-override", varMap["cli_overrides_file_var"])
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -45,6 +45,7 @@
|
||||
"queue_position": 0,
|
||||
"queue_size": 0,
|
||||
"organization_id": "===========[first org ID]===========",
|
||||
"initiator_id": "==========[first user ID]===========",
|
||||
"input": {
|
||||
"workspace_build_id": "========[workspace build ID]========"
|
||||
},
|
||||
|
||||
+4
-1
@@ -11,9 +11,12 @@ OPTIONS:
|
||||
-O, --org string, $CODER_ORGANIZATION
|
||||
Select which organization (uuid or name) to use.
|
||||
|
||||
-c, --column [id|created at|started at|completed at|canceled at|error|error code|status|worker id|worker name|file id|tags|queue position|queue size|organization id|template version id|workspace build id|type|available workers|template version name|template id|template name|template display name|template icon|workspace id|workspace name|logs overflowed|organization|queue] (default: created at,id,type,template display name,status,queue,tags)
|
||||
-c, --column [id|created at|started at|completed at|canceled at|error|error code|status|worker id|worker name|file id|tags|queue position|queue size|organization id|initiator id|template version id|workspace build id|type|available workers|template version name|template id|template name|template display name|template icon|workspace id|workspace name|logs overflowed|organization|queue] (default: created at,id,type,template display name,status,queue,tags)
|
||||
Columns to display in table output.
|
||||
|
||||
-i, --initiator string, $CODER_PROVISIONER_JOB_LIST_INITIATOR
|
||||
Filter by initiator (user ID or username).
|
||||
|
||||
-l, --limit int, $CODER_PROVISIONER_JOB_LIST_LIMIT (default: 50)
|
||||
Limit the number of jobs returned.
|
||||
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
"queue_position": 0,
|
||||
"queue_size": 0,
|
||||
"organization_id": "===========[first org ID]===========",
|
||||
"initiator_id": "==========[first user ID]===========",
|
||||
"input": {
|
||||
"template_version_id": "============[version ID]============"
|
||||
},
|
||||
@@ -45,6 +46,7 @@
|
||||
"queue_position": 0,
|
||||
"queue_size": 0,
|
||||
"organization_id": "===========[first org ID]===========",
|
||||
"initiator_id": "==========[first user ID]===========",
|
||||
"input": {
|
||||
"workspace_build_id": "========[workspace build ID]========"
|
||||
},
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
"last_seen_at": "====[timestamp]=====",
|
||||
"name": "test-daemon",
|
||||
"version": "v0.0.0-devel",
|
||||
"api_version": "1.10",
|
||||
"api_version": "1.11",
|
||||
"provisioners": [
|
||||
"echo"
|
||||
],
|
||||
|
||||
@@ -61,6 +61,14 @@ func (a *ConnLogAPI) ReportConnection(ctx context.Context, req *agentproto.Repor
|
||||
return nil, xerrors.Errorf("get workspace by agent id: %w", err)
|
||||
}
|
||||
|
||||
// Some older clients may incorrectly report "localhost" as the IP address.
|
||||
// Related to https://github.com/coder/coder/issues/20194
|
||||
logIPRaw := req.GetConnection().GetIp()
|
||||
if logIPRaw == "localhost" {
|
||||
logIPRaw = "127.0.0.1"
|
||||
}
|
||||
logIP := database.ParseIP(logIPRaw) // will return null if invalid
|
||||
|
||||
reason := req.GetConnection().GetReason()
|
||||
connLogger := *a.ConnectionLogger.Load()
|
||||
err = connLogger.Upsert(ctx, database.UpsertConnectionLogParams{
|
||||
@@ -73,7 +81,7 @@ func (a *ConnLogAPI) ReportConnection(ctx context.Context, req *agentproto.Repor
|
||||
AgentName: workspaceAgent.Name,
|
||||
Type: connectionType,
|
||||
Code: code,
|
||||
Ip: database.ParseIP(req.GetConnection().GetIp()),
|
||||
Ip: logIP,
|
||||
ConnectionID: uuid.NullUUID{
|
||||
UUID: connectionID,
|
||||
Valid: true,
|
||||
|
||||
@@ -3,13 +3,11 @@ package agentapi_test
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"net"
|
||||
"sync/atomic"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/sqlc-dev/pqtype"
|
||||
"github.com/stretchr/testify/require"
|
||||
"go.uber.org/mock/gomock"
|
||||
"google.golang.org/protobuf/types/known/timestamppb"
|
||||
@@ -75,6 +73,9 @@ func TestConnectionLog(t *testing.T) {
|
||||
action: agentproto.Connection_CONNECT.Enum(),
|
||||
typ: agentproto.Connection_JETBRAINS.Enum(),
|
||||
time: dbtime.Now(),
|
||||
// Sometimes, JetBrains clients report as localhost, see
|
||||
// https://github.com/coder/coder/issues/20194
|
||||
ip: "localhost",
|
||||
},
|
||||
{
|
||||
name: "Reconnecting PTY Connect",
|
||||
@@ -129,6 +130,12 @@ func TestConnectionLog(t *testing.T) {
|
||||
},
|
||||
})
|
||||
|
||||
expectedIPRaw := tt.ip
|
||||
if expectedIPRaw == "localhost" {
|
||||
expectedIPRaw = "127.0.0.1"
|
||||
}
|
||||
expectedIP := database.ParseIP(expectedIPRaw)
|
||||
|
||||
require.True(t, connLogger.Contains(t, database.UpsertConnectionLogParams{
|
||||
Time: dbtime.Time(tt.time).In(time.UTC),
|
||||
OrganizationID: workspace.OrganizationID,
|
||||
@@ -146,7 +153,7 @@ func TestConnectionLog(t *testing.T) {
|
||||
Int32: tt.status,
|
||||
Valid: *tt.action == agentproto.Connection_DISCONNECT,
|
||||
},
|
||||
Ip: pqtype.Inet{Valid: true, IPNet: net.IPNet{IP: net.ParseIP(tt.ip), Mask: net.CIDRMask(32, 32)}},
|
||||
Ip: expectedIP,
|
||||
Type: agentProtoConnectionTypeToConnectionLog(t, *tt.typ),
|
||||
DisconnectReason: sql.NullString{
|
||||
String: tt.reason,
|
||||
|
||||
+119
-153
@@ -1,17 +1,13 @@
|
||||
package coderd
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"path"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
@@ -31,6 +27,8 @@ import (
|
||||
"github.com/coder/coder/v2/coderd/taskname"
|
||||
"github.com/coder/coder/v2/coderd/util/slice"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
|
||||
aiagentapi "github.com/coder/agentapi-sdk-go"
|
||||
)
|
||||
|
||||
// This endpoint is experimental and not guaranteed to be stable, so we're not
|
||||
@@ -84,8 +82,18 @@ func (api *API) aiTasksPrompts(rw http.ResponseWriter, r *http.Request) {
|
||||
})
|
||||
}
|
||||
|
||||
// This endpoint is experimental and not guaranteed to be stable, so we're not
|
||||
// generating public-facing documentation for it.
|
||||
// @Summary Create a new AI task
|
||||
// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable.
|
||||
// @ID create-task
|
||||
// @Security CoderSessionToken
|
||||
// @Tags Experimental
|
||||
// @Param user path string true "Username, user ID, or 'me' for the authenticated user"
|
||||
// @Param request body codersdk.CreateTaskRequest true "Create task request"
|
||||
// @Success 201 {object} codersdk.Task
|
||||
// @Router /api/experimental/tasks/{user} [post]
|
||||
//
|
||||
// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable.
|
||||
// This endpoint creates a new task for the given user.
|
||||
func (api *API) tasksCreate(rw http.ResponseWriter, r *http.Request) {
|
||||
var (
|
||||
ctx = r.Context()
|
||||
@@ -260,6 +268,14 @@ func taskFromWorkspace(ws codersdk.Workspace, initialPrompt string) codersdk.Tas
|
||||
}
|
||||
}
|
||||
|
||||
var appID uuid.NullUUID
|
||||
if ws.LatestBuild.AITaskSidebarAppID != nil {
|
||||
appID = uuid.NullUUID{
|
||||
Valid: true,
|
||||
UUID: *ws.LatestBuild.AITaskSidebarAppID,
|
||||
}
|
||||
}
|
||||
|
||||
return codersdk.Task{
|
||||
ID: ws.ID,
|
||||
OrganizationID: ws.OrganizationID,
|
||||
@@ -271,9 +287,11 @@ func taskFromWorkspace(ws codersdk.Workspace, initialPrompt string) codersdk.Tas
|
||||
TemplateDisplayName: ws.TemplateDisplayName,
|
||||
TemplateIcon: ws.TemplateIcon,
|
||||
WorkspaceID: uuid.NullUUID{Valid: true, UUID: ws.ID},
|
||||
WorkspaceBuildNumber: ws.LatestBuild.BuildNumber,
|
||||
WorkspaceAgentID: taskAgentID,
|
||||
WorkspaceAgentLifecycle: taskAgentLifecycle,
|
||||
WorkspaceAgentHealth: taskAgentHealth,
|
||||
WorkspaceAppID: appID,
|
||||
CreatedAt: ws.CreatedAt,
|
||||
UpdatedAt: ws.UpdatedAt,
|
||||
InitialPrompt: initialPrompt,
|
||||
@@ -318,6 +336,19 @@ type tasksListResponse struct {
|
||||
Count int `json:"count"`
|
||||
}
|
||||
|
||||
// @Summary List AI tasks
|
||||
// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable.
|
||||
// @ID list-tasks
|
||||
// @Security CoderSessionToken
|
||||
// @Tags Experimental
|
||||
// @Param q query string false "Search query for filtering tasks"
|
||||
// @Param after_id query string false "Return tasks after this ID for pagination"
|
||||
// @Param limit query int false "Maximum number of tasks to return" minimum(1) maximum(100) default(25)
|
||||
// @Param offset query int false "Offset for pagination" minimum(0) default(0)
|
||||
// @Success 200 {object} coderd.tasksListResponse
|
||||
// @Router /api/experimental/tasks [get]
|
||||
//
|
||||
// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable.
|
||||
// tasksList is an experimental endpoint to list AI tasks by mapping
|
||||
// workspaces to a task-shaped response.
|
||||
func (api *API) tasksList(rw http.ResponseWriter, r *http.Request) {
|
||||
@@ -421,6 +452,17 @@ func (api *API) tasksList(rw http.ResponseWriter, r *http.Request) {
|
||||
})
|
||||
}
|
||||
|
||||
// @Summary Get AI task by ID
|
||||
// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable.
|
||||
// @ID get-task
|
||||
// @Security CoderSessionToken
|
||||
// @Tags Experimental
|
||||
// @Param user path string true "Username, user ID, or 'me' for the authenticated user"
|
||||
// @Param id path string true "Task ID" format(uuid)
|
||||
// @Success 200 {object} codersdk.Task
|
||||
// @Router /api/experimental/tasks/{user}/{id} [get]
|
||||
//
|
||||
// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable.
|
||||
// taskGet is an experimental endpoint to fetch a single AI task by ID
|
||||
// (workspace ID). It returns a synthesized task response including
|
||||
// prompt and status.
|
||||
@@ -527,6 +569,17 @@ func (api *API) taskGet(rw http.ResponseWriter, r *http.Request) {
|
||||
httpapi.Write(ctx, rw, http.StatusOK, tasks[0])
|
||||
}
|
||||
|
||||
// @Summary Delete AI task by ID
|
||||
// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable.
|
||||
// @ID delete-task
|
||||
// @Security CoderSessionToken
|
||||
// @Tags Experimental
|
||||
// @Param user path string true "Username, user ID, or 'me' for the authenticated user"
|
||||
// @Param id path string true "Task ID" format(uuid)
|
||||
// @Success 202 "Task deletion initiated"
|
||||
// @Router /api/experimental/tasks/{user}/{id} [delete]
|
||||
//
|
||||
// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable.
|
||||
// taskDelete is an experimental endpoint to delete a task by ID (workspace ID).
|
||||
// It creates a delete workspace build and returns 202 Accepted if the build was
|
||||
// created.
|
||||
@@ -602,6 +655,18 @@ func (api *API) taskDelete(rw http.ResponseWriter, r *http.Request) {
|
||||
rw.WriteHeader(http.StatusAccepted)
|
||||
}
|
||||
|
||||
// @Summary Send input to AI task
|
||||
// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable.
|
||||
// @ID send-task-input
|
||||
// @Security CoderSessionToken
|
||||
// @Tags Experimental
|
||||
// @Param user path string true "Username, user ID, or 'me' for the authenticated user"
|
||||
// @Param id path string true "Task ID" format(uuid)
|
||||
// @Param request body codersdk.TaskSendRequest true "Task input request"
|
||||
// @Success 204 "Input sent successfully"
|
||||
// @Router /api/experimental/tasks/{user}/{id}/send [post]
|
||||
//
|
||||
// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable.
|
||||
// taskSend submits task input to the tasks sidebar app by dialing the agent
|
||||
// directly over the tailnet. We enforce ApplicationConnect RBAC on the
|
||||
// workspace and validate the sidebar app health.
|
||||
@@ -629,64 +694,40 @@ func (api *API) taskSend(rw http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
if err = api.authAndDoWithTaskSidebarAppClient(r, taskID, func(ctx context.Context, client *http.Client, appURL *url.URL) error {
|
||||
status, err := agentapiDoStatusRequest(ctx, client, appURL)
|
||||
agentAPIClient, err := aiagentapi.NewClient(appURL.String(), aiagentapi.WithHTTPClient(client))
|
||||
if err != nil {
|
||||
return err
|
||||
return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{
|
||||
Message: "Failed to create agentapi client.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
}
|
||||
|
||||
if status != "stable" {
|
||||
statusResp, err := agentAPIClient.GetStatus(ctx)
|
||||
if err != nil {
|
||||
return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{
|
||||
Message: "Failed to get status from task app.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
}
|
||||
|
||||
if statusResp.Status != aiagentapi.StatusStable {
|
||||
return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{
|
||||
Message: "Task app is not ready to accept input.",
|
||||
Detail: fmt.Sprintf("Status: %s", status),
|
||||
Detail: fmt.Sprintf("Status: %s", statusResp.Status),
|
||||
})
|
||||
}
|
||||
|
||||
var reqBody struct {
|
||||
Content string `json:"content"`
|
||||
Type string `json:"type"`
|
||||
}
|
||||
reqBody.Content = req.Input
|
||||
reqBody.Type = "user"
|
||||
|
||||
req, err := agentapiNewRequest(ctx, http.MethodPost, appURL, "message", reqBody)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
resp, err := client.Do(req)
|
||||
_, err = agentAPIClient.PostMessage(ctx, aiagentapi.PostMessageParams{
|
||||
Content: req.Input,
|
||||
Type: aiagentapi.MessageTypeUser,
|
||||
})
|
||||
if err != nil {
|
||||
return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{
|
||||
Message: "Failed to reach task app endpoint.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
body, _ := io.ReadAll(io.LimitReader(resp.Body, 128))
|
||||
return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{
|
||||
Message: "Task app rejected the message.",
|
||||
Detail: fmt.Sprintf("Upstream status: %d; Body: %s", resp.StatusCode, body),
|
||||
})
|
||||
}
|
||||
|
||||
// {"$schema":"http://localhost:3284/schemas/MessageResponseBody.json","ok":true}
|
||||
// {"$schema":"http://localhost:3284/schemas/ErrorModel.json","title":"Unprocessable Entity","status":422,"detail":"validation failed","errors":[{"location":"body.type","value":"oof"}]}
|
||||
var respBody map[string]any
|
||||
if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil {
|
||||
return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{
|
||||
Message: "Failed to decode task app response body.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
}
|
||||
|
||||
if v, ok := respBody["ok"].(bool); !ok || !v {
|
||||
return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{
|
||||
Message: "Task app rejected the message.",
|
||||
Detail: fmt.Sprintf("Upstream response: %v", respBody),
|
||||
})
|
||||
}
|
||||
|
||||
return nil
|
||||
}); err != nil {
|
||||
httperror.WriteResponseError(ctx, rw, err)
|
||||
@@ -696,6 +737,19 @@ func (api *API) taskSend(rw http.ResponseWriter, r *http.Request) {
|
||||
rw.WriteHeader(http.StatusNoContent)
|
||||
}
|
||||
|
||||
// @Summary Get AI task logs
|
||||
// @Description: EXPERIMENTAL: this endpoint is experimental and not guaranteed to be stable.
|
||||
// @ID get-task-logs
|
||||
// @Security CoderSessionToken
|
||||
// @Tags Experimental
|
||||
// @Param user path string true "Username, user ID, or 'me' for the authenticated user"
|
||||
// @Param id path string true "Task ID" format(uuid)
|
||||
// @Success 200 {object} codersdk.TaskLogsResponse
|
||||
// @Router /api/experimental/tasks/{user}/{id}/logs [get]
|
||||
//
|
||||
// EXPERIMENTAL: This endpoint is experimental and not guaranteed to be stable.
|
||||
// taskLogs reads task output by dialing the agent directly over the tailnet.
|
||||
// We enforce ApplicationConnect RBAC on the workspace and validate the sidebar app health.
|
||||
func (api *API) taskLogs(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
|
||||
@@ -710,51 +764,29 @@ func (api *API) taskLogs(rw http.ResponseWriter, r *http.Request) {
|
||||
|
||||
var out codersdk.TaskLogsResponse
|
||||
if err := api.authAndDoWithTaskSidebarAppClient(r, taskID, func(ctx context.Context, client *http.Client, appURL *url.URL) error {
|
||||
req, err := agentapiNewRequest(ctx, http.MethodGet, appURL, "messages", nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
resp, err := client.Do(req)
|
||||
agentAPIClient, err := aiagentapi.NewClient(appURL.String(), aiagentapi.WithHTTPClient(client))
|
||||
if err != nil {
|
||||
return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{
|
||||
Message: "Failed to reach task app endpoint.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
body, _ := io.ReadAll(io.LimitReader(resp.Body, 128))
|
||||
return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{
|
||||
Message: "Task app rejected the request.",
|
||||
Detail: fmt.Sprintf("Upstream status: %d; Body: %s", resp.StatusCode, body),
|
||||
})
|
||||
}
|
||||
|
||||
// {"$schema":"http://localhost:3284/schemas/MessagesResponseBody.json","messages":[]}
|
||||
var respBody struct {
|
||||
Messages []struct {
|
||||
ID int `json:"id"`
|
||||
Content string `json:"content"`
|
||||
Role string `json:"role"`
|
||||
Time time.Time `json:"time"`
|
||||
} `json:"messages"`
|
||||
}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil {
|
||||
return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{
|
||||
Message: "Failed to decode task app response body.",
|
||||
Message: "Failed to create agentapi client.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
}
|
||||
|
||||
logs := make([]codersdk.TaskLogEntry, 0, len(respBody.Messages))
|
||||
for _, m := range respBody.Messages {
|
||||
messagesResp, err := agentAPIClient.GetMessages(ctx)
|
||||
if err != nil {
|
||||
return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{
|
||||
Message: "Failed to get messages from task app.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
}
|
||||
|
||||
logs := make([]codersdk.TaskLogEntry, 0, len(messagesResp.Messages))
|
||||
for _, m := range messagesResp.Messages {
|
||||
var typ codersdk.TaskLogType
|
||||
switch strings.ToLower(m.Role) {
|
||||
case "user":
|
||||
switch m.Role {
|
||||
case aiagentapi.RoleUser:
|
||||
typ = codersdk.TaskLogTypeInput
|
||||
case "agent":
|
||||
case aiagentapi.RoleAgent:
|
||||
typ = codersdk.TaskLogTypeOutput
|
||||
default:
|
||||
return httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{
|
||||
@@ -763,7 +795,7 @@ func (api *API) taskLogs(rw http.ResponseWriter, r *http.Request) {
|
||||
})
|
||||
}
|
||||
logs = append(logs, codersdk.TaskLogEntry{
|
||||
ID: m.ID,
|
||||
ID: int(m.Id),
|
||||
Content: m.Content,
|
||||
Type: typ,
|
||||
Time: m.Time,
|
||||
@@ -903,69 +935,3 @@ func (api *API) authAndDoWithTaskSidebarAppClient(
|
||||
}
|
||||
return do(ctx, client, parsedURL)
|
||||
}
|
||||
|
||||
func agentapiNewRequest(ctx context.Context, method string, appURL *url.URL, appURLPath string, body any) (*http.Request, error) {
|
||||
u := *appURL
|
||||
u.Path = path.Join(appURL.Path, appURLPath)
|
||||
|
||||
var bodyReader io.Reader
|
||||
if body != nil {
|
||||
b, err := json.Marshal(body)
|
||||
if err != nil {
|
||||
return nil, httperror.NewResponseError(http.StatusBadRequest, codersdk.Response{
|
||||
Message: "Failed to marshal task app request body.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
}
|
||||
bodyReader = bytes.NewReader(b)
|
||||
}
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, method, u.String(), bodyReader)
|
||||
if err != nil {
|
||||
return nil, httperror.NewResponseError(http.StatusBadRequest, codersdk.Response{
|
||||
Message: "Failed to create task app request.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
}
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("Accept", "application/json")
|
||||
|
||||
return req, nil
|
||||
}
|
||||
|
||||
func agentapiDoStatusRequest(ctx context.Context, client *http.Client, appURL *url.URL) (string, error) {
|
||||
req, err := agentapiNewRequest(ctx, http.MethodGet, appURL, "status", nil)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return "", httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{
|
||||
Message: "Failed to reach task app endpoint.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return "", httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{
|
||||
Message: "Task app status returned an error.",
|
||||
Detail: fmt.Sprintf("Status code: %d", resp.StatusCode),
|
||||
})
|
||||
}
|
||||
|
||||
// {"$schema":"http://localhost:3284/schemas/StatusResponseBody.json","status":"stable"}
|
||||
var respBody struct {
|
||||
Status string `json:"status"`
|
||||
}
|
||||
|
||||
if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil {
|
||||
return "", httperror.NewResponseError(http.StatusBadGateway, codersdk.Response{
|
||||
Message: "Failed to decode task app status response body.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
}
|
||||
|
||||
return respBody.Status, nil
|
||||
}
|
||||
|
||||
+97
-4
@@ -6,8 +6,10 @@ import (
|
||||
"io"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/stretchr/testify/assert"
|
||||
@@ -977,6 +979,7 @@ func TestTasksNotification(t *testing.T) {
|
||||
isAITask bool
|
||||
isNotificationSent bool
|
||||
notificationTemplate uuid.UUID
|
||||
taskPrompt string
|
||||
}{
|
||||
// Should not send a notification when the agent app is not an AI task.
|
||||
{
|
||||
@@ -985,6 +988,7 @@ func TestTasksNotification(t *testing.T) {
|
||||
newAppStatus: codersdk.WorkspaceAppStatusStateWorking,
|
||||
isAITask: false,
|
||||
isNotificationSent: false,
|
||||
taskPrompt: "NoAITask",
|
||||
},
|
||||
// Should not send a notification when the new app status is neither 'Working' nor 'Idle'.
|
||||
{
|
||||
@@ -993,6 +997,7 @@ func TestTasksNotification(t *testing.T) {
|
||||
newAppStatus: codersdk.WorkspaceAppStatusStateComplete,
|
||||
isAITask: true,
|
||||
isNotificationSent: false,
|
||||
taskPrompt: "NonNotifiedState",
|
||||
},
|
||||
// Should not send a notification when the new app status equals the latest status (Working).
|
||||
{
|
||||
@@ -1001,15 +1006,27 @@ func TestTasksNotification(t *testing.T) {
|
||||
newAppStatus: codersdk.WorkspaceAppStatusStateWorking,
|
||||
isAITask: true,
|
||||
isNotificationSent: false,
|
||||
taskPrompt: "NonNotifiedTransition",
|
||||
},
|
||||
// Should send TemplateTaskWorking when the AI task transitions to 'Working'.
|
||||
// Should NOT send TemplateTaskWorking when the AI task's FIRST status is 'Working' (obvious state).
|
||||
{
|
||||
name: "TemplateTaskWorking",
|
||||
latestAppStatuses: nil,
|
||||
newAppStatus: codersdk.WorkspaceAppStatusStateWorking,
|
||||
isAITask: true,
|
||||
isNotificationSent: true,
|
||||
isNotificationSent: false,
|
||||
notificationTemplate: notifications.TemplateTaskWorking,
|
||||
taskPrompt: "TemplateTaskWorking",
|
||||
},
|
||||
// Should send TemplateTaskIdle when the AI task's FIRST status is 'Idle' (task completed immediately).
|
||||
{
|
||||
name: "InitialTemplateTaskIdle",
|
||||
latestAppStatuses: nil,
|
||||
newAppStatus: codersdk.WorkspaceAppStatusStateIdle,
|
||||
isAITask: true,
|
||||
isNotificationSent: true,
|
||||
notificationTemplate: notifications.TemplateTaskIdle,
|
||||
taskPrompt: "InitialTemplateTaskIdle",
|
||||
},
|
||||
// Should send TemplateTaskWorking when the AI task transitions to 'Working' from 'Idle'.
|
||||
{
|
||||
@@ -1022,6 +1039,7 @@ func TestTasksNotification(t *testing.T) {
|
||||
isAITask: true,
|
||||
isNotificationSent: true,
|
||||
notificationTemplate: notifications.TemplateTaskWorking,
|
||||
taskPrompt: "TemplateTaskWorkingFromIdle",
|
||||
},
|
||||
// Should send TemplateTaskIdle when the AI task transitions to 'Idle'.
|
||||
{
|
||||
@@ -1031,6 +1049,75 @@ func TestTasksNotification(t *testing.T) {
|
||||
isAITask: true,
|
||||
isNotificationSent: true,
|
||||
notificationTemplate: notifications.TemplateTaskIdle,
|
||||
taskPrompt: "TemplateTaskIdle",
|
||||
},
|
||||
// Long task prompts should be truncated to 160 characters.
|
||||
{
|
||||
name: "LongTaskPrompt",
|
||||
latestAppStatuses: []codersdk.WorkspaceAppStatusState{codersdk.WorkspaceAppStatusStateWorking},
|
||||
newAppStatus: codersdk.WorkspaceAppStatusStateIdle,
|
||||
isAITask: true,
|
||||
isNotificationSent: true,
|
||||
notificationTemplate: notifications.TemplateTaskIdle,
|
||||
taskPrompt: "This is a very long task prompt that should be truncated to 160 characters. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.",
|
||||
},
|
||||
// Should send TemplateTaskCompleted when the AI task transitions to 'Complete'.
|
||||
{
|
||||
name: "TemplateTaskCompleted",
|
||||
latestAppStatuses: []codersdk.WorkspaceAppStatusState{codersdk.WorkspaceAppStatusStateWorking},
|
||||
newAppStatus: codersdk.WorkspaceAppStatusStateComplete,
|
||||
isAITask: true,
|
||||
isNotificationSent: true,
|
||||
notificationTemplate: notifications.TemplateTaskCompleted,
|
||||
taskPrompt: "TemplateTaskCompleted",
|
||||
},
|
||||
// Should send TemplateTaskFailed when the AI task transitions to 'Failure'.
|
||||
{
|
||||
name: "TemplateTaskFailed",
|
||||
latestAppStatuses: []codersdk.WorkspaceAppStatusState{codersdk.WorkspaceAppStatusStateWorking},
|
||||
newAppStatus: codersdk.WorkspaceAppStatusStateFailure,
|
||||
isAITask: true,
|
||||
isNotificationSent: true,
|
||||
notificationTemplate: notifications.TemplateTaskFailed,
|
||||
taskPrompt: "TemplateTaskFailed",
|
||||
},
|
||||
// Should send TemplateTaskCompleted when the AI task transitions from 'Idle' to 'Complete'.
|
||||
{
|
||||
name: "TemplateTaskCompletedFromIdle",
|
||||
latestAppStatuses: []codersdk.WorkspaceAppStatusState{codersdk.WorkspaceAppStatusStateIdle},
|
||||
newAppStatus: codersdk.WorkspaceAppStatusStateComplete,
|
||||
isAITask: true,
|
||||
isNotificationSent: true,
|
||||
notificationTemplate: notifications.TemplateTaskCompleted,
|
||||
taskPrompt: "TemplateTaskCompletedFromIdle",
|
||||
},
|
||||
// Should send TemplateTaskFailed when the AI task transitions from 'Idle' to 'Failure'.
|
||||
{
|
||||
name: "TemplateTaskFailedFromIdle",
|
||||
latestAppStatuses: []codersdk.WorkspaceAppStatusState{codersdk.WorkspaceAppStatusStateIdle},
|
||||
newAppStatus: codersdk.WorkspaceAppStatusStateFailure,
|
||||
isAITask: true,
|
||||
isNotificationSent: true,
|
||||
notificationTemplate: notifications.TemplateTaskFailed,
|
||||
taskPrompt: "TemplateTaskFailedFromIdle",
|
||||
},
|
||||
// Should NOT send notification when transitioning from 'Complete' to 'Complete' (no change).
|
||||
{
|
||||
name: "NoNotificationCompleteToComplete",
|
||||
latestAppStatuses: []codersdk.WorkspaceAppStatusState{codersdk.WorkspaceAppStatusStateComplete},
|
||||
newAppStatus: codersdk.WorkspaceAppStatusStateComplete,
|
||||
isAITask: true,
|
||||
isNotificationSent: false,
|
||||
taskPrompt: "NoNotificationCompleteToComplete",
|
||||
},
|
||||
// Should NOT send notification when transitioning from 'Failure' to 'Failure' (no change).
|
||||
{
|
||||
name: "NoNotificationFailureToFailure",
|
||||
latestAppStatuses: []codersdk.WorkspaceAppStatusState{codersdk.WorkspaceAppStatusStateFailure},
|
||||
newAppStatus: codersdk.WorkspaceAppStatusStateFailure,
|
||||
isAITask: true,
|
||||
isNotificationSent: false,
|
||||
taskPrompt: "NoNotificationFailureToFailure",
|
||||
},
|
||||
} {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
@@ -1067,7 +1154,7 @@ func TestTasksNotification(t *testing.T) {
|
||||
}).Seed(workspaceBuildSeed).Params(database.WorkspaceBuildParameter{
|
||||
WorkspaceBuildID: workspaceBuildID,
|
||||
Name: codersdk.AITaskPromptParameterName,
|
||||
Value: "task prompt",
|
||||
Value: tc.taskPrompt,
|
||||
}).WithAgent(func(agent []*proto.Agent) []*proto.Agent {
|
||||
agent[0].Apps = []*proto.App{{
|
||||
Id: workspaceAgentAppID.String(),
|
||||
@@ -1115,7 +1202,13 @@ func TestTasksNotification(t *testing.T) {
|
||||
require.Len(t, sent, 1)
|
||||
require.Equal(t, memberUser.ID, sent[0].UserID)
|
||||
require.Len(t, sent[0].Labels, 2)
|
||||
require.Equal(t, "task prompt", sent[0].Labels["task"])
|
||||
// NOTE: len(string) is the number of bytes in the string, not the number of runes.
|
||||
require.LessOrEqual(t, utf8.RuneCountInString(sent[0].Labels["task"]), 160)
|
||||
if len(tc.taskPrompt) > 160 {
|
||||
require.Contains(t, tc.taskPrompt, strings.TrimSuffix(sent[0].Labels["task"], "…"))
|
||||
} else {
|
||||
require.Equal(t, tc.taskPrompt, sent[0].Labels["task"])
|
||||
}
|
||||
require.Equal(t, workspace.Name, sent[0].Labels["workspace"])
|
||||
} else {
|
||||
// Then: No notification is sent
|
||||
|
||||
Generated
+808
-4
@@ -130,6 +130,256 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"/api/experimental/tasks": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
],
|
||||
"tags": [
|
||||
"Experimental"
|
||||
],
|
||||
"summary": "List AI tasks",
|
||||
"operationId": "list-tasks",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Search query for filtering tasks",
|
||||
"name": "q",
|
||||
"in": "query"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Return tasks after this ID for pagination",
|
||||
"name": "after_id",
|
||||
"in": "query"
|
||||
},
|
||||
{
|
||||
"maximum": 100,
|
||||
"minimum": 1,
|
||||
"type": "integer",
|
||||
"default": 25,
|
||||
"description": "Maximum number of tasks to return",
|
||||
"name": "limit",
|
||||
"in": "query"
|
||||
},
|
||||
{
|
||||
"minimum": 0,
|
||||
"type": "integer",
|
||||
"default": 0,
|
||||
"description": "Offset for pagination",
|
||||
"name": "offset",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/coderd.tasksListResponse"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/api/experimental/tasks/{user}": {
|
||||
"post": {
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
],
|
||||
"tags": [
|
||||
"Experimental"
|
||||
],
|
||||
"summary": "Create a new AI task",
|
||||
"operationId": "create-task",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Username, user ID, or 'me' for the authenticated user",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"description": "Create task request",
|
||||
"name": "request",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.CreateTaskRequest"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "Created",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.Task"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/api/experimental/tasks/{user}/{id}": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
],
|
||||
"tags": [
|
||||
"Experimental"
|
||||
],
|
||||
"summary": "Get AI task by ID",
|
||||
"operationId": "get-task",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Username, user ID, or 'me' for the authenticated user",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "Task ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.Task"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
],
|
||||
"tags": [
|
||||
"Experimental"
|
||||
],
|
||||
"summary": "Delete AI task by ID",
|
||||
"operationId": "delete-task",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Username, user ID, or 'me' for the authenticated user",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "Task ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"202": {
|
||||
"description": "Task deletion initiated"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/api/experimental/tasks/{user}/{id}/logs": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
],
|
||||
"tags": [
|
||||
"Experimental"
|
||||
],
|
||||
"summary": "Get AI task logs",
|
||||
"operationId": "get-task-logs",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Username, user ID, or 'me' for the authenticated user",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "Task ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.TaskLogsResponse"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/api/experimental/tasks/{user}/{id}/send": {
|
||||
"post": {
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
],
|
||||
"tags": [
|
||||
"Experimental"
|
||||
],
|
||||
"summary": "Send input to AI task",
|
||||
"operationId": "send-task-input",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Username, user ID, or 'me' for the authenticated user",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "Task ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"description": "Task input request",
|
||||
"name": "request",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.TaskSendRequest"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"204": {
|
||||
"description": "Input sent successfully"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/appearance": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -3744,6 +3994,13 @@ const docTemplate = `{
|
||||
"description": "Provisioner tags to filter by (JSON of the form {'tag1':'value1','tag2':'value2'})",
|
||||
"name": "tags",
|
||||
"in": "query"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "Filter results by initiator",
|
||||
"name": "initiator",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
@@ -11229,6 +11486,20 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"coderd.tasksListResponse": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"count": {
|
||||
"type": "integer"
|
||||
},
|
||||
"tasks": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/codersdk.Task"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.ACLAvailable": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -11442,6 +11713,17 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.APIAllowListTarget": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"$ref": "#/definitions/codersdk.RBACResource"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.APIKey": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
@@ -11523,11 +11805,29 @@ const docTemplate = `{
|
||||
"enum": [
|
||||
"all",
|
||||
"application_connect",
|
||||
"aibridge_interception:*",
|
||||
"aibridge_interception:create",
|
||||
"aibridge_interception:read",
|
||||
"aibridge_interception:update",
|
||||
"api_key:*",
|
||||
"api_key:create",
|
||||
"api_key:delete",
|
||||
"api_key:read",
|
||||
"api_key:update",
|
||||
"assign_org_role:*",
|
||||
"assign_org_role:assign",
|
||||
"assign_org_role:create",
|
||||
"assign_org_role:delete",
|
||||
"assign_org_role:read",
|
||||
"assign_org_role:unassign",
|
||||
"assign_org_role:update",
|
||||
"assign_role:*",
|
||||
"assign_role:assign",
|
||||
"assign_role:read",
|
||||
"assign_role:unassign",
|
||||
"audit_log:*",
|
||||
"audit_log:create",
|
||||
"audit_log:read",
|
||||
"coder:all",
|
||||
"coder:apikeys.manage_self",
|
||||
"coder:application_connect",
|
||||
@@ -11537,40 +11837,193 @@ const docTemplate = `{
|
||||
"coder:workspaces.create",
|
||||
"coder:workspaces.delete",
|
||||
"coder:workspaces.operate",
|
||||
"connection_log:*",
|
||||
"connection_log:read",
|
||||
"connection_log:update",
|
||||
"crypto_key:*",
|
||||
"crypto_key:create",
|
||||
"crypto_key:delete",
|
||||
"crypto_key:read",
|
||||
"crypto_key:update",
|
||||
"debug_info:*",
|
||||
"debug_info:read",
|
||||
"deployment_config:*",
|
||||
"deployment_config:read",
|
||||
"deployment_config:update",
|
||||
"deployment_stats:*",
|
||||
"deployment_stats:read",
|
||||
"file:*",
|
||||
"file:create",
|
||||
"file:read",
|
||||
"group:*",
|
||||
"group:create",
|
||||
"group:delete",
|
||||
"group:read",
|
||||
"group:update",
|
||||
"group_member:*",
|
||||
"group_member:read",
|
||||
"idpsync_settings:*",
|
||||
"idpsync_settings:read",
|
||||
"idpsync_settings:update",
|
||||
"inbox_notification:*",
|
||||
"inbox_notification:create",
|
||||
"inbox_notification:read",
|
||||
"inbox_notification:update",
|
||||
"license:*",
|
||||
"license:create",
|
||||
"license:delete",
|
||||
"license:read",
|
||||
"notification_message:*",
|
||||
"notification_message:create",
|
||||
"notification_message:delete",
|
||||
"notification_message:read",
|
||||
"notification_message:update",
|
||||
"notification_preference:*",
|
||||
"notification_preference:read",
|
||||
"notification_preference:update",
|
||||
"notification_template:*",
|
||||
"notification_template:read",
|
||||
"notification_template:update",
|
||||
"oauth2_app:*",
|
||||
"oauth2_app:create",
|
||||
"oauth2_app:delete",
|
||||
"oauth2_app:read",
|
||||
"oauth2_app:update",
|
||||
"oauth2_app_code_token:*",
|
||||
"oauth2_app_code_token:create",
|
||||
"oauth2_app_code_token:delete",
|
||||
"oauth2_app_code_token:read",
|
||||
"oauth2_app_secret:*",
|
||||
"oauth2_app_secret:create",
|
||||
"oauth2_app_secret:delete",
|
||||
"oauth2_app_secret:read",
|
||||
"oauth2_app_secret:update",
|
||||
"organization:*",
|
||||
"organization:create",
|
||||
"organization:delete",
|
||||
"organization:read",
|
||||
"organization:update",
|
||||
"organization_member:*",
|
||||
"organization_member:create",
|
||||
"organization_member:delete",
|
||||
"organization_member:read",
|
||||
"organization_member:update",
|
||||
"prebuilt_workspace:*",
|
||||
"prebuilt_workspace:delete",
|
||||
"prebuilt_workspace:update",
|
||||
"provisioner_daemon:*",
|
||||
"provisioner_daemon:create",
|
||||
"provisioner_daemon:delete",
|
||||
"provisioner_daemon:read",
|
||||
"provisioner_daemon:update",
|
||||
"provisioner_jobs:*",
|
||||
"provisioner_jobs:create",
|
||||
"provisioner_jobs:read",
|
||||
"provisioner_jobs:update",
|
||||
"replicas:*",
|
||||
"replicas:read",
|
||||
"system:*",
|
||||
"system:create",
|
||||
"system:delete",
|
||||
"system:read",
|
||||
"system:update",
|
||||
"tailnet_coordinator:*",
|
||||
"tailnet_coordinator:create",
|
||||
"tailnet_coordinator:delete",
|
||||
"tailnet_coordinator:read",
|
||||
"tailnet_coordinator:update",
|
||||
"task:*",
|
||||
"task:create",
|
||||
"task:delete",
|
||||
"task:read",
|
||||
"task:update",
|
||||
"template:*",
|
||||
"template:create",
|
||||
"template:delete",
|
||||
"template:read",
|
||||
"template:update",
|
||||
"template:use",
|
||||
"template:view_insights",
|
||||
"usage_event:*",
|
||||
"usage_event:create",
|
||||
"usage_event:read",
|
||||
"usage_event:update",
|
||||
"user:*",
|
||||
"user:create",
|
||||
"user:delete",
|
||||
"user:read",
|
||||
"user:read_personal",
|
||||
"user:update",
|
||||
"user:update_personal",
|
||||
"user_secret:*",
|
||||
"user_secret:create",
|
||||
"user_secret:delete",
|
||||
"user_secret:read",
|
||||
"user_secret:update",
|
||||
"webpush_subscription:*",
|
||||
"webpush_subscription:create",
|
||||
"webpush_subscription:delete",
|
||||
"webpush_subscription:read",
|
||||
"workspace:*",
|
||||
"workspace:application_connect",
|
||||
"workspace:create",
|
||||
"workspace:create_agent",
|
||||
"workspace:delete",
|
||||
"workspace:delete_agent",
|
||||
"workspace:read",
|
||||
"workspace:ssh",
|
||||
"workspace:start",
|
||||
"workspace:stop",
|
||||
"workspace:update"
|
||||
"workspace:update",
|
||||
"workspace_agent_devcontainers:*",
|
||||
"workspace_agent_devcontainers:create",
|
||||
"workspace_agent_resource_monitor:*",
|
||||
"workspace_agent_resource_monitor:create",
|
||||
"workspace_agent_resource_monitor:read",
|
||||
"workspace_agent_resource_monitor:update",
|
||||
"workspace_dormant:*",
|
||||
"workspace_dormant:application_connect",
|
||||
"workspace_dormant:create",
|
||||
"workspace_dormant:create_agent",
|
||||
"workspace_dormant:delete",
|
||||
"workspace_dormant:delete_agent",
|
||||
"workspace_dormant:read",
|
||||
"workspace_dormant:ssh",
|
||||
"workspace_dormant:start",
|
||||
"workspace_dormant:stop",
|
||||
"workspace_dormant:update",
|
||||
"workspace_proxy:*",
|
||||
"workspace_proxy:create",
|
||||
"workspace_proxy:delete",
|
||||
"workspace_proxy:read",
|
||||
"workspace_proxy:update"
|
||||
],
|
||||
"x-enum-varnames": [
|
||||
"APIKeyScopeAll",
|
||||
"APIKeyScopeApplicationConnect",
|
||||
"APIKeyScopeAibridgeInterceptionAll",
|
||||
"APIKeyScopeAibridgeInterceptionCreate",
|
||||
"APIKeyScopeAibridgeInterceptionRead",
|
||||
"APIKeyScopeAibridgeInterceptionUpdate",
|
||||
"APIKeyScopeApiKeyAll",
|
||||
"APIKeyScopeApiKeyCreate",
|
||||
"APIKeyScopeApiKeyDelete",
|
||||
"APIKeyScopeApiKeyRead",
|
||||
"APIKeyScopeApiKeyUpdate",
|
||||
"APIKeyScopeAssignOrgRoleAll",
|
||||
"APIKeyScopeAssignOrgRoleAssign",
|
||||
"APIKeyScopeAssignOrgRoleCreate",
|
||||
"APIKeyScopeAssignOrgRoleDelete",
|
||||
"APIKeyScopeAssignOrgRoleRead",
|
||||
"APIKeyScopeAssignOrgRoleUnassign",
|
||||
"APIKeyScopeAssignOrgRoleUpdate",
|
||||
"APIKeyScopeAssignRoleAll",
|
||||
"APIKeyScopeAssignRoleAssign",
|
||||
"APIKeyScopeAssignRoleRead",
|
||||
"APIKeyScopeAssignRoleUnassign",
|
||||
"APIKeyScopeAuditLogAll",
|
||||
"APIKeyScopeAuditLogCreate",
|
||||
"APIKeyScopeAuditLogRead",
|
||||
"APIKeyScopeCoderAll",
|
||||
"APIKeyScopeCoderApikeysManageSelf",
|
||||
"APIKeyScopeCoderApplicationConnect",
|
||||
@@ -11580,31 +12033,166 @@ const docTemplate = `{
|
||||
"APIKeyScopeCoderWorkspacesCreate",
|
||||
"APIKeyScopeCoderWorkspacesDelete",
|
||||
"APIKeyScopeCoderWorkspacesOperate",
|
||||
"APIKeyScopeConnectionLogAll",
|
||||
"APIKeyScopeConnectionLogRead",
|
||||
"APIKeyScopeConnectionLogUpdate",
|
||||
"APIKeyScopeCryptoKeyAll",
|
||||
"APIKeyScopeCryptoKeyCreate",
|
||||
"APIKeyScopeCryptoKeyDelete",
|
||||
"APIKeyScopeCryptoKeyRead",
|
||||
"APIKeyScopeCryptoKeyUpdate",
|
||||
"APIKeyScopeDebugInfoAll",
|
||||
"APIKeyScopeDebugInfoRead",
|
||||
"APIKeyScopeDeploymentConfigAll",
|
||||
"APIKeyScopeDeploymentConfigRead",
|
||||
"APIKeyScopeDeploymentConfigUpdate",
|
||||
"APIKeyScopeDeploymentStatsAll",
|
||||
"APIKeyScopeDeploymentStatsRead",
|
||||
"APIKeyScopeFileAll",
|
||||
"APIKeyScopeFileCreate",
|
||||
"APIKeyScopeFileRead",
|
||||
"APIKeyScopeGroupAll",
|
||||
"APIKeyScopeGroupCreate",
|
||||
"APIKeyScopeGroupDelete",
|
||||
"APIKeyScopeGroupRead",
|
||||
"APIKeyScopeGroupUpdate",
|
||||
"APIKeyScopeGroupMemberAll",
|
||||
"APIKeyScopeGroupMemberRead",
|
||||
"APIKeyScopeIdpsyncSettingsAll",
|
||||
"APIKeyScopeIdpsyncSettingsRead",
|
||||
"APIKeyScopeIdpsyncSettingsUpdate",
|
||||
"APIKeyScopeInboxNotificationAll",
|
||||
"APIKeyScopeInboxNotificationCreate",
|
||||
"APIKeyScopeInboxNotificationRead",
|
||||
"APIKeyScopeInboxNotificationUpdate",
|
||||
"APIKeyScopeLicenseAll",
|
||||
"APIKeyScopeLicenseCreate",
|
||||
"APIKeyScopeLicenseDelete",
|
||||
"APIKeyScopeLicenseRead",
|
||||
"APIKeyScopeNotificationMessageAll",
|
||||
"APIKeyScopeNotificationMessageCreate",
|
||||
"APIKeyScopeNotificationMessageDelete",
|
||||
"APIKeyScopeNotificationMessageRead",
|
||||
"APIKeyScopeNotificationMessageUpdate",
|
||||
"APIKeyScopeNotificationPreferenceAll",
|
||||
"APIKeyScopeNotificationPreferenceRead",
|
||||
"APIKeyScopeNotificationPreferenceUpdate",
|
||||
"APIKeyScopeNotificationTemplateAll",
|
||||
"APIKeyScopeNotificationTemplateRead",
|
||||
"APIKeyScopeNotificationTemplateUpdate",
|
||||
"APIKeyScopeOauth2AppAll",
|
||||
"APIKeyScopeOauth2AppCreate",
|
||||
"APIKeyScopeOauth2AppDelete",
|
||||
"APIKeyScopeOauth2AppRead",
|
||||
"APIKeyScopeOauth2AppUpdate",
|
||||
"APIKeyScopeOauth2AppCodeTokenAll",
|
||||
"APIKeyScopeOauth2AppCodeTokenCreate",
|
||||
"APIKeyScopeOauth2AppCodeTokenDelete",
|
||||
"APIKeyScopeOauth2AppCodeTokenRead",
|
||||
"APIKeyScopeOauth2AppSecretAll",
|
||||
"APIKeyScopeOauth2AppSecretCreate",
|
||||
"APIKeyScopeOauth2AppSecretDelete",
|
||||
"APIKeyScopeOauth2AppSecretRead",
|
||||
"APIKeyScopeOauth2AppSecretUpdate",
|
||||
"APIKeyScopeOrganizationAll",
|
||||
"APIKeyScopeOrganizationCreate",
|
||||
"APIKeyScopeOrganizationDelete",
|
||||
"APIKeyScopeOrganizationRead",
|
||||
"APIKeyScopeOrganizationUpdate",
|
||||
"APIKeyScopeOrganizationMemberAll",
|
||||
"APIKeyScopeOrganizationMemberCreate",
|
||||
"APIKeyScopeOrganizationMemberDelete",
|
||||
"APIKeyScopeOrganizationMemberRead",
|
||||
"APIKeyScopeOrganizationMemberUpdate",
|
||||
"APIKeyScopePrebuiltWorkspaceAll",
|
||||
"APIKeyScopePrebuiltWorkspaceDelete",
|
||||
"APIKeyScopePrebuiltWorkspaceUpdate",
|
||||
"APIKeyScopeProvisionerDaemonAll",
|
||||
"APIKeyScopeProvisionerDaemonCreate",
|
||||
"APIKeyScopeProvisionerDaemonDelete",
|
||||
"APIKeyScopeProvisionerDaemonRead",
|
||||
"APIKeyScopeProvisionerDaemonUpdate",
|
||||
"APIKeyScopeProvisionerJobsAll",
|
||||
"APIKeyScopeProvisionerJobsCreate",
|
||||
"APIKeyScopeProvisionerJobsRead",
|
||||
"APIKeyScopeProvisionerJobsUpdate",
|
||||
"APIKeyScopeReplicasAll",
|
||||
"APIKeyScopeReplicasRead",
|
||||
"APIKeyScopeSystemAll",
|
||||
"APIKeyScopeSystemCreate",
|
||||
"APIKeyScopeSystemDelete",
|
||||
"APIKeyScopeSystemRead",
|
||||
"APIKeyScopeSystemUpdate",
|
||||
"APIKeyScopeTailnetCoordinatorAll",
|
||||
"APIKeyScopeTailnetCoordinatorCreate",
|
||||
"APIKeyScopeTailnetCoordinatorDelete",
|
||||
"APIKeyScopeTailnetCoordinatorRead",
|
||||
"APIKeyScopeTailnetCoordinatorUpdate",
|
||||
"APIKeyScopeTaskAll",
|
||||
"APIKeyScopeTaskCreate",
|
||||
"APIKeyScopeTaskDelete",
|
||||
"APIKeyScopeTaskRead",
|
||||
"APIKeyScopeTaskUpdate",
|
||||
"APIKeyScopeTemplateAll",
|
||||
"APIKeyScopeTemplateCreate",
|
||||
"APIKeyScopeTemplateDelete",
|
||||
"APIKeyScopeTemplateRead",
|
||||
"APIKeyScopeTemplateUpdate",
|
||||
"APIKeyScopeTemplateUse",
|
||||
"APIKeyScopeTemplateViewInsights",
|
||||
"APIKeyScopeUsageEventAll",
|
||||
"APIKeyScopeUsageEventCreate",
|
||||
"APIKeyScopeUsageEventRead",
|
||||
"APIKeyScopeUsageEventUpdate",
|
||||
"APIKeyScopeUserAll",
|
||||
"APIKeyScopeUserCreate",
|
||||
"APIKeyScopeUserDelete",
|
||||
"APIKeyScopeUserRead",
|
||||
"APIKeyScopeUserReadPersonal",
|
||||
"APIKeyScopeUserUpdate",
|
||||
"APIKeyScopeUserUpdatePersonal",
|
||||
"APIKeyScopeUserSecretAll",
|
||||
"APIKeyScopeUserSecretCreate",
|
||||
"APIKeyScopeUserSecretDelete",
|
||||
"APIKeyScopeUserSecretRead",
|
||||
"APIKeyScopeUserSecretUpdate",
|
||||
"APIKeyScopeWebpushSubscriptionAll",
|
||||
"APIKeyScopeWebpushSubscriptionCreate",
|
||||
"APIKeyScopeWebpushSubscriptionDelete",
|
||||
"APIKeyScopeWebpushSubscriptionRead",
|
||||
"APIKeyScopeWorkspaceAll",
|
||||
"APIKeyScopeWorkspaceApplicationConnect",
|
||||
"APIKeyScopeWorkspaceCreate",
|
||||
"APIKeyScopeWorkspaceCreateAgent",
|
||||
"APIKeyScopeWorkspaceDelete",
|
||||
"APIKeyScopeWorkspaceDeleteAgent",
|
||||
"APIKeyScopeWorkspaceRead",
|
||||
"APIKeyScopeWorkspaceSsh",
|
||||
"APIKeyScopeWorkspaceStart",
|
||||
"APIKeyScopeWorkspaceStop",
|
||||
"APIKeyScopeWorkspaceUpdate"
|
||||
"APIKeyScopeWorkspaceUpdate",
|
||||
"APIKeyScopeWorkspaceAgentDevcontainersAll",
|
||||
"APIKeyScopeWorkspaceAgentDevcontainersCreate",
|
||||
"APIKeyScopeWorkspaceAgentResourceMonitorAll",
|
||||
"APIKeyScopeWorkspaceAgentResourceMonitorCreate",
|
||||
"APIKeyScopeWorkspaceAgentResourceMonitorRead",
|
||||
"APIKeyScopeWorkspaceAgentResourceMonitorUpdate",
|
||||
"APIKeyScopeWorkspaceDormantAll",
|
||||
"APIKeyScopeWorkspaceDormantApplicationConnect",
|
||||
"APIKeyScopeWorkspaceDormantCreate",
|
||||
"APIKeyScopeWorkspaceDormantCreateAgent",
|
||||
"APIKeyScopeWorkspaceDormantDelete",
|
||||
"APIKeyScopeWorkspaceDormantDeleteAgent",
|
||||
"APIKeyScopeWorkspaceDormantRead",
|
||||
"APIKeyScopeWorkspaceDormantSsh",
|
||||
"APIKeyScopeWorkspaceDormantStart",
|
||||
"APIKeyScopeWorkspaceDormantStop",
|
||||
"APIKeyScopeWorkspaceDormantUpdate",
|
||||
"APIKeyScopeWorkspaceProxyAll",
|
||||
"APIKeyScopeWorkspaceProxyCreate",
|
||||
"APIKeyScopeWorkspaceProxyDelete",
|
||||
"APIKeyScopeWorkspaceProxyRead",
|
||||
"APIKeyScopeWorkspaceProxyUpdate"
|
||||
]
|
||||
},
|
||||
"codersdk.AddLicenseRequest": {
|
||||
@@ -12416,6 +13004,25 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.CreateTaskRequest": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"input": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"template_version_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"template_version_preset_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.CreateTemplateRequest": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
@@ -12670,6 +13277,12 @@ const docTemplate = `{
|
||||
"codersdk.CreateTokenRequest": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"allow_list": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/codersdk.APIAllowListTarget"
|
||||
}
|
||||
},
|
||||
"lifetime": {
|
||||
"type": "integer"
|
||||
},
|
||||
@@ -15974,6 +16587,10 @@ const docTemplate = `{
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"initiator_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"input": {
|
||||
"$ref": "#/definitions/codersdk.ProvisionerJobInput"
|
||||
},
|
||||
@@ -16370,6 +16987,7 @@ const docTemplate = `{
|
||||
"replicas",
|
||||
"system",
|
||||
"tailnet_coordinator",
|
||||
"task",
|
||||
"template",
|
||||
"usage_event",
|
||||
"user",
|
||||
@@ -16413,6 +17031,7 @@ const docTemplate = `{
|
||||
"ResourceReplicas",
|
||||
"ResourceSystem",
|
||||
"ResourceTailnetCoordinator",
|
||||
"ResourceTask",
|
||||
"ResourceTemplate",
|
||||
"ResourceUsageEvent",
|
||||
"ResourceUser",
|
||||
@@ -16628,7 +17247,8 @@ const docTemplate = `{
|
||||
"idp_sync_settings_group",
|
||||
"idp_sync_settings_role",
|
||||
"workspace_agent",
|
||||
"workspace_app"
|
||||
"workspace_app",
|
||||
"task"
|
||||
],
|
||||
"x-enum-varnames": [
|
||||
"ResourceTypeTemplate",
|
||||
@@ -16655,7 +17275,8 @@ const docTemplate = `{
|
||||
"ResourceTypeIdpSyncSettingsGroup",
|
||||
"ResourceTypeIdpSyncSettingsRole",
|
||||
"ResourceTypeWorkspaceAgent",
|
||||
"ResourceTypeWorkspaceApp"
|
||||
"ResourceTypeWorkspaceApp",
|
||||
"ResourceTypeTask"
|
||||
]
|
||||
},
|
||||
"codersdk.Response": {
|
||||
@@ -16911,6 +17532,189 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.Task": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"created_at": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"current_state": {
|
||||
"$ref": "#/definitions/codersdk.TaskStateEntry"
|
||||
},
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"initial_prompt": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"organization_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"owner_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"owner_name": {
|
||||
"type": "string"
|
||||
},
|
||||
"status": {
|
||||
"enum": [
|
||||
"pending",
|
||||
"starting",
|
||||
"running",
|
||||
"stopping",
|
||||
"stopped",
|
||||
"failed",
|
||||
"canceling",
|
||||
"canceled",
|
||||
"deleting",
|
||||
"deleted"
|
||||
],
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/codersdk.WorkspaceStatus"
|
||||
}
|
||||
]
|
||||
},
|
||||
"template_display_name": {
|
||||
"type": "string"
|
||||
},
|
||||
"template_icon": {
|
||||
"type": "string"
|
||||
},
|
||||
"template_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"template_name": {
|
||||
"type": "string"
|
||||
},
|
||||
"updated_at": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"workspace_agent_health": {
|
||||
"$ref": "#/definitions/codersdk.WorkspaceAgentHealth"
|
||||
},
|
||||
"workspace_agent_id": {
|
||||
"format": "uuid",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/uuid.NullUUID"
|
||||
}
|
||||
]
|
||||
},
|
||||
"workspace_agent_lifecycle": {
|
||||
"$ref": "#/definitions/codersdk.WorkspaceAgentLifecycle"
|
||||
},
|
||||
"workspace_app_id": {
|
||||
"format": "uuid",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/uuid.NullUUID"
|
||||
}
|
||||
]
|
||||
},
|
||||
"workspace_build_number": {
|
||||
"type": "integer"
|
||||
},
|
||||
"workspace_id": {
|
||||
"format": "uuid",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/uuid.NullUUID"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.TaskLogEntry": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"content": {
|
||||
"type": "string"
|
||||
},
|
||||
"id": {
|
||||
"type": "integer"
|
||||
},
|
||||
"time": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"type": {
|
||||
"$ref": "#/definitions/codersdk.TaskLogType"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.TaskLogType": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"input",
|
||||
"output"
|
||||
],
|
||||
"x-enum-varnames": [
|
||||
"TaskLogTypeInput",
|
||||
"TaskLogTypeOutput"
|
||||
]
|
||||
},
|
||||
"codersdk.TaskLogsResponse": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"logs": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/codersdk.TaskLogEntry"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.TaskSendRequest": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"input": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.TaskState": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"working",
|
||||
"idle",
|
||||
"complete",
|
||||
"failed"
|
||||
],
|
||||
"x-enum-varnames": [
|
||||
"TaskStateWorking",
|
||||
"TaskStateIdle",
|
||||
"TaskStateComplete",
|
||||
"TaskStateFailed"
|
||||
]
|
||||
},
|
||||
"codersdk.TaskStateEntry": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"message": {
|
||||
"type": "string"
|
||||
},
|
||||
"state": {
|
||||
"$ref": "#/definitions/codersdk.TaskState"
|
||||
},
|
||||
"timestamp": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"uri": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.TelemetryConfig": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
|
||||
Generated
+785
-4
@@ -106,6 +106,244 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"/api/experimental/tasks": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
],
|
||||
"tags": ["Experimental"],
|
||||
"summary": "List AI tasks",
|
||||
"operationId": "list-tasks",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Search query for filtering tasks",
|
||||
"name": "q",
|
||||
"in": "query"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Return tasks after this ID for pagination",
|
||||
"name": "after_id",
|
||||
"in": "query"
|
||||
},
|
||||
{
|
||||
"maximum": 100,
|
||||
"minimum": 1,
|
||||
"type": "integer",
|
||||
"default": 25,
|
||||
"description": "Maximum number of tasks to return",
|
||||
"name": "limit",
|
||||
"in": "query"
|
||||
},
|
||||
{
|
||||
"minimum": 0,
|
||||
"type": "integer",
|
||||
"default": 0,
|
||||
"description": "Offset for pagination",
|
||||
"name": "offset",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/coderd.tasksListResponse"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/api/experimental/tasks/{user}": {
|
||||
"post": {
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
],
|
||||
"tags": ["Experimental"],
|
||||
"summary": "Create a new AI task",
|
||||
"operationId": "create-task",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Username, user ID, or 'me' for the authenticated user",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"description": "Create task request",
|
||||
"name": "request",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.CreateTaskRequest"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "Created",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.Task"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/api/experimental/tasks/{user}/{id}": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
],
|
||||
"tags": ["Experimental"],
|
||||
"summary": "Get AI task by ID",
|
||||
"operationId": "get-task",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Username, user ID, or 'me' for the authenticated user",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "Task ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.Task"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
],
|
||||
"tags": ["Experimental"],
|
||||
"summary": "Delete AI task by ID",
|
||||
"operationId": "delete-task",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Username, user ID, or 'me' for the authenticated user",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "Task ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"202": {
|
||||
"description": "Task deletion initiated"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/api/experimental/tasks/{user}/{id}/logs": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
],
|
||||
"tags": ["Experimental"],
|
||||
"summary": "Get AI task logs",
|
||||
"operationId": "get-task-logs",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Username, user ID, or 'me' for the authenticated user",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "Task ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.TaskLogsResponse"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/api/experimental/tasks/{user}/{id}/send": {
|
||||
"post": {
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
],
|
||||
"tags": ["Experimental"],
|
||||
"summary": "Send input to AI task",
|
||||
"operationId": "send-task-input",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Username, user ID, or 'me' for the authenticated user",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "Task ID",
|
||||
"name": "id",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"description": "Task input request",
|
||||
"name": "request",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.TaskSendRequest"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"204": {
|
||||
"description": "Input sent successfully"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/appearance": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -3299,6 +3537,13 @@
|
||||
"description": "Provisioner tags to filter by (JSON of the form {'tag1':'value1','tag2':'value2'})",
|
||||
"name": "tags",
|
||||
"in": "query"
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "Filter results by initiator",
|
||||
"name": "initiator",
|
||||
"in": "query"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
@@ -9953,6 +10198,20 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"coderd.tasksListResponse": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"count": {
|
||||
"type": "integer"
|
||||
},
|
||||
"tasks": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/codersdk.Task"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.ACLAvailable": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -10166,6 +10425,17 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.APIAllowListTarget": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"$ref": "#/definitions/codersdk.RBACResource"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.APIKey": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
@@ -10239,11 +10509,29 @@
|
||||
"enum": [
|
||||
"all",
|
||||
"application_connect",
|
||||
"aibridge_interception:*",
|
||||
"aibridge_interception:create",
|
||||
"aibridge_interception:read",
|
||||
"aibridge_interception:update",
|
||||
"api_key:*",
|
||||
"api_key:create",
|
||||
"api_key:delete",
|
||||
"api_key:read",
|
||||
"api_key:update",
|
||||
"assign_org_role:*",
|
||||
"assign_org_role:assign",
|
||||
"assign_org_role:create",
|
||||
"assign_org_role:delete",
|
||||
"assign_org_role:read",
|
||||
"assign_org_role:unassign",
|
||||
"assign_org_role:update",
|
||||
"assign_role:*",
|
||||
"assign_role:assign",
|
||||
"assign_role:read",
|
||||
"assign_role:unassign",
|
||||
"audit_log:*",
|
||||
"audit_log:create",
|
||||
"audit_log:read",
|
||||
"coder:all",
|
||||
"coder:apikeys.manage_self",
|
||||
"coder:application_connect",
|
||||
@@ -10253,40 +10541,193 @@
|
||||
"coder:workspaces.create",
|
||||
"coder:workspaces.delete",
|
||||
"coder:workspaces.operate",
|
||||
"connection_log:*",
|
||||
"connection_log:read",
|
||||
"connection_log:update",
|
||||
"crypto_key:*",
|
||||
"crypto_key:create",
|
||||
"crypto_key:delete",
|
||||
"crypto_key:read",
|
||||
"crypto_key:update",
|
||||
"debug_info:*",
|
||||
"debug_info:read",
|
||||
"deployment_config:*",
|
||||
"deployment_config:read",
|
||||
"deployment_config:update",
|
||||
"deployment_stats:*",
|
||||
"deployment_stats:read",
|
||||
"file:*",
|
||||
"file:create",
|
||||
"file:read",
|
||||
"group:*",
|
||||
"group:create",
|
||||
"group:delete",
|
||||
"group:read",
|
||||
"group:update",
|
||||
"group_member:*",
|
||||
"group_member:read",
|
||||
"idpsync_settings:*",
|
||||
"idpsync_settings:read",
|
||||
"idpsync_settings:update",
|
||||
"inbox_notification:*",
|
||||
"inbox_notification:create",
|
||||
"inbox_notification:read",
|
||||
"inbox_notification:update",
|
||||
"license:*",
|
||||
"license:create",
|
||||
"license:delete",
|
||||
"license:read",
|
||||
"notification_message:*",
|
||||
"notification_message:create",
|
||||
"notification_message:delete",
|
||||
"notification_message:read",
|
||||
"notification_message:update",
|
||||
"notification_preference:*",
|
||||
"notification_preference:read",
|
||||
"notification_preference:update",
|
||||
"notification_template:*",
|
||||
"notification_template:read",
|
||||
"notification_template:update",
|
||||
"oauth2_app:*",
|
||||
"oauth2_app:create",
|
||||
"oauth2_app:delete",
|
||||
"oauth2_app:read",
|
||||
"oauth2_app:update",
|
||||
"oauth2_app_code_token:*",
|
||||
"oauth2_app_code_token:create",
|
||||
"oauth2_app_code_token:delete",
|
||||
"oauth2_app_code_token:read",
|
||||
"oauth2_app_secret:*",
|
||||
"oauth2_app_secret:create",
|
||||
"oauth2_app_secret:delete",
|
||||
"oauth2_app_secret:read",
|
||||
"oauth2_app_secret:update",
|
||||
"organization:*",
|
||||
"organization:create",
|
||||
"organization:delete",
|
||||
"organization:read",
|
||||
"organization:update",
|
||||
"organization_member:*",
|
||||
"organization_member:create",
|
||||
"organization_member:delete",
|
||||
"organization_member:read",
|
||||
"organization_member:update",
|
||||
"prebuilt_workspace:*",
|
||||
"prebuilt_workspace:delete",
|
||||
"prebuilt_workspace:update",
|
||||
"provisioner_daemon:*",
|
||||
"provisioner_daemon:create",
|
||||
"provisioner_daemon:delete",
|
||||
"provisioner_daemon:read",
|
||||
"provisioner_daemon:update",
|
||||
"provisioner_jobs:*",
|
||||
"provisioner_jobs:create",
|
||||
"provisioner_jobs:read",
|
||||
"provisioner_jobs:update",
|
||||
"replicas:*",
|
||||
"replicas:read",
|
||||
"system:*",
|
||||
"system:create",
|
||||
"system:delete",
|
||||
"system:read",
|
||||
"system:update",
|
||||
"tailnet_coordinator:*",
|
||||
"tailnet_coordinator:create",
|
||||
"tailnet_coordinator:delete",
|
||||
"tailnet_coordinator:read",
|
||||
"tailnet_coordinator:update",
|
||||
"task:*",
|
||||
"task:create",
|
||||
"task:delete",
|
||||
"task:read",
|
||||
"task:update",
|
||||
"template:*",
|
||||
"template:create",
|
||||
"template:delete",
|
||||
"template:read",
|
||||
"template:update",
|
||||
"template:use",
|
||||
"template:view_insights",
|
||||
"usage_event:*",
|
||||
"usage_event:create",
|
||||
"usage_event:read",
|
||||
"usage_event:update",
|
||||
"user:*",
|
||||
"user:create",
|
||||
"user:delete",
|
||||
"user:read",
|
||||
"user:read_personal",
|
||||
"user:update",
|
||||
"user:update_personal",
|
||||
"user_secret:*",
|
||||
"user_secret:create",
|
||||
"user_secret:delete",
|
||||
"user_secret:read",
|
||||
"user_secret:update",
|
||||
"webpush_subscription:*",
|
||||
"webpush_subscription:create",
|
||||
"webpush_subscription:delete",
|
||||
"webpush_subscription:read",
|
||||
"workspace:*",
|
||||
"workspace:application_connect",
|
||||
"workspace:create",
|
||||
"workspace:create_agent",
|
||||
"workspace:delete",
|
||||
"workspace:delete_agent",
|
||||
"workspace:read",
|
||||
"workspace:ssh",
|
||||
"workspace:start",
|
||||
"workspace:stop",
|
||||
"workspace:update"
|
||||
"workspace:update",
|
||||
"workspace_agent_devcontainers:*",
|
||||
"workspace_agent_devcontainers:create",
|
||||
"workspace_agent_resource_monitor:*",
|
||||
"workspace_agent_resource_monitor:create",
|
||||
"workspace_agent_resource_monitor:read",
|
||||
"workspace_agent_resource_monitor:update",
|
||||
"workspace_dormant:*",
|
||||
"workspace_dormant:application_connect",
|
||||
"workspace_dormant:create",
|
||||
"workspace_dormant:create_agent",
|
||||
"workspace_dormant:delete",
|
||||
"workspace_dormant:delete_agent",
|
||||
"workspace_dormant:read",
|
||||
"workspace_dormant:ssh",
|
||||
"workspace_dormant:start",
|
||||
"workspace_dormant:stop",
|
||||
"workspace_dormant:update",
|
||||
"workspace_proxy:*",
|
||||
"workspace_proxy:create",
|
||||
"workspace_proxy:delete",
|
||||
"workspace_proxy:read",
|
||||
"workspace_proxy:update"
|
||||
],
|
||||
"x-enum-varnames": [
|
||||
"APIKeyScopeAll",
|
||||
"APIKeyScopeApplicationConnect",
|
||||
"APIKeyScopeAibridgeInterceptionAll",
|
||||
"APIKeyScopeAibridgeInterceptionCreate",
|
||||
"APIKeyScopeAibridgeInterceptionRead",
|
||||
"APIKeyScopeAibridgeInterceptionUpdate",
|
||||
"APIKeyScopeApiKeyAll",
|
||||
"APIKeyScopeApiKeyCreate",
|
||||
"APIKeyScopeApiKeyDelete",
|
||||
"APIKeyScopeApiKeyRead",
|
||||
"APIKeyScopeApiKeyUpdate",
|
||||
"APIKeyScopeAssignOrgRoleAll",
|
||||
"APIKeyScopeAssignOrgRoleAssign",
|
||||
"APIKeyScopeAssignOrgRoleCreate",
|
||||
"APIKeyScopeAssignOrgRoleDelete",
|
||||
"APIKeyScopeAssignOrgRoleRead",
|
||||
"APIKeyScopeAssignOrgRoleUnassign",
|
||||
"APIKeyScopeAssignOrgRoleUpdate",
|
||||
"APIKeyScopeAssignRoleAll",
|
||||
"APIKeyScopeAssignRoleAssign",
|
||||
"APIKeyScopeAssignRoleRead",
|
||||
"APIKeyScopeAssignRoleUnassign",
|
||||
"APIKeyScopeAuditLogAll",
|
||||
"APIKeyScopeAuditLogCreate",
|
||||
"APIKeyScopeAuditLogRead",
|
||||
"APIKeyScopeCoderAll",
|
||||
"APIKeyScopeCoderApikeysManageSelf",
|
||||
"APIKeyScopeCoderApplicationConnect",
|
||||
@@ -10296,31 +10737,166 @@
|
||||
"APIKeyScopeCoderWorkspacesCreate",
|
||||
"APIKeyScopeCoderWorkspacesDelete",
|
||||
"APIKeyScopeCoderWorkspacesOperate",
|
||||
"APIKeyScopeConnectionLogAll",
|
||||
"APIKeyScopeConnectionLogRead",
|
||||
"APIKeyScopeConnectionLogUpdate",
|
||||
"APIKeyScopeCryptoKeyAll",
|
||||
"APIKeyScopeCryptoKeyCreate",
|
||||
"APIKeyScopeCryptoKeyDelete",
|
||||
"APIKeyScopeCryptoKeyRead",
|
||||
"APIKeyScopeCryptoKeyUpdate",
|
||||
"APIKeyScopeDebugInfoAll",
|
||||
"APIKeyScopeDebugInfoRead",
|
||||
"APIKeyScopeDeploymentConfigAll",
|
||||
"APIKeyScopeDeploymentConfigRead",
|
||||
"APIKeyScopeDeploymentConfigUpdate",
|
||||
"APIKeyScopeDeploymentStatsAll",
|
||||
"APIKeyScopeDeploymentStatsRead",
|
||||
"APIKeyScopeFileAll",
|
||||
"APIKeyScopeFileCreate",
|
||||
"APIKeyScopeFileRead",
|
||||
"APIKeyScopeGroupAll",
|
||||
"APIKeyScopeGroupCreate",
|
||||
"APIKeyScopeGroupDelete",
|
||||
"APIKeyScopeGroupRead",
|
||||
"APIKeyScopeGroupUpdate",
|
||||
"APIKeyScopeGroupMemberAll",
|
||||
"APIKeyScopeGroupMemberRead",
|
||||
"APIKeyScopeIdpsyncSettingsAll",
|
||||
"APIKeyScopeIdpsyncSettingsRead",
|
||||
"APIKeyScopeIdpsyncSettingsUpdate",
|
||||
"APIKeyScopeInboxNotificationAll",
|
||||
"APIKeyScopeInboxNotificationCreate",
|
||||
"APIKeyScopeInboxNotificationRead",
|
||||
"APIKeyScopeInboxNotificationUpdate",
|
||||
"APIKeyScopeLicenseAll",
|
||||
"APIKeyScopeLicenseCreate",
|
||||
"APIKeyScopeLicenseDelete",
|
||||
"APIKeyScopeLicenseRead",
|
||||
"APIKeyScopeNotificationMessageAll",
|
||||
"APIKeyScopeNotificationMessageCreate",
|
||||
"APIKeyScopeNotificationMessageDelete",
|
||||
"APIKeyScopeNotificationMessageRead",
|
||||
"APIKeyScopeNotificationMessageUpdate",
|
||||
"APIKeyScopeNotificationPreferenceAll",
|
||||
"APIKeyScopeNotificationPreferenceRead",
|
||||
"APIKeyScopeNotificationPreferenceUpdate",
|
||||
"APIKeyScopeNotificationTemplateAll",
|
||||
"APIKeyScopeNotificationTemplateRead",
|
||||
"APIKeyScopeNotificationTemplateUpdate",
|
||||
"APIKeyScopeOauth2AppAll",
|
||||
"APIKeyScopeOauth2AppCreate",
|
||||
"APIKeyScopeOauth2AppDelete",
|
||||
"APIKeyScopeOauth2AppRead",
|
||||
"APIKeyScopeOauth2AppUpdate",
|
||||
"APIKeyScopeOauth2AppCodeTokenAll",
|
||||
"APIKeyScopeOauth2AppCodeTokenCreate",
|
||||
"APIKeyScopeOauth2AppCodeTokenDelete",
|
||||
"APIKeyScopeOauth2AppCodeTokenRead",
|
||||
"APIKeyScopeOauth2AppSecretAll",
|
||||
"APIKeyScopeOauth2AppSecretCreate",
|
||||
"APIKeyScopeOauth2AppSecretDelete",
|
||||
"APIKeyScopeOauth2AppSecretRead",
|
||||
"APIKeyScopeOauth2AppSecretUpdate",
|
||||
"APIKeyScopeOrganizationAll",
|
||||
"APIKeyScopeOrganizationCreate",
|
||||
"APIKeyScopeOrganizationDelete",
|
||||
"APIKeyScopeOrganizationRead",
|
||||
"APIKeyScopeOrganizationUpdate",
|
||||
"APIKeyScopeOrganizationMemberAll",
|
||||
"APIKeyScopeOrganizationMemberCreate",
|
||||
"APIKeyScopeOrganizationMemberDelete",
|
||||
"APIKeyScopeOrganizationMemberRead",
|
||||
"APIKeyScopeOrganizationMemberUpdate",
|
||||
"APIKeyScopePrebuiltWorkspaceAll",
|
||||
"APIKeyScopePrebuiltWorkspaceDelete",
|
||||
"APIKeyScopePrebuiltWorkspaceUpdate",
|
||||
"APIKeyScopeProvisionerDaemonAll",
|
||||
"APIKeyScopeProvisionerDaemonCreate",
|
||||
"APIKeyScopeProvisionerDaemonDelete",
|
||||
"APIKeyScopeProvisionerDaemonRead",
|
||||
"APIKeyScopeProvisionerDaemonUpdate",
|
||||
"APIKeyScopeProvisionerJobsAll",
|
||||
"APIKeyScopeProvisionerJobsCreate",
|
||||
"APIKeyScopeProvisionerJobsRead",
|
||||
"APIKeyScopeProvisionerJobsUpdate",
|
||||
"APIKeyScopeReplicasAll",
|
||||
"APIKeyScopeReplicasRead",
|
||||
"APIKeyScopeSystemAll",
|
||||
"APIKeyScopeSystemCreate",
|
||||
"APIKeyScopeSystemDelete",
|
||||
"APIKeyScopeSystemRead",
|
||||
"APIKeyScopeSystemUpdate",
|
||||
"APIKeyScopeTailnetCoordinatorAll",
|
||||
"APIKeyScopeTailnetCoordinatorCreate",
|
||||
"APIKeyScopeTailnetCoordinatorDelete",
|
||||
"APIKeyScopeTailnetCoordinatorRead",
|
||||
"APIKeyScopeTailnetCoordinatorUpdate",
|
||||
"APIKeyScopeTaskAll",
|
||||
"APIKeyScopeTaskCreate",
|
||||
"APIKeyScopeTaskDelete",
|
||||
"APIKeyScopeTaskRead",
|
||||
"APIKeyScopeTaskUpdate",
|
||||
"APIKeyScopeTemplateAll",
|
||||
"APIKeyScopeTemplateCreate",
|
||||
"APIKeyScopeTemplateDelete",
|
||||
"APIKeyScopeTemplateRead",
|
||||
"APIKeyScopeTemplateUpdate",
|
||||
"APIKeyScopeTemplateUse",
|
||||
"APIKeyScopeTemplateViewInsights",
|
||||
"APIKeyScopeUsageEventAll",
|
||||
"APIKeyScopeUsageEventCreate",
|
||||
"APIKeyScopeUsageEventRead",
|
||||
"APIKeyScopeUsageEventUpdate",
|
||||
"APIKeyScopeUserAll",
|
||||
"APIKeyScopeUserCreate",
|
||||
"APIKeyScopeUserDelete",
|
||||
"APIKeyScopeUserRead",
|
||||
"APIKeyScopeUserReadPersonal",
|
||||
"APIKeyScopeUserUpdate",
|
||||
"APIKeyScopeUserUpdatePersonal",
|
||||
"APIKeyScopeUserSecretAll",
|
||||
"APIKeyScopeUserSecretCreate",
|
||||
"APIKeyScopeUserSecretDelete",
|
||||
"APIKeyScopeUserSecretRead",
|
||||
"APIKeyScopeUserSecretUpdate",
|
||||
"APIKeyScopeWebpushSubscriptionAll",
|
||||
"APIKeyScopeWebpushSubscriptionCreate",
|
||||
"APIKeyScopeWebpushSubscriptionDelete",
|
||||
"APIKeyScopeWebpushSubscriptionRead",
|
||||
"APIKeyScopeWorkspaceAll",
|
||||
"APIKeyScopeWorkspaceApplicationConnect",
|
||||
"APIKeyScopeWorkspaceCreate",
|
||||
"APIKeyScopeWorkspaceCreateAgent",
|
||||
"APIKeyScopeWorkspaceDelete",
|
||||
"APIKeyScopeWorkspaceDeleteAgent",
|
||||
"APIKeyScopeWorkspaceRead",
|
||||
"APIKeyScopeWorkspaceSsh",
|
||||
"APIKeyScopeWorkspaceStart",
|
||||
"APIKeyScopeWorkspaceStop",
|
||||
"APIKeyScopeWorkspaceUpdate"
|
||||
"APIKeyScopeWorkspaceUpdate",
|
||||
"APIKeyScopeWorkspaceAgentDevcontainersAll",
|
||||
"APIKeyScopeWorkspaceAgentDevcontainersCreate",
|
||||
"APIKeyScopeWorkspaceAgentResourceMonitorAll",
|
||||
"APIKeyScopeWorkspaceAgentResourceMonitorCreate",
|
||||
"APIKeyScopeWorkspaceAgentResourceMonitorRead",
|
||||
"APIKeyScopeWorkspaceAgentResourceMonitorUpdate",
|
||||
"APIKeyScopeWorkspaceDormantAll",
|
||||
"APIKeyScopeWorkspaceDormantApplicationConnect",
|
||||
"APIKeyScopeWorkspaceDormantCreate",
|
||||
"APIKeyScopeWorkspaceDormantCreateAgent",
|
||||
"APIKeyScopeWorkspaceDormantDelete",
|
||||
"APIKeyScopeWorkspaceDormantDeleteAgent",
|
||||
"APIKeyScopeWorkspaceDormantRead",
|
||||
"APIKeyScopeWorkspaceDormantSsh",
|
||||
"APIKeyScopeWorkspaceDormantStart",
|
||||
"APIKeyScopeWorkspaceDormantStop",
|
||||
"APIKeyScopeWorkspaceDormantUpdate",
|
||||
"APIKeyScopeWorkspaceProxyAll",
|
||||
"APIKeyScopeWorkspaceProxyCreate",
|
||||
"APIKeyScopeWorkspaceProxyDelete",
|
||||
"APIKeyScopeWorkspaceProxyRead",
|
||||
"APIKeyScopeWorkspaceProxyUpdate"
|
||||
]
|
||||
},
|
||||
"codersdk.AddLicenseRequest": {
|
||||
@@ -11094,6 +11670,25 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.CreateTaskRequest": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"input": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"template_version_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"template_version_preset_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.CreateTemplateRequest": {
|
||||
"type": "object",
|
||||
"required": ["name", "template_version_id"],
|
||||
@@ -11327,6 +11922,12 @@
|
||||
"codersdk.CreateTokenRequest": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"allow_list": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/codersdk.APIAllowListTarget"
|
||||
}
|
||||
},
|
||||
"lifetime": {
|
||||
"type": "integer"
|
||||
},
|
||||
@@ -14532,6 +15133,10 @@
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"initiator_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"input": {
|
||||
"$ref": "#/definitions/codersdk.ProvisionerJobInput"
|
||||
},
|
||||
@@ -14904,6 +15509,7 @@
|
||||
"replicas",
|
||||
"system",
|
||||
"tailnet_coordinator",
|
||||
"task",
|
||||
"template",
|
||||
"usage_event",
|
||||
"user",
|
||||
@@ -14947,6 +15553,7 @@
|
||||
"ResourceReplicas",
|
||||
"ResourceSystem",
|
||||
"ResourceTailnetCoordinator",
|
||||
"ResourceTask",
|
||||
"ResourceTemplate",
|
||||
"ResourceUsageEvent",
|
||||
"ResourceUser",
|
||||
@@ -15152,7 +15759,8 @@
|
||||
"idp_sync_settings_group",
|
||||
"idp_sync_settings_role",
|
||||
"workspace_agent",
|
||||
"workspace_app"
|
||||
"workspace_app",
|
||||
"task"
|
||||
],
|
||||
"x-enum-varnames": [
|
||||
"ResourceTypeTemplate",
|
||||
@@ -15179,7 +15787,8 @@
|
||||
"ResourceTypeIdpSyncSettingsGroup",
|
||||
"ResourceTypeIdpSyncSettingsRole",
|
||||
"ResourceTypeWorkspaceAgent",
|
||||
"ResourceTypeWorkspaceApp"
|
||||
"ResourceTypeWorkspaceApp",
|
||||
"ResourceTypeTask"
|
||||
]
|
||||
},
|
||||
"codersdk.Response": {
|
||||
@@ -15431,6 +16040,178 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.Task": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"created_at": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"current_state": {
|
||||
"$ref": "#/definitions/codersdk.TaskStateEntry"
|
||||
},
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"initial_prompt": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"organization_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"owner_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"owner_name": {
|
||||
"type": "string"
|
||||
},
|
||||
"status": {
|
||||
"enum": [
|
||||
"pending",
|
||||
"starting",
|
||||
"running",
|
||||
"stopping",
|
||||
"stopped",
|
||||
"failed",
|
||||
"canceling",
|
||||
"canceled",
|
||||
"deleting",
|
||||
"deleted"
|
||||
],
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/codersdk.WorkspaceStatus"
|
||||
}
|
||||
]
|
||||
},
|
||||
"template_display_name": {
|
||||
"type": "string"
|
||||
},
|
||||
"template_icon": {
|
||||
"type": "string"
|
||||
},
|
||||
"template_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"template_name": {
|
||||
"type": "string"
|
||||
},
|
||||
"updated_at": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"workspace_agent_health": {
|
||||
"$ref": "#/definitions/codersdk.WorkspaceAgentHealth"
|
||||
},
|
||||
"workspace_agent_id": {
|
||||
"format": "uuid",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/uuid.NullUUID"
|
||||
}
|
||||
]
|
||||
},
|
||||
"workspace_agent_lifecycle": {
|
||||
"$ref": "#/definitions/codersdk.WorkspaceAgentLifecycle"
|
||||
},
|
||||
"workspace_app_id": {
|
||||
"format": "uuid",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/uuid.NullUUID"
|
||||
}
|
||||
]
|
||||
},
|
||||
"workspace_build_number": {
|
||||
"type": "integer"
|
||||
},
|
||||
"workspace_id": {
|
||||
"format": "uuid",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/uuid.NullUUID"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.TaskLogEntry": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"content": {
|
||||
"type": "string"
|
||||
},
|
||||
"id": {
|
||||
"type": "integer"
|
||||
},
|
||||
"time": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"type": {
|
||||
"$ref": "#/definitions/codersdk.TaskLogType"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.TaskLogType": {
|
||||
"type": "string",
|
||||
"enum": ["input", "output"],
|
||||
"x-enum-varnames": ["TaskLogTypeInput", "TaskLogTypeOutput"]
|
||||
},
|
||||
"codersdk.TaskLogsResponse": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"logs": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/codersdk.TaskLogEntry"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.TaskSendRequest": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"input": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.TaskState": {
|
||||
"type": "string",
|
||||
"enum": ["working", "idle", "complete", "failed"],
|
||||
"x-enum-varnames": [
|
||||
"TaskStateWorking",
|
||||
"TaskStateIdle",
|
||||
"TaskStateComplete",
|
||||
"TaskStateFailed"
|
||||
]
|
||||
},
|
||||
"codersdk.TaskStateEntry": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"message": {
|
||||
"type": "string"
|
||||
},
|
||||
"state": {
|
||||
"$ref": "#/definitions/codersdk.TaskState"
|
||||
},
|
||||
"timestamp": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"uri": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.TelemetryConfig": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
|
||||
@@ -116,6 +116,37 @@ func (api *API) postToken(rw http.ResponseWriter, r *http.Request) {
|
||||
TokenName: tokenName,
|
||||
}
|
||||
|
||||
if len(createToken.AllowList) > 0 {
|
||||
rbacAllowListElements := make([]rbac.AllowListElement, 0, len(createToken.AllowList))
|
||||
for _, t := range createToken.AllowList {
|
||||
entry, err := rbac.NewAllowListElement(string(t.Type), t.ID)
|
||||
if err != nil {
|
||||
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
|
||||
Message: "Failed to create API key.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
rbacAllowListElements = append(rbacAllowListElements, entry)
|
||||
}
|
||||
|
||||
rbacAllowList, err := rbac.NormalizeAllowList(rbacAllowListElements)
|
||||
if err != nil {
|
||||
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
|
||||
Message: "Failed to create API key.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
dbAllowList := make(database.AllowList, 0, len(rbacAllowList))
|
||||
for _, e := range rbacAllowList {
|
||||
dbAllowList = append(dbAllowList, rbac.AllowListElement{Type: e.Type, ID: e.ID})
|
||||
}
|
||||
|
||||
params.AllowList = dbAllowList
|
||||
}
|
||||
|
||||
if createToken.Lifetime != 0 {
|
||||
err := api.validateAPIKeyLifetime(ctx, user.ID, createToken.Lifetime)
|
||||
if err != nil {
|
||||
|
||||
@@ -12,6 +12,7 @@ import (
|
||||
|
||||
"github.com/coder/coder/v2/coderd/database"
|
||||
"github.com/coder/coder/v2/coderd/database/dbtime"
|
||||
"github.com/coder/coder/v2/coderd/rbac/policy"
|
||||
"github.com/coder/coder/v2/cryptorand"
|
||||
)
|
||||
|
||||
@@ -34,6 +35,9 @@ type CreateParams struct {
|
||||
Scopes database.APIKeyScopes
|
||||
TokenName string
|
||||
RemoteAddr string
|
||||
// AllowList is an optional, normalized allow-list
|
||||
// of resource type and uuid entries. If empty, defaults to wildcard.
|
||||
AllowList database.AllowList
|
||||
}
|
||||
|
||||
// Generate generates an API key, returning the key as a string as well as the
|
||||
@@ -61,6 +65,10 @@ func Generate(params CreateParams) (database.InsertAPIKeyParams, string, error)
|
||||
params.LifetimeSeconds = int64(time.Until(params.ExpiresAt).Seconds())
|
||||
}
|
||||
|
||||
if len(params.AllowList) == 0 {
|
||||
params.AllowList = database.AllowList{{Type: policy.WildcardSymbol, ID: policy.WildcardSymbol}}
|
||||
}
|
||||
|
||||
ip := net.ParseIP(params.RemoteAddr)
|
||||
if ip == nil {
|
||||
ip = net.IPv4(0, 0, 0, 0)
|
||||
@@ -115,7 +123,7 @@ func Generate(params CreateParams) (database.InsertAPIKeyParams, string, error)
|
||||
HashedSecret: hashed[:],
|
||||
LoginType: params.LoginType,
|
||||
Scopes: scopes,
|
||||
AllowList: database.AllowList{database.AllowListWildcard()},
|
||||
AllowList: params.AllowList,
|
||||
TokenName: params.TokenName,
|
||||
}, token, nil
|
||||
}
|
||||
|
||||
@@ -420,6 +420,14 @@ func (api *API) auditLogIsResourceDeleted(ctx context.Context, alog database.Get
|
||||
api.Logger.Error(ctx, "unable to fetch oauth2 app secret", slog.Error(err))
|
||||
}
|
||||
return false
|
||||
case database.ResourceTypeTask:
|
||||
task, err := api.Database.GetTaskByID(ctx, alog.AuditLog.ResourceID)
|
||||
if xerrors.Is(err, sql.ErrNoRows) {
|
||||
return true
|
||||
} else if err != nil {
|
||||
api.Logger.Error(ctx, "unable to fetch task", slog.Error(err))
|
||||
}
|
||||
return task.DeletedAt.Valid && task.DeletedAt.Time.Before(time.Now())
|
||||
default:
|
||||
return false
|
||||
}
|
||||
@@ -496,6 +504,17 @@ func (api *API) auditLogResourceLink(ctx context.Context, alog database.GetAudit
|
||||
}
|
||||
return fmt.Sprintf("/deployment/oauth2-provider/apps/%s", secret.AppID)
|
||||
|
||||
case database.ResourceTypeTask:
|
||||
task, err := api.Database.GetTaskByID(ctx, alog.AuditLog.ResourceID)
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
workspace, err := api.Database.GetWorkspaceByID(ctx, task.WorkspaceID.UUID)
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
return fmt.Sprintf("/tasks/%s/%s", workspace.OwnerName, task.Name)
|
||||
|
||||
default:
|
||||
return ""
|
||||
}
|
||||
|
||||
@@ -31,7 +31,8 @@ type Auditable interface {
|
||||
database.NotificationTemplate |
|
||||
idpsync.OrganizationSyncSettings |
|
||||
idpsync.GroupSyncSettings |
|
||||
idpsync.RoleSyncSettings
|
||||
idpsync.RoleSyncSettings |
|
||||
database.TaskTable
|
||||
}
|
||||
|
||||
// Map is a map of changed fields in an audited resource. It maps field names to
|
||||
|
||||
@@ -131,6 +131,8 @@ func ResourceTarget[T Auditable](tgt T) string {
|
||||
return "Organization Group Sync"
|
||||
case idpsync.RoleSyncSettings:
|
||||
return "Organization Role Sync"
|
||||
case database.TaskTable:
|
||||
return typed.Name
|
||||
default:
|
||||
panic(fmt.Sprintf("unknown resource %T for ResourceTarget", tgt))
|
||||
}
|
||||
@@ -193,6 +195,8 @@ func ResourceID[T Auditable](tgt T) uuid.UUID {
|
||||
return noID // Org field on audit log has org id
|
||||
case idpsync.RoleSyncSettings:
|
||||
return noID // Org field on audit log has org id
|
||||
case database.TaskTable:
|
||||
return typed.ID
|
||||
default:
|
||||
panic(fmt.Sprintf("unknown resource %T for ResourceID", tgt))
|
||||
}
|
||||
@@ -246,6 +250,8 @@ func ResourceType[T Auditable](tgt T) database.ResourceType {
|
||||
return database.ResourceTypeIdpSyncSettingsRole
|
||||
case idpsync.GroupSyncSettings:
|
||||
return database.ResourceTypeIdpSyncSettingsGroup
|
||||
case database.TaskTable:
|
||||
return database.ResourceTypeTask
|
||||
default:
|
||||
panic(fmt.Sprintf("unknown resource %T for ResourceType", typed))
|
||||
}
|
||||
@@ -302,6 +308,8 @@ func ResourceRequiresOrgID[T Auditable]() bool {
|
||||
return true
|
||||
case idpsync.RoleSyncSettings:
|
||||
return true
|
||||
case database.TaskTable:
|
||||
return true
|
||||
default:
|
||||
panic(fmt.Sprintf("unknown resource %T for ResourceRequiresOrgID", tgt))
|
||||
}
|
||||
|
||||
@@ -68,7 +68,7 @@ func AssertRBAC(t *testing.T, api *coderd.API, client *codersdk.Client) RBACAsse
|
||||
ID: key.UserID.String(),
|
||||
Roles: rbac.RoleIdentifiers(roleNames),
|
||||
Groups: roles.Groups,
|
||||
Scope: key.Scopes,
|
||||
Scope: key.ScopeSet(),
|
||||
},
|
||||
Recorder: recorder,
|
||||
}
|
||||
|
||||
@@ -62,10 +62,6 @@ func (m *FakeConnectionLogger) Contains(t testing.TB, expected database.UpsertCo
|
||||
t.Logf("connection log %d: expected ID %s, got %s", idx+1, expected.ID, cl.ID)
|
||||
continue
|
||||
}
|
||||
if !expected.Time.IsZero() && expected.Time != cl.Time {
|
||||
t.Logf("connection log %d: expected Time %s, got %s", idx+1, expected.Time, cl.Time)
|
||||
continue
|
||||
}
|
||||
if expected.OrganizationID != uuid.Nil && cl.OrganizationID != expected.OrganizationID {
|
||||
t.Logf("connection log %d: expected OrganizationID %s, got %s", idx+1, expected.OrganizationID, cl.OrganizationID)
|
||||
continue
|
||||
@@ -114,6 +110,18 @@ func (m *FakeConnectionLogger) Contains(t testing.TB, expected database.UpsertCo
|
||||
t.Logf("connection log %d: expected ConnectionID %s, got %s", idx+1, expected.ConnectionID.UUID, cl.ConnectionID.UUID)
|
||||
continue
|
||||
}
|
||||
if expected.DisconnectReason.Valid && cl.DisconnectReason.String != expected.DisconnectReason.String {
|
||||
t.Logf("connection log %d: expected DisconnectReason %s, got %s", idx+1, expected.DisconnectReason.String, cl.DisconnectReason.String)
|
||||
continue
|
||||
}
|
||||
if !expected.Time.IsZero() && expected.Time != cl.Time {
|
||||
t.Logf("connection log %d: expected Time %s, got %s", idx+1, expected.Time, cl.Time)
|
||||
continue
|
||||
}
|
||||
if expected.ConnectionStatus != "" && expected.ConnectionStatus != cl.ConnectionStatus {
|
||||
t.Logf("connection log %d: expected ConnectionStatus %s, got %s", idx+1, expected.ConnectionStatus, cl.ConnectionStatus)
|
||||
continue
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
|
||||
@@ -9,10 +9,10 @@ const (
|
||||
CheckOneTimePasscodeSet CheckConstraint = "one_time_passcode_set" // users
|
||||
CheckUsersUsernameMinLength CheckConstraint = "users_username_min_length" // users
|
||||
CheckMaxProvisionerLogsLength CheckConstraint = "max_provisioner_logs_length" // provisioner_jobs
|
||||
CheckValidationMonotonicOrder CheckConstraint = "validation_monotonic_order" // template_version_parameters
|
||||
CheckUsageEventTypeCheck CheckConstraint = "usage_event_type_check" // usage_events
|
||||
CheckMaxLogsLength CheckConstraint = "max_logs_length" // workspace_agents
|
||||
CheckSubsystemsNotNone CheckConstraint = "subsystems_not_none" // workspace_agents
|
||||
CheckWorkspaceBuildsAiTaskSidebarAppIDRequired CheckConstraint = "workspace_builds_ai_task_sidebar_app_id_required" // workspace_builds
|
||||
CheckWorkspaceBuildsDeadlineBelowMaxDeadline CheckConstraint = "workspace_builds_deadline_below_max_deadline" // workspace_builds
|
||||
CheckValidationMonotonicOrder CheckConstraint = "validation_monotonic_order" // template_version_parameters
|
||||
CheckUsageEventTypeCheck CheckConstraint = "usage_event_type_check" // usage_events
|
||||
)
|
||||
|
||||
@@ -693,13 +693,13 @@ func SlimRoleFromName(name string) codersdk.SlimRole {
|
||||
func RBACRole(role rbac.Role) codersdk.Role {
|
||||
slim := SlimRole(role)
|
||||
|
||||
orgPerms := role.Org[slim.OrganizationID]
|
||||
orgPerms := role.ByOrgID[slim.OrganizationID]
|
||||
return codersdk.Role{
|
||||
Name: slim.Name,
|
||||
OrganizationID: slim.OrganizationID,
|
||||
DisplayName: slim.DisplayName,
|
||||
SitePermissions: List(role.Site, RBACPermission),
|
||||
OrganizationPermissions: List(orgPerms, RBACPermission),
|
||||
OrganizationPermissions: List(orgPerms.Org, RBACPermission),
|
||||
UserPermissions: List(role.User, RBACPermission),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -219,7 +219,9 @@ var (
|
||||
rbac.ResourceUser.Type: {policy.ActionRead, policy.ActionReadPersonal, policy.ActionUpdatePersonal},
|
||||
rbac.ResourceWorkspaceDormant.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStop},
|
||||
rbac.ResourceWorkspace.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop, policy.ActionCreateAgent},
|
||||
rbac.ResourceApiKey.Type: {policy.WildcardSymbol},
|
||||
// Provisionerd needs to read and update tasks associated with workspaces.
|
||||
rbac.ResourceTask.Type: {policy.ActionRead, policy.ActionUpdate},
|
||||
rbac.ResourceApiKey.Type: {policy.WildcardSymbol},
|
||||
// When org scoped provisioner credentials are implemented,
|
||||
// this can be reduced to read a specific org.
|
||||
rbac.ResourceOrganization.Type: {policy.ActionRead},
|
||||
@@ -232,8 +234,8 @@ var (
|
||||
// Provisionerd creates usage events
|
||||
rbac.ResourceUsageEvent.Type: {policy.ActionCreate},
|
||||
}),
|
||||
Org: map[string][]rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
ByOrgID: map[string]rbac.OrgPermissions{},
|
||||
},
|
||||
}),
|
||||
Scope: rbac.ScopeAll,
|
||||
@@ -257,8 +259,8 @@ var (
|
||||
rbac.ResourceWorkspace.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop},
|
||||
rbac.ResourceWorkspaceDormant.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStop},
|
||||
}),
|
||||
Org: map[string][]rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
ByOrgID: map[string]rbac.OrgPermissions{},
|
||||
},
|
||||
}),
|
||||
Scope: rbac.ScopeAll,
|
||||
@@ -274,13 +276,14 @@ var (
|
||||
Identifier: rbac.RoleIdentifier{Name: "jobreaper"},
|
||||
DisplayName: "Job Reaper Daemon",
|
||||
Site: rbac.Permissions(map[string][]policy.Action{
|
||||
rbac.ResourceSystem.Type: {policy.WildcardSymbol},
|
||||
rbac.ResourceTemplate.Type: {policy.ActionRead},
|
||||
rbac.ResourceWorkspace.Type: {policy.ActionRead, policy.ActionUpdate},
|
||||
rbac.ResourceProvisionerJobs.Type: {policy.ActionRead, policy.ActionUpdate},
|
||||
rbac.ResourceSystem.Type: {policy.WildcardSymbol},
|
||||
rbac.ResourceTemplate.Type: {policy.ActionRead, policy.ActionUpdate},
|
||||
rbac.ResourceWorkspace.Type: {policy.ActionRead, policy.ActionUpdate},
|
||||
rbac.ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionUpdate},
|
||||
rbac.ResourceProvisionerJobs.Type: {policy.ActionRead, policy.ActionUpdate},
|
||||
}),
|
||||
Org: map[string][]rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
ByOrgID: map[string]rbac.OrgPermissions{},
|
||||
},
|
||||
}),
|
||||
Scope: rbac.ScopeAll,
|
||||
@@ -298,8 +301,8 @@ var (
|
||||
Site: rbac.Permissions(map[string][]policy.Action{
|
||||
rbac.ResourceCryptoKey.Type: {policy.WildcardSymbol},
|
||||
}),
|
||||
Org: map[string][]rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
ByOrgID: map[string]rbac.OrgPermissions{},
|
||||
},
|
||||
}),
|
||||
Scope: rbac.ScopeAll,
|
||||
@@ -317,8 +320,8 @@ var (
|
||||
Site: rbac.Permissions(map[string][]policy.Action{
|
||||
rbac.ResourceCryptoKey.Type: {policy.WildcardSymbol},
|
||||
}),
|
||||
Org: map[string][]rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
ByOrgID: map[string]rbac.OrgPermissions{},
|
||||
},
|
||||
}),
|
||||
Scope: rbac.ScopeAll,
|
||||
@@ -335,8 +338,8 @@ var (
|
||||
Site: rbac.Permissions(map[string][]policy.Action{
|
||||
rbac.ResourceConnectionLog.Type: {policy.ActionUpdate, policy.ActionRead},
|
||||
}),
|
||||
Org: map[string][]rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
ByOrgID: map[string]rbac.OrgPermissions{},
|
||||
},
|
||||
}),
|
||||
Scope: rbac.ScopeAll,
|
||||
@@ -356,8 +359,8 @@ var (
|
||||
rbac.ResourceWebpushSubscription.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete},
|
||||
rbac.ResourceDeploymentConfig.Type: {policy.ActionRead, policy.ActionUpdate}, // To read and upsert VAPID keys
|
||||
}),
|
||||
Org: map[string][]rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
ByOrgID: map[string]rbac.OrgPermissions{},
|
||||
},
|
||||
}),
|
||||
Scope: rbac.ScopeAll,
|
||||
@@ -375,8 +378,8 @@ var (
|
||||
// The workspace monitor needs to be able to update monitors
|
||||
rbac.ResourceWorkspaceAgentResourceMonitor.Type: {policy.ActionUpdate},
|
||||
}),
|
||||
Org: map[string][]rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
ByOrgID: map[string]rbac.OrgPermissions{},
|
||||
},
|
||||
}),
|
||||
Scope: rbac.ScopeAll,
|
||||
@@ -392,12 +395,12 @@ var (
|
||||
Identifier: rbac.RoleIdentifier{Name: "subagentapi"},
|
||||
DisplayName: "Sub Agent API",
|
||||
Site: []rbac.Permission{},
|
||||
Org: map[string][]rbac.Permission{
|
||||
orgID.String(): {},
|
||||
},
|
||||
User: rbac.Permissions(map[string][]policy.Action{
|
||||
rbac.ResourceWorkspace.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionCreateAgent, policy.ActionDeleteAgent},
|
||||
}),
|
||||
ByOrgID: map[string]rbac.OrgPermissions{
|
||||
orgID.String(): {},
|
||||
},
|
||||
},
|
||||
}),
|
||||
Scope: rbac.ScopeAll,
|
||||
@@ -436,8 +439,8 @@ var (
|
||||
rbac.ResourceOauth2App.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete},
|
||||
rbac.ResourceOauth2AppSecret.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete},
|
||||
}),
|
||||
Org: map[string][]rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
ByOrgID: map[string]rbac.OrgPermissions{},
|
||||
},
|
||||
}),
|
||||
Scope: rbac.ScopeAll,
|
||||
@@ -454,8 +457,8 @@ var (
|
||||
Site: rbac.Permissions(map[string][]policy.Action{
|
||||
rbac.ResourceProvisionerDaemon.Type: {policy.ActionRead},
|
||||
}),
|
||||
Org: map[string][]rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
ByOrgID: map[string]rbac.OrgPermissions{},
|
||||
},
|
||||
}),
|
||||
Scope: rbac.ScopeAll,
|
||||
@@ -531,8 +534,8 @@ var (
|
||||
Site: rbac.Permissions(map[string][]policy.Action{
|
||||
rbac.ResourceFile.Type: {policy.ActionRead},
|
||||
}),
|
||||
Org: map[string][]rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
ByOrgID: map[string]rbac.OrgPermissions{},
|
||||
},
|
||||
}),
|
||||
Scope: rbac.ScopeAll,
|
||||
@@ -552,8 +555,8 @@ var (
|
||||
// reads/processes them.
|
||||
rbac.ResourceUsageEvent.Type: {policy.ActionRead, policy.ActionUpdate},
|
||||
}),
|
||||
Org: map[string][]rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
ByOrgID: map[string]rbac.OrgPermissions{},
|
||||
},
|
||||
}),
|
||||
Scope: rbac.ScopeAll,
|
||||
@@ -576,8 +579,8 @@ var (
|
||||
rbac.ResourceApiKey.Type: {policy.ActionRead}, // Validate API keys.
|
||||
rbac.ResourceAibridgeInterception.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate},
|
||||
}),
|
||||
Org: map[string][]rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
ByOrgID: map[string]rbac.OrgPermissions{},
|
||||
},
|
||||
}),
|
||||
Scope: rbac.ScopeAll,
|
||||
@@ -1253,13 +1256,13 @@ func (q *querier) customRoleCheck(ctx context.Context, role database.CustomRole)
|
||||
return xerrors.Errorf("invalid role: %w", err)
|
||||
}
|
||||
|
||||
if len(rbacRole.Org) > 0 && len(rbacRole.Site) > 0 {
|
||||
if len(rbacRole.ByOrgID) > 0 && len(rbacRole.Site) > 0 {
|
||||
// This is a choice to keep roles simple. If we allow mixing site and org scoped perms, then knowing who can
|
||||
// do what gets more complicated.
|
||||
return xerrors.Errorf("invalid custom role, cannot assign both org and site permissions at the same time")
|
||||
}
|
||||
|
||||
if len(rbacRole.Org) > 1 {
|
||||
if len(rbacRole.ByOrgID) > 1 {
|
||||
// Again to avoid more complexity in our roles
|
||||
return xerrors.Errorf("invalid custom role, cannot assign permissions to more than 1 org at a time")
|
||||
}
|
||||
@@ -1272,8 +1275,8 @@ func (q *querier) customRoleCheck(ctx context.Context, role database.CustomRole)
|
||||
}
|
||||
}
|
||||
|
||||
for orgID, perms := range rbacRole.Org {
|
||||
for _, orgPerm := range perms {
|
||||
for orgID, perms := range rbacRole.ByOrgID {
|
||||
for _, orgPerm := range perms.Org {
|
||||
err := q.customRoleEscalationCheck(ctx, act, orgPerm, rbac.Object{OrgID: orgID, Type: orgPerm.ResourceType})
|
||||
if err != nil {
|
||||
return xerrors.Errorf("org=%q: %w", orgID, err)
|
||||
@@ -2882,6 +2885,14 @@ func (q *querier) GetTailnetTunnelPeerIDs(ctx context.Context, srcID uuid.UUID)
|
||||
return q.db.GetTailnetTunnelPeerIDs(ctx, srcID)
|
||||
}
|
||||
|
||||
func (q *querier) GetTaskByID(ctx context.Context, id uuid.UUID) (database.Task, error) {
|
||||
return fetch(q.log, q.auth, q.db.GetTaskByID)(ctx, id)
|
||||
}
|
||||
|
||||
func (q *querier) GetTaskByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (database.Task, error) {
|
||||
return fetch(q.log, q.auth, q.db.GetTaskByWorkspaceID)(ctx, workspaceID)
|
||||
}
|
||||
|
||||
func (q *querier) GetTelemetryItem(ctx context.Context, key string) (database.TelemetryItem, error) {
|
||||
if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil {
|
||||
return database.TelemetryItem{}, err
|
||||
@@ -4107,6 +4118,17 @@ func (q *querier) InsertReplica(ctx context.Context, arg database.InsertReplicaP
|
||||
return q.db.InsertReplica(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) InsertTask(ctx context.Context, arg database.InsertTaskParams) (database.TaskTable, error) {
|
||||
// Ensure the actor can access the specified template version (and thus its template).
|
||||
if _, err := q.GetTemplateVersionByID(ctx, arg.TemplateVersionID); err != nil {
|
||||
return database.TaskTable{}, err
|
||||
}
|
||||
|
||||
obj := rbac.ResourceTask.WithOwner(arg.OwnerID.String()).InOrg(arg.OrganizationID)
|
||||
|
||||
return insert(q.log, q.auth, obj, q.db.InsertTask)(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) InsertTelemetryItemIfNotExists(ctx context.Context, arg database.InsertTelemetryItemIfNotExistsParams) error {
|
||||
if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil {
|
||||
return err
|
||||
@@ -4463,6 +4485,11 @@ func (q *querier) ListProvisionerKeysByOrganizationExcludeReserved(ctx context.C
|
||||
return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.ListProvisionerKeysByOrganizationExcludeReserved)(ctx, organizationID)
|
||||
}
|
||||
|
||||
func (q *querier) ListTasks(ctx context.Context, arg database.ListTasksParams) ([]database.Task, error) {
|
||||
// TODO(Cian): replace this with a sql filter for improved performance. https://github.com/coder/internal/issues/1061
|
||||
return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.ListTasks)(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) ListUserSecrets(ctx context.Context, userID uuid.UUID) ([]database.UserSecret, error) {
|
||||
obj := rbac.ResourceUserSecret.WithOwner(userID.String())
|
||||
if err := q.authorizeContext(ctx, policy.ActionRead, obj); err != nil {
|
||||
@@ -5665,6 +5692,18 @@ func (q *querier) UpsertTailnetTunnel(ctx context.Context, arg database.UpsertTa
|
||||
return q.db.UpsertTailnetTunnel(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) UpsertTaskWorkspaceApp(ctx context.Context, arg database.UpsertTaskWorkspaceAppParams) (database.TaskWorkspaceApp, error) {
|
||||
// Fetch the task to derive the RBAC object and authorize update on it.
|
||||
task, err := q.db.GetTaskByID(ctx, arg.TaskID)
|
||||
if err != nil {
|
||||
return database.TaskWorkspaceApp{}, err
|
||||
}
|
||||
if err := q.authorizeContext(ctx, policy.ActionUpdate, task); err != nil {
|
||||
return database.TaskWorkspaceApp{}, err
|
||||
}
|
||||
return q.db.UpsertTaskWorkspaceApp(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) UpsertTelemetryItem(ctx context.Context, arg database.UpsertTelemetryItemParams) error {
|
||||
if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil {
|
||||
return err
|
||||
|
||||
@@ -1639,10 +1639,43 @@ func (s *MethodTestSuite) TestUser() {
|
||||
}
|
||||
|
||||
func (s *MethodTestSuite) TestWorkspace() {
|
||||
// The Workspace object differs it's type based on whether it's dormant or
|
||||
// not, which is why we have two tests for it. To ensure we are actually
|
||||
// testing the correct RBAC objects, we also explicitly create the expected
|
||||
// object here rather than passing in the model.
|
||||
s.Run("GetWorkspaceByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
ws := testutil.Fake(s.T(), faker, database.Workspace{})
|
||||
ws.DormantAt = sql.NullTime{
|
||||
Time: time.Time{},
|
||||
Valid: false,
|
||||
}
|
||||
// Ensure the RBAC is not the dormant type.
|
||||
require.Equal(s.T(), rbac.ResourceWorkspace.Type, ws.RBACObject().Type)
|
||||
dbm.EXPECT().GetWorkspaceByID(gomock.Any(), ws.ID).Return(ws, nil).AnyTimes()
|
||||
check.Args(ws.ID).Asserts(ws, policy.ActionRead).Returns(ws)
|
||||
// Explicitly create the expected object.
|
||||
expected := rbac.ResourceWorkspace.WithID(ws.ID).
|
||||
InOrg(ws.OrganizationID).
|
||||
WithOwner(ws.OwnerID.String()).
|
||||
WithGroupACL(ws.GroupACL.RBACACL()).
|
||||
WithACLUserList(ws.UserACL.RBACACL())
|
||||
check.Args(ws.ID).Asserts(expected, policy.ActionRead).Returns(ws)
|
||||
}))
|
||||
s.Run("DormantWorkspace/GetWorkspaceByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
ws := testutil.Fake(s.T(), faker, database.Workspace{
|
||||
DormantAt: sql.NullTime{
|
||||
Time: time.Now().Add(-time.Hour),
|
||||
Valid: true,
|
||||
},
|
||||
})
|
||||
// Ensure the RBAC changed automatically.
|
||||
require.Equal(s.T(), rbac.ResourceWorkspaceDormant.Type, ws.RBACObject().Type)
|
||||
dbm.EXPECT().GetWorkspaceByID(gomock.Any(), ws.ID).Return(ws, nil).AnyTimes()
|
||||
// Explicitly create the expected object.
|
||||
expected := rbac.ResourceWorkspaceDormant.
|
||||
WithID(ws.ID).
|
||||
InOrg(ws.OrganizationID).
|
||||
WithOwner(ws.OwnerID.String())
|
||||
check.Args(ws.ID).Asserts(expected, policy.ActionRead).Returns(ws)
|
||||
}))
|
||||
s.Run("GetWorkspaceByResourceID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
ws := testutil.Fake(s.T(), faker, database.Workspace{})
|
||||
@@ -2314,6 +2347,65 @@ func (s *MethodTestSuite) TestWorkspacePortSharing() {
|
||||
}))
|
||||
}
|
||||
|
||||
func (s *MethodTestSuite) TestTasks() {
|
||||
s.Run("GetTaskByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
task := testutil.Fake(s.T(), faker, database.Task{})
|
||||
dbm.EXPECT().GetTaskByID(gomock.Any(), task.ID).Return(task, nil).AnyTimes()
|
||||
check.Args(task.ID).Asserts(task, policy.ActionRead).Returns(task)
|
||||
}))
|
||||
s.Run("InsertTask", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
tpl := testutil.Fake(s.T(), faker, database.Template{})
|
||||
tv := testutil.Fake(s.T(), faker, database.TemplateVersion{
|
||||
TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
|
||||
OrganizationID: tpl.OrganizationID,
|
||||
})
|
||||
|
||||
arg := testutil.Fake(s.T(), faker, database.InsertTaskParams{
|
||||
OrganizationID: tpl.OrganizationID,
|
||||
TemplateVersionID: tv.ID,
|
||||
})
|
||||
|
||||
dbm.EXPECT().GetTemplateVersionByID(gomock.Any(), tv.ID).Return(tv, nil).AnyTimes()
|
||||
dbm.EXPECT().GetTemplateByID(gomock.Any(), tpl.ID).Return(tpl, nil).AnyTimes()
|
||||
dbm.EXPECT().InsertTask(gomock.Any(), arg).Return(database.TaskTable{}, nil).AnyTimes()
|
||||
|
||||
check.Args(arg).Asserts(
|
||||
tpl, policy.ActionRead,
|
||||
rbac.ResourceTask.InOrg(arg.OrganizationID).WithOwner(arg.OwnerID.String()), policy.ActionCreate,
|
||||
).Returns(database.TaskTable{})
|
||||
}))
|
||||
s.Run("UpsertTaskWorkspaceApp", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
task := testutil.Fake(s.T(), faker, database.Task{})
|
||||
arg := database.UpsertTaskWorkspaceAppParams{
|
||||
TaskID: task.ID,
|
||||
WorkspaceBuildNumber: 1,
|
||||
}
|
||||
|
||||
dbm.EXPECT().GetTaskByID(gomock.Any(), task.ID).Return(task, nil).AnyTimes()
|
||||
dbm.EXPECT().UpsertTaskWorkspaceApp(gomock.Any(), arg).Return(database.TaskWorkspaceApp{}, nil).AnyTimes()
|
||||
|
||||
check.Args(arg).Asserts(task, policy.ActionUpdate).Returns(database.TaskWorkspaceApp{})
|
||||
}))
|
||||
s.Run("GetTaskByWorkspaceID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
task := testutil.Fake(s.T(), faker, database.Task{})
|
||||
task.WorkspaceID = uuid.NullUUID{UUID: uuid.New(), Valid: true}
|
||||
dbm.EXPECT().GetTaskByWorkspaceID(gomock.Any(), task.WorkspaceID.UUID).Return(task, nil).AnyTimes()
|
||||
check.Args(task.WorkspaceID.UUID).Asserts(task, policy.ActionRead).Returns(task)
|
||||
}))
|
||||
s.Run("ListTasks", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
u1 := testutil.Fake(s.T(), faker, database.User{})
|
||||
u2 := testutil.Fake(s.T(), faker, database.User{})
|
||||
org1 := testutil.Fake(s.T(), faker, database.Organization{})
|
||||
org2 := testutil.Fake(s.T(), faker, database.Organization{})
|
||||
_ = testutil.Fake(s.T(), faker, database.OrganizationMember{UserID: u1.ID, OrganizationID: org1.ID})
|
||||
_ = testutil.Fake(s.T(), faker, database.OrganizationMember{UserID: u2.ID, OrganizationID: org2.ID})
|
||||
t1 := testutil.Fake(s.T(), faker, database.Task{OwnerID: u1.ID})
|
||||
t2 := testutil.Fake(s.T(), faker, database.Task{OwnerID: u2.ID})
|
||||
dbm.EXPECT().ListTasks(gomock.Any(), gomock.Any()).Return([]database.Task{t1, t2}, nil).AnyTimes()
|
||||
check.Args(database.ListTasksParams{}).Asserts(t1, policy.ActionRead, t2, policy.ActionRead).Returns([]database.Task{t1, t2})
|
||||
}))
|
||||
}
|
||||
|
||||
func (s *MethodTestSuite) TestProvisionerKeys() {
|
||||
s.Run("InsertProvisionerKey", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
org := testutil.Fake(s.T(), faker, database.Organization{})
|
||||
@@ -2484,10 +2576,12 @@ func (s *MethodTestSuite) TestExtraMethods() {
|
||||
|
||||
ds, err := db.GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisioner(context.Background(), database.GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerParams{
|
||||
OrganizationID: org.ID,
|
||||
InitiatorID: uuid.Nil,
|
||||
})
|
||||
s.NoError(err, "get provisioner jobs by org")
|
||||
check.Args(database.GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerParams{
|
||||
OrganizationID: org.ID,
|
||||
InitiatorID: uuid.Nil,
|
||||
}).Asserts(j1, policy.ActionRead, j2, policy.ActionRead).Returns(ds)
|
||||
}))
|
||||
}
|
||||
|
||||
@@ -225,6 +225,10 @@ func (s *MethodTestSuite) SubtestWithDB(db database.Store, testCaseF func(db dat
|
||||
if testCase.outputs != nil {
|
||||
// Assert the required outputs
|
||||
s.Equal(len(testCase.outputs), len(outputs), "method %q returned unexpected number of outputs", methodName)
|
||||
cmpOptions := []cmp.Option{
|
||||
// Equate nil and empty slices.
|
||||
cmpopts.EquateEmpty(),
|
||||
}
|
||||
for i := range outputs {
|
||||
a, b := testCase.outputs[i].Interface(), outputs[i].Interface()
|
||||
|
||||
@@ -232,10 +236,9 @@ func (s *MethodTestSuite) SubtestWithDB(db database.Store, testCaseF func(db dat
|
||||
// first check if the values are equal with regard to order.
|
||||
// If not, re-check disregarding order and show a nice diff
|
||||
// output of the two values.
|
||||
if !cmp.Equal(a, b, cmpopts.EquateEmpty()) {
|
||||
if diff := cmp.Diff(a, b,
|
||||
// Equate nil and empty slices.
|
||||
cmpopts.EquateEmpty(),
|
||||
if !cmp.Equal(a, b, cmpOptions...) {
|
||||
diffOpts := append(
|
||||
append([]cmp.Option{}, cmpOptions...),
|
||||
// Allow slice order to be ignored.
|
||||
cmpopts.SortSlices(func(a, b any) bool {
|
||||
var ab, bb strings.Builder
|
||||
@@ -247,7 +250,8 @@ func (s *MethodTestSuite) SubtestWithDB(db database.Store, testCaseF func(db dat
|
||||
// https://github.com/google/go-cmp/issues/67
|
||||
return ab.String() < bb.String()
|
||||
}),
|
||||
); diff != "" {
|
||||
)
|
||||
if diff := cmp.Diff(a, b, diffOpts...); diff != "" {
|
||||
s.Failf("compare outputs failed", "method %q returned unexpected output %d (-want +got):\n%s", methodName, i, diff)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,6 +24,7 @@ import (
|
||||
"github.com/coder/coder/v2/coderd/rbac"
|
||||
"github.com/coder/coder/v2/coderd/telemetry"
|
||||
"github.com/coder/coder/v2/coderd/wspubsub"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
"github.com/coder/coder/v2/provisionersdk"
|
||||
sdkproto "github.com/coder/coder/v2/provisionersdk/proto"
|
||||
)
|
||||
@@ -55,6 +56,7 @@ type WorkspaceBuildBuilder struct {
|
||||
params []database.WorkspaceBuildParameter
|
||||
agentToken string
|
||||
dispo workspaceBuildDisposition
|
||||
taskAppID uuid.UUID
|
||||
}
|
||||
|
||||
type workspaceBuildDisposition struct {
|
||||
@@ -117,6 +119,27 @@ func (b WorkspaceBuildBuilder) WithAgent(mutations ...func([]*sdkproto.Agent) []
|
||||
return b
|
||||
}
|
||||
|
||||
func (b WorkspaceBuildBuilder) WithTask(seed *sdkproto.App) WorkspaceBuildBuilder {
|
||||
//nolint: revive // returns modified struct
|
||||
b.taskAppID = uuid.New()
|
||||
if seed == nil {
|
||||
seed = &sdkproto.App{}
|
||||
}
|
||||
return b.Params(database.WorkspaceBuildParameter{
|
||||
Name: codersdk.AITaskPromptParameterName,
|
||||
Value: "list me",
|
||||
}).WithAgent(func(a []*sdkproto.Agent) []*sdkproto.Agent {
|
||||
a[0].Apps = []*sdkproto.App{
|
||||
{
|
||||
Id: takeFirst(seed.Id, b.taskAppID.String()),
|
||||
Slug: takeFirst(seed.Slug, "vcode"),
|
||||
Url: takeFirst(seed.Url, ""),
|
||||
},
|
||||
}
|
||||
return a
|
||||
})
|
||||
}
|
||||
|
||||
func (b WorkspaceBuildBuilder) Starting() WorkspaceBuildBuilder {
|
||||
//nolint: revive // returns modified struct
|
||||
b.dispo.starting = true
|
||||
@@ -134,6 +157,14 @@ func (b WorkspaceBuildBuilder) Do() WorkspaceResponse {
|
||||
b.seed.ID = uuid.New()
|
||||
b.seed.JobID = jobID
|
||||
|
||||
if b.taskAppID != uuid.Nil {
|
||||
b.seed.HasAITask = sql.NullBool{
|
||||
Bool: true,
|
||||
Valid: true,
|
||||
}
|
||||
b.seed.AITaskSidebarAppID = uuid.NullUUID{UUID: b.taskAppID, Valid: true}
|
||||
}
|
||||
|
||||
resp := WorkspaceResponse{
|
||||
AgentToken: b.agentToken,
|
||||
}
|
||||
|
||||
@@ -27,6 +27,8 @@ import (
|
||||
"github.com/coder/coder/v2/coderd/database/provisionerjobs"
|
||||
"github.com/coder/coder/v2/coderd/database/pubsub"
|
||||
"github.com/coder/coder/v2/coderd/rbac"
|
||||
"github.com/coder/coder/v2/coderd/rbac/policy"
|
||||
"github.com/coder/coder/v2/coderd/taskname"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
"github.com/coder/coder/v2/cryptorand"
|
||||
"github.com/coder/coder/v2/provisionerd/proto"
|
||||
@@ -186,7 +188,7 @@ func APIKey(t testing.TB, db database.Store, seed database.APIKey, munge ...func
|
||||
UpdatedAt: takeFirst(seed.UpdatedAt, dbtime.Now()),
|
||||
LoginType: takeFirst(seed.LoginType, database.LoginTypePassword),
|
||||
Scopes: takeFirstSlice([]database.APIKeyScope(seed.Scopes), []database.APIKeyScope{database.ApiKeyScopeCoderAll}),
|
||||
AllowList: takeFirstSlice(seed.AllowList, database.AllowList{database.AllowListWildcard()}),
|
||||
AllowList: takeFirstSlice(seed.AllowList, database.AllowList{{Type: policy.WildcardSymbol, ID: policy.WildcardSymbol}}),
|
||||
TokenName: takeFirst(seed.TokenName),
|
||||
}
|
||||
for _, fn := range munge {
|
||||
@@ -420,6 +422,14 @@ func Workspace(t testing.TB, db database.Store, orig database.WorkspaceTable) da
|
||||
require.NoError(t, err, "set workspace as deleted")
|
||||
workspace.Deleted = true
|
||||
}
|
||||
if orig.DormantAt.Valid {
|
||||
_, err = db.UpdateWorkspaceDormantDeletingAt(genCtx, database.UpdateWorkspaceDormantDeletingAtParams{
|
||||
ID: workspace.ID,
|
||||
DormantAt: orig.DormantAt,
|
||||
})
|
||||
require.NoError(t, err, "set workspace as dormant")
|
||||
workspace.DormantAt = orig.DormantAt
|
||||
}
|
||||
return workspace
|
||||
}
|
||||
|
||||
@@ -1551,6 +1561,43 @@ func AIBridgeToolUsage(t testing.TB, db database.Store, seed database.InsertAIBr
|
||||
return toolUsage
|
||||
}
|
||||
|
||||
func Task(t testing.TB, db database.Store, orig database.TaskTable) database.TaskTable {
|
||||
t.Helper()
|
||||
|
||||
parameters := orig.TemplateParameters
|
||||
if parameters == nil {
|
||||
parameters = json.RawMessage([]byte("{}"))
|
||||
}
|
||||
|
||||
task, err := db.InsertTask(genCtx, database.InsertTaskParams{
|
||||
OrganizationID: orig.OrganizationID,
|
||||
OwnerID: orig.OwnerID,
|
||||
Name: takeFirst(orig.Name, taskname.GenerateFallback()),
|
||||
WorkspaceID: orig.WorkspaceID,
|
||||
TemplateVersionID: orig.TemplateVersionID,
|
||||
TemplateParameters: parameters,
|
||||
Prompt: orig.Prompt,
|
||||
CreatedAt: takeFirst(orig.CreatedAt, dbtime.Now()),
|
||||
})
|
||||
require.NoError(t, err, "failed to insert task")
|
||||
|
||||
return task
|
||||
}
|
||||
|
||||
func TaskWorkspaceApp(t testing.TB, db database.Store, orig database.TaskWorkspaceApp) database.TaskWorkspaceApp {
|
||||
t.Helper()
|
||||
|
||||
app, err := db.UpsertTaskWorkspaceApp(genCtx, database.UpsertTaskWorkspaceAppParams{
|
||||
TaskID: orig.TaskID,
|
||||
WorkspaceBuildNumber: orig.WorkspaceBuildNumber,
|
||||
WorkspaceAgentID: orig.WorkspaceAgentID,
|
||||
WorkspaceAppID: orig.WorkspaceAppID,
|
||||
})
|
||||
require.NoError(t, err, "failed to upsert task workspace app")
|
||||
|
||||
return app
|
||||
}
|
||||
|
||||
func provisionerJobTiming(t testing.TB, db database.Store, seed database.ProvisionerJobTiming) database.ProvisionerJobTiming {
|
||||
timing, err := db.InsertProvisionerJobTimings(genCtx, database.InsertProvisionerJobTimingsParams{
|
||||
JobID: takeFirst(seed.JobID, uuid.New()),
|
||||
|
||||
@@ -1482,6 +1482,20 @@ func (m queryMetricsStore) GetTailnetTunnelPeerIDs(ctx context.Context, srcID uu
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) GetTaskByID(ctx context.Context, id uuid.UUID) (database.Task, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.GetTaskByID(ctx, id)
|
||||
m.queryLatencies.WithLabelValues("GetTaskByID").Observe(time.Since(start).Seconds())
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) GetTaskByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (database.Task, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.GetTaskByWorkspaceID(ctx, workspaceID)
|
||||
m.queryLatencies.WithLabelValues("GetTaskByWorkspaceID").Observe(time.Since(start).Seconds())
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) GetTelemetryItem(ctx context.Context, key string) (database.TelemetryItem, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.GetTelemetryItem(ctx, key)
|
||||
@@ -2455,6 +2469,13 @@ func (m queryMetricsStore) InsertReplica(ctx context.Context, arg database.Inser
|
||||
return replica, err
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) InsertTask(ctx context.Context, arg database.InsertTaskParams) (database.TaskTable, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.InsertTask(ctx, arg)
|
||||
m.queryLatencies.WithLabelValues("InsertTask").Observe(time.Since(start).Seconds())
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) InsertTelemetryItemIfNotExists(ctx context.Context, arg database.InsertTelemetryItemIfNotExistsParams) error {
|
||||
start := time.Now()
|
||||
r0 := m.s.InsertTelemetryItemIfNotExists(ctx, arg)
|
||||
@@ -2714,6 +2735,13 @@ func (m queryMetricsStore) ListProvisionerKeysByOrganizationExcludeReserved(ctx
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) ListTasks(ctx context.Context, arg database.ListTasksParams) ([]database.Task, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.ListTasks(ctx, arg)
|
||||
m.queryLatencies.WithLabelValues("ListTasks").Observe(time.Since(start).Seconds())
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) ListUserSecrets(ctx context.Context, userID uuid.UUID) ([]database.UserSecret, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.ListUserSecrets(ctx, userID)
|
||||
@@ -3533,6 +3561,13 @@ func (m queryMetricsStore) UpsertTailnetTunnel(ctx context.Context, arg database
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) UpsertTaskWorkspaceApp(ctx context.Context, arg database.UpsertTaskWorkspaceAppParams) (database.TaskWorkspaceApp, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.UpsertTaskWorkspaceApp(ctx, arg)
|
||||
m.queryLatencies.WithLabelValues("UpsertTaskWorkspaceApp").Observe(time.Since(start).Seconds())
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) UpsertTelemetryItem(ctx context.Context, arg database.UpsertTelemetryItemParams) error {
|
||||
start := time.Now()
|
||||
r0 := m.s.UpsertTelemetryItem(ctx, arg)
|
||||
|
||||
@@ -3119,6 +3119,36 @@ func (mr *MockStoreMockRecorder) GetTailnetTunnelPeerIDs(ctx, srcID any) *gomock
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTailnetTunnelPeerIDs", reflect.TypeOf((*MockStore)(nil).GetTailnetTunnelPeerIDs), ctx, srcID)
|
||||
}
|
||||
|
||||
// GetTaskByID mocks base method.
|
||||
func (m *MockStore) GetTaskByID(ctx context.Context, id uuid.UUID) (database.Task, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "GetTaskByID", ctx, id)
|
||||
ret0, _ := ret[0].(database.Task)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// GetTaskByID indicates an expected call of GetTaskByID.
|
||||
func (mr *MockStoreMockRecorder) GetTaskByID(ctx, id any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTaskByID", reflect.TypeOf((*MockStore)(nil).GetTaskByID), ctx, id)
|
||||
}
|
||||
|
||||
// GetTaskByWorkspaceID mocks base method.
|
||||
func (m *MockStore) GetTaskByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (database.Task, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "GetTaskByWorkspaceID", ctx, workspaceID)
|
||||
ret0, _ := ret[0].(database.Task)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// GetTaskByWorkspaceID indicates an expected call of GetTaskByWorkspaceID.
|
||||
func (mr *MockStoreMockRecorder) GetTaskByWorkspaceID(ctx, workspaceID any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTaskByWorkspaceID", reflect.TypeOf((*MockStore)(nil).GetTaskByWorkspaceID), ctx, workspaceID)
|
||||
}
|
||||
|
||||
// GetTelemetryItem mocks base method.
|
||||
func (m *MockStore) GetTelemetryItem(ctx context.Context, key string) (database.TelemetryItem, error) {
|
||||
m.ctrl.T.Helper()
|
||||
@@ -5244,6 +5274,21 @@ func (mr *MockStoreMockRecorder) InsertReplica(ctx, arg any) *gomock.Call {
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertReplica", reflect.TypeOf((*MockStore)(nil).InsertReplica), ctx, arg)
|
||||
}
|
||||
|
||||
// InsertTask mocks base method.
|
||||
func (m *MockStore) InsertTask(ctx context.Context, arg database.InsertTaskParams) (database.TaskTable, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "InsertTask", ctx, arg)
|
||||
ret0, _ := ret[0].(database.TaskTable)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// InsertTask indicates an expected call of InsertTask.
|
||||
func (mr *MockStoreMockRecorder) InsertTask(ctx, arg any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertTask", reflect.TypeOf((*MockStore)(nil).InsertTask), ctx, arg)
|
||||
}
|
||||
|
||||
// InsertTelemetryItemIfNotExists mocks base method.
|
||||
func (m *MockStore) InsertTelemetryItemIfNotExists(ctx context.Context, arg database.InsertTelemetryItemIfNotExistsParams) error {
|
||||
m.ctrl.T.Helper()
|
||||
@@ -5803,6 +5848,21 @@ func (mr *MockStoreMockRecorder) ListProvisionerKeysByOrganizationExcludeReserve
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListProvisionerKeysByOrganizationExcludeReserved", reflect.TypeOf((*MockStore)(nil).ListProvisionerKeysByOrganizationExcludeReserved), ctx, organizationID)
|
||||
}
|
||||
|
||||
// ListTasks mocks base method.
|
||||
func (m *MockStore) ListTasks(ctx context.Context, arg database.ListTasksParams) ([]database.Task, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "ListTasks", ctx, arg)
|
||||
ret0, _ := ret[0].([]database.Task)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// ListTasks indicates an expected call of ListTasks.
|
||||
func (mr *MockStoreMockRecorder) ListTasks(ctx, arg any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListTasks", reflect.TypeOf((*MockStore)(nil).ListTasks), ctx, arg)
|
||||
}
|
||||
|
||||
// ListUserSecrets mocks base method.
|
||||
func (m *MockStore) ListUserSecrets(ctx context.Context, userID uuid.UUID) ([]database.UserSecret, error) {
|
||||
m.ctrl.T.Helper()
|
||||
@@ -7517,6 +7577,21 @@ func (mr *MockStoreMockRecorder) UpsertTailnetTunnel(ctx, arg any) *gomock.Call
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertTailnetTunnel", reflect.TypeOf((*MockStore)(nil).UpsertTailnetTunnel), ctx, arg)
|
||||
}
|
||||
|
||||
// UpsertTaskWorkspaceApp mocks base method.
|
||||
func (m *MockStore) UpsertTaskWorkspaceApp(ctx context.Context, arg database.UpsertTaskWorkspaceAppParams) (database.TaskWorkspaceApp, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "UpsertTaskWorkspaceApp", ctx, arg)
|
||||
ret0, _ := ret[0].(database.TaskWorkspaceApp)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// UpsertTaskWorkspaceApp indicates an expected call of UpsertTaskWorkspaceApp.
|
||||
func (mr *MockStoreMockRecorder) UpsertTaskWorkspaceApp(ctx, arg any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertTaskWorkspaceApp", reflect.TypeOf((*MockStore)(nil).UpsertTaskWorkspaceApp), ctx, arg)
|
||||
}
|
||||
|
||||
// UpsertTelemetryItem mocks base method.
|
||||
func (m *MockStore) UpsertTelemetryItem(ctx context.Context, arg database.UpsertTelemetryItemParams) error {
|
||||
m.ctrl.T.Helper()
|
||||
|
||||
@@ -150,7 +150,7 @@ func (b *Broker) init(t TBSubset) error {
|
||||
b.uuid = uuid.New()
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 20*time.Second)
|
||||
defer cancel()
|
||||
b.cleanerFD, err = startCleaner(ctx, b.uuid, coderTestingParams.DSN())
|
||||
b.cleanerFD, err = startCleaner(ctx, t, b.uuid, coderTestingParams.DSN())
|
||||
if err != nil {
|
||||
return xerrors.Errorf("start test db cleaner: %w", err)
|
||||
}
|
||||
|
||||
@@ -22,36 +22,43 @@ const (
|
||||
cleanerRespOK = "OK"
|
||||
envCleanerParentUUID = "DB_CLEANER_PARENT_UUID"
|
||||
envCleanerDSN = "DB_CLEANER_DSN"
|
||||
)
|
||||
|
||||
var (
|
||||
originalWorkingDir string
|
||||
errGettingWorkingDir error
|
||||
envCleanerMagic = "DB_CLEANER_MAGIC"
|
||||
envCleanerMagicValue = "XEHdJqWehWek8AaWwopy" // 20 random characters to make this collision resistant
|
||||
)
|
||||
|
||||
func init() {
|
||||
// We expect our tests to run from somewhere in the project tree where `go run` below in `startCleaner` will
|
||||
// be able to resolve the command package. However, some of the tests modify the working directory during the run.
|
||||
// So, we grab the working directory during package init, before tests are run, and then set that work dir on the
|
||||
// subcommand process before it starts.
|
||||
originalWorkingDir, errGettingWorkingDir = os.Getwd()
|
||||
// We are hijacking the init() function here to do something very non-standard.
|
||||
//
|
||||
// We want to be able to run the cleaner as a subprocess of the test process so that it can outlive the test binary
|
||||
// and still clean up, even if the test process times out or is killed. So, what we do is in startCleaner() below,
|
||||
// which is called in the parent process, we exec our own binary and set a collision-resistant environment variable.
|
||||
// Then here in the init(), which will run before main() and therefore before executing tests, we check for the
|
||||
// environment variable, and if present we know this is the child process and we exec the cleaner. Instead of
|
||||
// returning normally from init() we call os.Exit(). This prevents tests from being re-run in the child process (and
|
||||
// recursion).
|
||||
//
|
||||
// If the magic value is not present, we know we are the parent process and init() returns normally.
|
||||
magicValue := os.Getenv(envCleanerMagic)
|
||||
if magicValue == envCleanerMagicValue {
|
||||
RunCleaner()
|
||||
os.Exit(0)
|
||||
}
|
||||
}
|
||||
|
||||
// startCleaner starts the cleaner in a subprocess. holdThis is an opaque reference that needs to be kept from being
|
||||
// garbage collected until we are done with all test databases (e.g. the end of the process).
|
||||
func startCleaner(ctx context.Context, parentUUID uuid.UUID, dsn string) (holdThis any, err error) {
|
||||
cmd := exec.Command("go", "run", "github.com/coder/coder/v2/coderd/database/dbtestutil/cleanercmd")
|
||||
func startCleaner(ctx context.Context, _ TBSubset, parentUUID uuid.UUID, dsn string) (holdThis any, err error) {
|
||||
bin, err := os.Executable()
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("could not get executable path: %w", err)
|
||||
}
|
||||
cmd := exec.Command(bin)
|
||||
cmd.Env = append(os.Environ(),
|
||||
fmt.Sprintf("%s=%s", envCleanerParentUUID, parentUUID.String()),
|
||||
fmt.Sprintf("%s=%s", envCleanerDSN, dsn),
|
||||
fmt.Sprintf("%s=%s", envCleanerMagic, envCleanerMagicValue),
|
||||
)
|
||||
|
||||
// c.f. comment on `func init()` in this file.
|
||||
if errGettingWorkingDir != nil {
|
||||
return nil, xerrors.Errorf("failed to get working directory during init: %w", errGettingWorkingDir)
|
||||
}
|
||||
cmd.Dir = originalWorkingDir
|
||||
|
||||
// Here we don't actually use the reference to the stdin pipe, because we never write anything to it. When this
|
||||
// process exits, the pipe is closed by the OS and this triggers the cleaner to do its cleaning work. But, we do
|
||||
// need to hang on to a reference to it so that it doesn't get garbage collected and trigger cleanup early.
|
||||
@@ -178,8 +185,7 @@ func (c *cleaner) waitAndClean() {
|
||||
}
|
||||
|
||||
// RunCleaner runs the test database cleaning process. It takes no arguments but uses stdio and environment variables
|
||||
// for its operation. It is designed to be launched as the only task of a `main()` process, but is included in this
|
||||
// package to share constants with the parent code that launches it above.
|
||||
// for its operation.
|
||||
//
|
||||
// The cleaner is designed to run in a separate process from the main test suite, connected over stdio. If the main test
|
||||
// process ends (panics, times out, or is killed) without explicitly discarding the databases it clones, the cleaner
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
package main
|
||||
|
||||
import "github.com/coder/coder/v2/coderd/database/dbtestutil"
|
||||
|
||||
func main() {
|
||||
dbtestutil.RunCleaner()
|
||||
}
|
||||
@@ -242,10 +242,11 @@ func PGDump(dbURL string) ([]byte, error) {
|
||||
"PGCLIENTENCODING=UTF8",
|
||||
"PGDATABASE=", // we should always specify the database name in the connection string
|
||||
}
|
||||
var stdout bytes.Buffer
|
||||
var stdout, stderr bytes.Buffer
|
||||
cmd.Stdout = &stdout
|
||||
cmd.Stderr = &stderr
|
||||
if err := cmd.Run(); err != nil {
|
||||
return nil, xerrors.Errorf("exec pg_dump: %w", err)
|
||||
return nil, xerrors.Errorf("exec pg_dump: %w\n%s", err, stderr.String())
|
||||
}
|
||||
return stdout.Bytes(), nil
|
||||
}
|
||||
|
||||
@@ -166,6 +166,7 @@ type TBSubset interface {
|
||||
Cleanup(func())
|
||||
Helper()
|
||||
Logf(format string, args ...any)
|
||||
TempDir() string
|
||||
}
|
||||
|
||||
// Open creates a new PostgreSQL database instance.
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
Generated
+250
-125
@@ -157,7 +157,52 @@ CREATE TYPE api_key_scope AS ENUM (
|
||||
'coder:workspaces.access',
|
||||
'coder:templates.build',
|
||||
'coder:templates.author',
|
||||
'coder:apikeys.manage_self'
|
||||
'coder:apikeys.manage_self',
|
||||
'aibridge_interception:*',
|
||||
'api_key:*',
|
||||
'assign_org_role:*',
|
||||
'assign_role:*',
|
||||
'audit_log:*',
|
||||
'connection_log:*',
|
||||
'crypto_key:*',
|
||||
'debug_info:*',
|
||||
'deployment_config:*',
|
||||
'deployment_stats:*',
|
||||
'file:*',
|
||||
'group:*',
|
||||
'group_member:*',
|
||||
'idpsync_settings:*',
|
||||
'inbox_notification:*',
|
||||
'license:*',
|
||||
'notification_message:*',
|
||||
'notification_preference:*',
|
||||
'notification_template:*',
|
||||
'oauth2_app:*',
|
||||
'oauth2_app_code_token:*',
|
||||
'oauth2_app_secret:*',
|
||||
'organization:*',
|
||||
'organization_member:*',
|
||||
'prebuilt_workspace:*',
|
||||
'provisioner_daemon:*',
|
||||
'provisioner_jobs:*',
|
||||
'replicas:*',
|
||||
'system:*',
|
||||
'tailnet_coordinator:*',
|
||||
'template:*',
|
||||
'usage_event:*',
|
||||
'user:*',
|
||||
'user_secret:*',
|
||||
'webpush_subscription:*',
|
||||
'workspace:*',
|
||||
'workspace_agent_devcontainers:*',
|
||||
'workspace_agent_resource_monitor:*',
|
||||
'workspace_dormant:*',
|
||||
'workspace_proxy:*',
|
||||
'task:create',
|
||||
'task:read',
|
||||
'task:update',
|
||||
'task:delete',
|
||||
'task:*'
|
||||
);
|
||||
|
||||
CREATE TYPE app_sharing_level AS ENUM (
|
||||
@@ -415,7 +460,8 @@ CREATE TYPE resource_type AS ENUM (
|
||||
'idp_sync_settings_role',
|
||||
'workspace_agent',
|
||||
'workspace_app',
|
||||
'prebuilds_settings'
|
||||
'prebuilds_settings',
|
||||
'task'
|
||||
);
|
||||
|
||||
CREATE TYPE startup_script_behavior AS ENUM (
|
||||
@@ -432,6 +478,15 @@ CREATE TYPE tailnet_status AS ENUM (
|
||||
'lost'
|
||||
);
|
||||
|
||||
CREATE TYPE task_status AS ENUM (
|
||||
'pending',
|
||||
'initializing',
|
||||
'active',
|
||||
'paused',
|
||||
'unknown',
|
||||
'error'
|
||||
);
|
||||
|
||||
CREATE TYPE user_status AS ENUM (
|
||||
'active',
|
||||
'suspended',
|
||||
@@ -1751,9 +1806,9 @@ CREATE TABLE tailnet_tunnels (
|
||||
|
||||
CREATE TABLE task_workspace_apps (
|
||||
task_id uuid NOT NULL,
|
||||
workspace_build_id uuid NOT NULL,
|
||||
workspace_agent_id uuid NOT NULL,
|
||||
workspace_app_id uuid NOT NULL
|
||||
workspace_agent_id uuid,
|
||||
workspace_app_id uuid,
|
||||
workspace_build_number integer NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE tasks (
|
||||
@@ -1769,6 +1824,183 @@ CREATE TABLE tasks (
|
||||
deleted_at timestamp with time zone
|
||||
);
|
||||
|
||||
CREATE TABLE workspace_agents (
|
||||
id uuid NOT NULL,
|
||||
created_at timestamp with time zone NOT NULL,
|
||||
updated_at timestamp with time zone NOT NULL,
|
||||
name character varying(64) NOT NULL,
|
||||
first_connected_at timestamp with time zone,
|
||||
last_connected_at timestamp with time zone,
|
||||
disconnected_at timestamp with time zone,
|
||||
resource_id uuid NOT NULL,
|
||||
auth_token uuid NOT NULL,
|
||||
auth_instance_id character varying,
|
||||
architecture character varying(64) NOT NULL,
|
||||
environment_variables jsonb,
|
||||
operating_system character varying(64) NOT NULL,
|
||||
instance_metadata jsonb,
|
||||
resource_metadata jsonb,
|
||||
directory character varying(4096) DEFAULT ''::character varying NOT NULL,
|
||||
version text DEFAULT ''::text NOT NULL,
|
||||
last_connected_replica_id uuid,
|
||||
connection_timeout_seconds integer DEFAULT 0 NOT NULL,
|
||||
troubleshooting_url text DEFAULT ''::text NOT NULL,
|
||||
motd_file text DEFAULT ''::text NOT NULL,
|
||||
lifecycle_state workspace_agent_lifecycle_state DEFAULT 'created'::workspace_agent_lifecycle_state NOT NULL,
|
||||
expanded_directory character varying(4096) DEFAULT ''::character varying NOT NULL,
|
||||
logs_length integer DEFAULT 0 NOT NULL,
|
||||
logs_overflowed boolean DEFAULT false NOT NULL,
|
||||
started_at timestamp with time zone,
|
||||
ready_at timestamp with time zone,
|
||||
subsystems workspace_agent_subsystem[] DEFAULT '{}'::workspace_agent_subsystem[],
|
||||
display_apps display_app[] DEFAULT '{vscode,vscode_insiders,web_terminal,ssh_helper,port_forwarding_helper}'::display_app[],
|
||||
api_version text DEFAULT ''::text NOT NULL,
|
||||
display_order integer DEFAULT 0 NOT NULL,
|
||||
parent_id uuid,
|
||||
api_key_scope agent_key_scope_enum DEFAULT 'all'::agent_key_scope_enum NOT NULL,
|
||||
deleted boolean DEFAULT false NOT NULL,
|
||||
CONSTRAINT max_logs_length CHECK ((logs_length <= 1048576)),
|
||||
CONSTRAINT subsystems_not_none CHECK ((NOT ('none'::workspace_agent_subsystem = ANY (subsystems))))
|
||||
);
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.version IS 'Version tracks the version of the currently running workspace agent. Workspace agents register their version upon start.';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.connection_timeout_seconds IS 'Connection timeout in seconds, 0 means disabled.';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.troubleshooting_url IS 'URL for troubleshooting the agent.';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.motd_file IS 'Path to file inside workspace containing the message of the day (MOTD) to show to the user when logging in via SSH.';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.lifecycle_state IS 'The current lifecycle state reported by the workspace agent.';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.expanded_directory IS 'The resolved path of a user-specified directory. e.g. ~/coder -> /home/coder/coder';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.logs_length IS 'Total length of startup logs';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.logs_overflowed IS 'Whether the startup logs overflowed in length';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.started_at IS 'The time the agent entered the starting lifecycle state';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.ready_at IS 'The time the agent entered the ready or start_error lifecycle state';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.display_order IS 'Specifies the order in which to display agents in user interfaces.';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.api_key_scope IS 'Defines the scope of the API key associated with the agent. ''all'' allows access to everything, ''no_user_data'' restricts it to exclude user data.';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.deleted IS 'Indicates whether or not the agent has been deleted. This is currently only applicable to sub agents.';
|
||||
|
||||
CREATE TABLE workspace_apps (
|
||||
id uuid NOT NULL,
|
||||
created_at timestamp with time zone NOT NULL,
|
||||
agent_id uuid NOT NULL,
|
||||
display_name character varying(64) NOT NULL,
|
||||
icon character varying(256) NOT NULL,
|
||||
command character varying(65534),
|
||||
url character varying(65534),
|
||||
healthcheck_url text DEFAULT ''::text NOT NULL,
|
||||
healthcheck_interval integer DEFAULT 0 NOT NULL,
|
||||
healthcheck_threshold integer DEFAULT 0 NOT NULL,
|
||||
health workspace_app_health DEFAULT 'disabled'::workspace_app_health NOT NULL,
|
||||
subdomain boolean DEFAULT false NOT NULL,
|
||||
sharing_level app_sharing_level DEFAULT 'owner'::app_sharing_level NOT NULL,
|
||||
slug text NOT NULL,
|
||||
external boolean DEFAULT false NOT NULL,
|
||||
display_order integer DEFAULT 0 NOT NULL,
|
||||
hidden boolean DEFAULT false NOT NULL,
|
||||
open_in workspace_app_open_in DEFAULT 'slim-window'::workspace_app_open_in NOT NULL,
|
||||
display_group text,
|
||||
tooltip character varying(2048) DEFAULT ''::character varying NOT NULL
|
||||
);
|
||||
|
||||
COMMENT ON COLUMN workspace_apps.display_order IS 'Specifies the order in which to display agent app in user interfaces.';
|
||||
|
||||
COMMENT ON COLUMN workspace_apps.hidden IS 'Determines if the app is not shown in user interfaces.';
|
||||
|
||||
COMMENT ON COLUMN workspace_apps.tooltip IS 'Markdown text that is displayed when hovering over workspace apps.';
|
||||
|
||||
CREATE TABLE workspace_builds (
|
||||
id uuid NOT NULL,
|
||||
created_at timestamp with time zone NOT NULL,
|
||||
updated_at timestamp with time zone NOT NULL,
|
||||
workspace_id uuid NOT NULL,
|
||||
template_version_id uuid NOT NULL,
|
||||
build_number integer NOT NULL,
|
||||
transition workspace_transition NOT NULL,
|
||||
initiator_id uuid NOT NULL,
|
||||
provisioner_state bytea,
|
||||
job_id uuid NOT NULL,
|
||||
deadline timestamp with time zone DEFAULT '0001-01-01 00:00:00+00'::timestamp with time zone NOT NULL,
|
||||
reason build_reason DEFAULT 'initiator'::build_reason NOT NULL,
|
||||
daily_cost integer DEFAULT 0 NOT NULL,
|
||||
max_deadline timestamp with time zone DEFAULT '0001-01-01 00:00:00+00'::timestamp with time zone NOT NULL,
|
||||
template_version_preset_id uuid,
|
||||
has_ai_task boolean,
|
||||
ai_task_sidebar_app_id uuid,
|
||||
has_external_agent boolean,
|
||||
CONSTRAINT workspace_builds_ai_task_sidebar_app_id_required CHECK (((((has_ai_task IS NULL) OR (has_ai_task = false)) AND (ai_task_sidebar_app_id IS NULL)) OR ((has_ai_task = true) AND (ai_task_sidebar_app_id IS NOT NULL)))),
|
||||
CONSTRAINT workspace_builds_deadline_below_max_deadline CHECK ((((deadline <> '0001-01-01 00:00:00+00'::timestamp with time zone) AND (deadline <= max_deadline)) OR (max_deadline = '0001-01-01 00:00:00+00'::timestamp with time zone)))
|
||||
);
|
||||
|
||||
CREATE VIEW tasks_with_status AS
|
||||
SELECT tasks.id,
|
||||
tasks.organization_id,
|
||||
tasks.owner_id,
|
||||
tasks.name,
|
||||
tasks.workspace_id,
|
||||
tasks.template_version_id,
|
||||
tasks.template_parameters,
|
||||
tasks.prompt,
|
||||
tasks.created_at,
|
||||
tasks.deleted_at,
|
||||
CASE
|
||||
WHEN ((tasks.workspace_id IS NULL) OR (latest_build.job_status IS NULL)) THEN 'pending'::task_status
|
||||
WHEN (latest_build.job_status = 'failed'::provisioner_job_status) THEN 'error'::task_status
|
||||
WHEN ((latest_build.transition = ANY (ARRAY['stop'::workspace_transition, 'delete'::workspace_transition])) AND (latest_build.job_status = 'succeeded'::provisioner_job_status)) THEN 'paused'::task_status
|
||||
WHEN ((latest_build.transition = 'start'::workspace_transition) AND (latest_build.job_status = 'pending'::provisioner_job_status)) THEN 'initializing'::task_status
|
||||
WHEN ((latest_build.transition = 'start'::workspace_transition) AND (latest_build.job_status = ANY (ARRAY['running'::provisioner_job_status, 'succeeded'::provisioner_job_status]))) THEN
|
||||
CASE
|
||||
WHEN agent_status."none" THEN 'initializing'::task_status
|
||||
WHEN agent_status.connecting THEN 'initializing'::task_status
|
||||
WHEN agent_status.connected THEN
|
||||
CASE
|
||||
WHEN app_status.any_unhealthy THEN 'error'::task_status
|
||||
WHEN app_status.any_initializing THEN 'initializing'::task_status
|
||||
WHEN app_status.all_healthy_or_disabled THEN 'active'::task_status
|
||||
ELSE 'unknown'::task_status
|
||||
END
|
||||
ELSE 'unknown'::task_status
|
||||
END
|
||||
ELSE 'unknown'::task_status
|
||||
END AS status,
|
||||
task_app.workspace_build_number,
|
||||
task_app.workspace_agent_id,
|
||||
task_app.workspace_app_id
|
||||
FROM ((((tasks
|
||||
LEFT JOIN LATERAL ( SELECT task_app_1.workspace_build_number,
|
||||
task_app_1.workspace_agent_id,
|
||||
task_app_1.workspace_app_id
|
||||
FROM task_workspace_apps task_app_1
|
||||
WHERE (task_app_1.task_id = tasks.id)
|
||||
ORDER BY task_app_1.workspace_build_number DESC
|
||||
LIMIT 1) task_app ON (true))
|
||||
LEFT JOIN LATERAL ( SELECT workspace_build.transition,
|
||||
provisioner_job.job_status,
|
||||
workspace_build.job_id
|
||||
FROM (workspace_builds workspace_build
|
||||
JOIN provisioner_jobs provisioner_job ON ((provisioner_job.id = workspace_build.job_id)))
|
||||
WHERE ((workspace_build.workspace_id = tasks.workspace_id) AND (workspace_build.build_number = task_app.workspace_build_number))) latest_build ON (true))
|
||||
CROSS JOIN LATERAL ( SELECT (count(*) = 0) AS "none",
|
||||
bool_or((workspace_agent.lifecycle_state = ANY (ARRAY['created'::workspace_agent_lifecycle_state, 'starting'::workspace_agent_lifecycle_state]))) AS connecting,
|
||||
bool_and((workspace_agent.lifecycle_state = 'ready'::workspace_agent_lifecycle_state)) AS connected
|
||||
FROM workspace_agents workspace_agent
|
||||
WHERE (workspace_agent.id = task_app.workspace_agent_id)) agent_status)
|
||||
CROSS JOIN LATERAL ( SELECT bool_or((workspace_app.health = 'unhealthy'::workspace_app_health)) AS any_unhealthy,
|
||||
bool_or((workspace_app.health = 'initializing'::workspace_app_health)) AS any_initializing,
|
||||
bool_and((workspace_app.health = ANY (ARRAY['healthy'::workspace_app_health, 'disabled'::workspace_app_health]))) AS all_healthy_or_disabled
|
||||
FROM workspace_apps workspace_app
|
||||
WHERE (workspace_app.id = task_app.workspace_app_id)) app_status)
|
||||
WHERE (tasks.deleted_at IS NULL);
|
||||
|
||||
CREATE TABLE telemetry_items (
|
||||
key text NOT NULL,
|
||||
value text NOT NULL,
|
||||
@@ -2332,71 +2564,6 @@ CREATE TABLE workspace_agent_volume_resource_monitors (
|
||||
debounced_until timestamp with time zone DEFAULT '0001-01-01 00:00:00+00'::timestamp with time zone NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE workspace_agents (
|
||||
id uuid NOT NULL,
|
||||
created_at timestamp with time zone NOT NULL,
|
||||
updated_at timestamp with time zone NOT NULL,
|
||||
name character varying(64) NOT NULL,
|
||||
first_connected_at timestamp with time zone,
|
||||
last_connected_at timestamp with time zone,
|
||||
disconnected_at timestamp with time zone,
|
||||
resource_id uuid NOT NULL,
|
||||
auth_token uuid NOT NULL,
|
||||
auth_instance_id character varying,
|
||||
architecture character varying(64) NOT NULL,
|
||||
environment_variables jsonb,
|
||||
operating_system character varying(64) NOT NULL,
|
||||
instance_metadata jsonb,
|
||||
resource_metadata jsonb,
|
||||
directory character varying(4096) DEFAULT ''::character varying NOT NULL,
|
||||
version text DEFAULT ''::text NOT NULL,
|
||||
last_connected_replica_id uuid,
|
||||
connection_timeout_seconds integer DEFAULT 0 NOT NULL,
|
||||
troubleshooting_url text DEFAULT ''::text NOT NULL,
|
||||
motd_file text DEFAULT ''::text NOT NULL,
|
||||
lifecycle_state workspace_agent_lifecycle_state DEFAULT 'created'::workspace_agent_lifecycle_state NOT NULL,
|
||||
expanded_directory character varying(4096) DEFAULT ''::character varying NOT NULL,
|
||||
logs_length integer DEFAULT 0 NOT NULL,
|
||||
logs_overflowed boolean DEFAULT false NOT NULL,
|
||||
started_at timestamp with time zone,
|
||||
ready_at timestamp with time zone,
|
||||
subsystems workspace_agent_subsystem[] DEFAULT '{}'::workspace_agent_subsystem[],
|
||||
display_apps display_app[] DEFAULT '{vscode,vscode_insiders,web_terminal,ssh_helper,port_forwarding_helper}'::display_app[],
|
||||
api_version text DEFAULT ''::text NOT NULL,
|
||||
display_order integer DEFAULT 0 NOT NULL,
|
||||
parent_id uuid,
|
||||
api_key_scope agent_key_scope_enum DEFAULT 'all'::agent_key_scope_enum NOT NULL,
|
||||
deleted boolean DEFAULT false NOT NULL,
|
||||
CONSTRAINT max_logs_length CHECK ((logs_length <= 1048576)),
|
||||
CONSTRAINT subsystems_not_none CHECK ((NOT ('none'::workspace_agent_subsystem = ANY (subsystems))))
|
||||
);
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.version IS 'Version tracks the version of the currently running workspace agent. Workspace agents register their version upon start.';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.connection_timeout_seconds IS 'Connection timeout in seconds, 0 means disabled.';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.troubleshooting_url IS 'URL for troubleshooting the agent.';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.motd_file IS 'Path to file inside workspace containing the message of the day (MOTD) to show to the user when logging in via SSH.';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.lifecycle_state IS 'The current lifecycle state reported by the workspace agent.';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.expanded_directory IS 'The resolved path of a user-specified directory. e.g. ~/coder -> /home/coder/coder';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.logs_length IS 'Total length of startup logs';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.logs_overflowed IS 'Whether the startup logs overflowed in length';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.started_at IS 'The time the agent entered the starting lifecycle state';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.ready_at IS 'The time the agent entered the ready or start_error lifecycle state';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.display_order IS 'Specifies the order in which to display agents in user interfaces.';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.api_key_scope IS 'Defines the scope of the API key associated with the agent. ''all'' allows access to everything, ''no_user_data'' restricts it to exclude user data.';
|
||||
|
||||
COMMENT ON COLUMN workspace_agents.deleted IS 'Indicates whether or not the agent has been deleted. This is currently only applicable to sub agents.';
|
||||
|
||||
CREATE UNLOGGED TABLE workspace_app_audit_sessions (
|
||||
agent_id uuid NOT NULL,
|
||||
app_id uuid NOT NULL,
|
||||
@@ -2485,35 +2652,6 @@ CREATE TABLE workspace_app_statuses (
|
||||
uri text
|
||||
);
|
||||
|
||||
CREATE TABLE workspace_apps (
|
||||
id uuid NOT NULL,
|
||||
created_at timestamp with time zone NOT NULL,
|
||||
agent_id uuid NOT NULL,
|
||||
display_name character varying(64) NOT NULL,
|
||||
icon character varying(256) NOT NULL,
|
||||
command character varying(65534),
|
||||
url character varying(65534),
|
||||
healthcheck_url text DEFAULT ''::text NOT NULL,
|
||||
healthcheck_interval integer DEFAULT 0 NOT NULL,
|
||||
healthcheck_threshold integer DEFAULT 0 NOT NULL,
|
||||
health workspace_app_health DEFAULT 'disabled'::workspace_app_health NOT NULL,
|
||||
subdomain boolean DEFAULT false NOT NULL,
|
||||
sharing_level app_sharing_level DEFAULT 'owner'::app_sharing_level NOT NULL,
|
||||
slug text NOT NULL,
|
||||
external boolean DEFAULT false NOT NULL,
|
||||
display_order integer DEFAULT 0 NOT NULL,
|
||||
hidden boolean DEFAULT false NOT NULL,
|
||||
open_in workspace_app_open_in DEFAULT 'slim-window'::workspace_app_open_in NOT NULL,
|
||||
display_group text,
|
||||
tooltip character varying(2048) DEFAULT ''::character varying NOT NULL
|
||||
);
|
||||
|
||||
COMMENT ON COLUMN workspace_apps.display_order IS 'Specifies the order in which to display agent app in user interfaces.';
|
||||
|
||||
COMMENT ON COLUMN workspace_apps.hidden IS 'Determines if the app is not shown in user interfaces.';
|
||||
|
||||
COMMENT ON COLUMN workspace_apps.tooltip IS 'Markdown text that is displayed when hovering over workspace apps.';
|
||||
|
||||
CREATE TABLE workspace_build_parameters (
|
||||
workspace_build_id uuid NOT NULL,
|
||||
name text NOT NULL,
|
||||
@@ -2524,29 +2662,6 @@ COMMENT ON COLUMN workspace_build_parameters.name IS 'Parameter name';
|
||||
|
||||
COMMENT ON COLUMN workspace_build_parameters.value IS 'Parameter value';
|
||||
|
||||
CREATE TABLE workspace_builds (
|
||||
id uuid NOT NULL,
|
||||
created_at timestamp with time zone NOT NULL,
|
||||
updated_at timestamp with time zone NOT NULL,
|
||||
workspace_id uuid NOT NULL,
|
||||
template_version_id uuid NOT NULL,
|
||||
build_number integer NOT NULL,
|
||||
transition workspace_transition NOT NULL,
|
||||
initiator_id uuid NOT NULL,
|
||||
provisioner_state bytea,
|
||||
job_id uuid NOT NULL,
|
||||
deadline timestamp with time zone DEFAULT '0001-01-01 00:00:00+00'::timestamp with time zone NOT NULL,
|
||||
reason build_reason DEFAULT 'initiator'::build_reason NOT NULL,
|
||||
daily_cost integer DEFAULT 0 NOT NULL,
|
||||
max_deadline timestamp with time zone DEFAULT '0001-01-01 00:00:00+00'::timestamp with time zone NOT NULL,
|
||||
template_version_preset_id uuid,
|
||||
has_ai_task boolean,
|
||||
ai_task_sidebar_app_id uuid,
|
||||
has_external_agent boolean,
|
||||
CONSTRAINT workspace_builds_ai_task_sidebar_app_id_required CHECK (((((has_ai_task IS NULL) OR (has_ai_task = false)) AND (ai_task_sidebar_app_id IS NULL)) OR ((has_ai_task = true) AND (ai_task_sidebar_app_id IS NOT NULL)))),
|
||||
CONSTRAINT workspace_builds_deadline_below_max_deadline CHECK ((((deadline <> '0001-01-01 00:00:00+00'::timestamp with time zone) AND (deadline <= max_deadline)) OR (max_deadline = '0001-01-01 00:00:00+00'::timestamp with time zone)))
|
||||
);
|
||||
|
||||
CREATE VIEW workspace_build_with_user AS
|
||||
SELECT workspace_builds.id,
|
||||
workspace_builds.created_at,
|
||||
@@ -2962,6 +3077,9 @@ ALTER TABLE ONLY tailnet_peers
|
||||
ALTER TABLE ONLY tailnet_tunnels
|
||||
ADD CONSTRAINT tailnet_tunnels_pkey PRIMARY KEY (coordinator_id, src_id, dst_id);
|
||||
|
||||
ALTER TABLE ONLY task_workspace_apps
|
||||
ADD CONSTRAINT task_workspace_apps_pkey PRIMARY KEY (task_id, workspace_build_number);
|
||||
|
||||
ALTER TABLE ONLY tasks
|
||||
ADD CONSTRAINT tasks_pkey PRIMARY KEY (id);
|
||||
|
||||
@@ -3227,6 +3345,16 @@ COMMENT ON INDEX provisioner_jobs_worker_id_organization_id_completed_at_idx IS
|
||||
|
||||
CREATE UNIQUE INDEX provisioner_keys_organization_id_name_idx ON provisioner_keys USING btree (organization_id, lower((name)::text));
|
||||
|
||||
CREATE INDEX tasks_organization_id_idx ON tasks USING btree (organization_id);
|
||||
|
||||
CREATE INDEX tasks_owner_id_idx ON tasks USING btree (owner_id);
|
||||
|
||||
CREATE UNIQUE INDEX tasks_owner_id_name_unique_idx ON tasks USING btree (owner_id, lower(name)) WHERE (deleted_at IS NULL);
|
||||
|
||||
COMMENT ON INDEX tasks_owner_id_name_unique_idx IS 'Index to ensure uniqueness for task owner/name';
|
||||
|
||||
CREATE INDEX tasks_workspace_id_idx ON tasks USING btree (workspace_id);
|
||||
|
||||
CREATE INDEX template_usage_stats_start_time_idx ON template_usage_stats USING btree (start_time DESC);
|
||||
|
||||
COMMENT ON INDEX template_usage_stats_start_time_idx IS 'Index for querying MAX(start_time).';
|
||||
@@ -3507,9 +3635,6 @@ ALTER TABLE ONLY task_workspace_apps
|
||||
ALTER TABLE ONLY task_workspace_apps
|
||||
ADD CONSTRAINT task_workspace_apps_workspace_app_id_fkey FOREIGN KEY (workspace_app_id) REFERENCES workspace_apps(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY task_workspace_apps
|
||||
ADD CONSTRAINT task_workspace_apps_workspace_build_id_fkey FOREIGN KEY (workspace_build_id) REFERENCES workspace_builds(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY tasks
|
||||
ADD CONSTRAINT tasks_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;
|
||||
|
||||
|
||||
@@ -48,7 +48,6 @@ const (
|
||||
ForeignKeyTaskWorkspaceAppsTaskID ForeignKeyConstraint = "task_workspace_apps_task_id_fkey" // ALTER TABLE ONLY task_workspace_apps ADD CONSTRAINT task_workspace_apps_task_id_fkey FOREIGN KEY (task_id) REFERENCES tasks(id) ON DELETE CASCADE;
|
||||
ForeignKeyTaskWorkspaceAppsWorkspaceAgentID ForeignKeyConstraint = "task_workspace_apps_workspace_agent_id_fkey" // ALTER TABLE ONLY task_workspace_apps ADD CONSTRAINT task_workspace_apps_workspace_agent_id_fkey FOREIGN KEY (workspace_agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE;
|
||||
ForeignKeyTaskWorkspaceAppsWorkspaceAppID ForeignKeyConstraint = "task_workspace_apps_workspace_app_id_fkey" // ALTER TABLE ONLY task_workspace_apps ADD CONSTRAINT task_workspace_apps_workspace_app_id_fkey FOREIGN KEY (workspace_app_id) REFERENCES workspace_apps(id) ON DELETE CASCADE;
|
||||
ForeignKeyTaskWorkspaceAppsWorkspaceBuildID ForeignKeyConstraint = "task_workspace_apps_workspace_build_id_fkey" // ALTER TABLE ONLY task_workspace_apps ADD CONSTRAINT task_workspace_apps_workspace_build_id_fkey FOREIGN KEY (workspace_build_id) REFERENCES workspace_builds(id) ON DELETE CASCADE;
|
||||
ForeignKeyTasksOrganizationID ForeignKeyConstraint = "tasks_organization_id_fkey" // ALTER TABLE ONLY tasks ADD CONSTRAINT tasks_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE;
|
||||
ForeignKeyTasksOwnerID ForeignKeyConstraint = "tasks_owner_id_fkey" // ALTER TABLE ONLY tasks ADD CONSTRAINT tasks_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES users(id) ON DELETE CASCADE;
|
||||
ForeignKeyTasksTemplateVersionID ForeignKeyConstraint = "tasks_template_version_id_fkey" // ALTER TABLE ONLY tasks ADD CONSTRAINT tasks_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE;
|
||||
|
||||
@@ -35,6 +35,10 @@ func (*mockTB) Logf(format string, args ...any) {
|
||||
_, _ = fmt.Printf(format, args...)
|
||||
}
|
||||
|
||||
func (*mockTB) TempDir() string {
|
||||
panic("not implemented")
|
||||
}
|
||||
|
||||
func main() {
|
||||
t := &mockTB{}
|
||||
defer func() {
|
||||
|
||||
@@ -0,0 +1,2 @@
|
||||
-- No-op: enum values remain to avoid churn. Removing enum values requires
|
||||
-- doing a create/cast/drop cycle which is intentionally omitted here.
|
||||
@@ -0,0 +1,42 @@
|
||||
-- Add wildcard api_key_scope entries so every RBAC resource has a matching resource:* value.
|
||||
-- Generated via: CGO_ENABLED=0 go run ./scripts/generate_api_key_scope_enum
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'aibridge_interception:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'api_key:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'assign_org_role:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'assign_role:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'audit_log:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'connection_log:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'crypto_key:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'debug_info:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'deployment_config:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'deployment_stats:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'file:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'group:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'group_member:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'idpsync_settings:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'inbox_notification:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'license:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'notification_message:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'notification_preference:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'notification_template:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'oauth2_app:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'oauth2_app_code_token:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'oauth2_app_secret:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'organization:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'organization_member:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'prebuilt_workspace:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'provisioner_daemon:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'provisioner_jobs:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'replicas:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'system:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'tailnet_coordinator:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'template:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'usage_event:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'user:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'user_secret:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'webpush_subscription:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'workspace:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'workspace_agent_devcontainers:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'workspace_agent_resource_monitor:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'workspace_dormant:*';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'workspace_proxy:*';
|
||||
@@ -0,0 +1,3 @@
|
||||
-- Revert Tasks RBAC.
|
||||
-- No-op: enum values remain to avoid churn. Removing enum values requires
|
||||
-- doing a create/cast/drop cycle which is intentionally omitted here.
|
||||
@@ -0,0 +1,6 @@
|
||||
-- Tasks RBAC.
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'task:create';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'task:read';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'task:update';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'task:delete';
|
||||
ALTER TYPE api_key_scope ADD VALUE IF NOT EXISTS 'task:*';
|
||||
@@ -0,0 +1,33 @@
|
||||
DROP VIEW IF EXISTS tasks_with_status;
|
||||
DROP TYPE IF EXISTS task_status;
|
||||
|
||||
DROP INDEX IF EXISTS tasks_organization_id_idx;
|
||||
DROP INDEX IF EXISTS tasks_owner_id_idx;
|
||||
DROP INDEX IF EXISTS tasks_workspace_id_idx;
|
||||
|
||||
ALTER TABLE task_workspace_apps
|
||||
DROP CONSTRAINT IF EXISTS task_workspace_apps_pkey;
|
||||
|
||||
-- Add back workspace_build_id column.
|
||||
ALTER TABLE task_workspace_apps
|
||||
ADD COLUMN workspace_build_id UUID;
|
||||
|
||||
-- Try to populate workspace_build_id from workspace_builds.
|
||||
UPDATE task_workspace_apps
|
||||
SET workspace_build_id = workspace_builds.id
|
||||
FROM workspace_builds
|
||||
WHERE workspace_builds.build_number = task_workspace_apps.workspace_build_number
|
||||
AND workspace_builds.workspace_id IN (
|
||||
SELECT workspace_id FROM tasks WHERE tasks.id = task_workspace_apps.task_id
|
||||
);
|
||||
|
||||
-- Remove rows that couldn't be restored.
|
||||
DELETE FROM task_workspace_apps
|
||||
WHERE workspace_build_id IS NULL;
|
||||
|
||||
-- Restore original schema.
|
||||
ALTER TABLE task_workspace_apps
|
||||
DROP COLUMN workspace_build_number,
|
||||
ALTER COLUMN workspace_build_id SET NOT NULL,
|
||||
ALTER COLUMN workspace_agent_id SET NOT NULL,
|
||||
ALTER COLUMN workspace_app_id SET NOT NULL;
|
||||
@@ -0,0 +1,104 @@
|
||||
-- Replace workspace_build_id with workspace_build_number.
ALTER TABLE task_workspace_apps
    ADD COLUMN workspace_build_number INTEGER;

-- Populate workspace_build_number from the referenced build; rows whose
-- build is gone remain NULL and are deleted below.
UPDATE task_workspace_apps
SET workspace_build_number = workspace_builds.build_number
FROM workspace_builds
WHERE workspace_builds.id = task_workspace_apps.workspace_build_id;

-- Remove rows that couldn't be migrated.
DELETE FROM task_workspace_apps
WHERE workspace_build_number IS NULL;

-- Swap the key column, relax agent/app NOT NULL (rows may now be recorded
-- before an agent/app exists), and key rows by (task, build number).
ALTER TABLE task_workspace_apps
    DROP COLUMN workspace_build_id,
    ALTER COLUMN workspace_build_number SET NOT NULL,
    ALTER COLUMN workspace_agent_id DROP NOT NULL,
    ALTER COLUMN workspace_app_id DROP NOT NULL,
    ADD CONSTRAINT task_workspace_apps_pkey PRIMARY KEY (task_id, workspace_build_number);

-- Indexes for common joins and filters on tasks.
CREATE INDEX IF NOT EXISTS tasks_workspace_id_idx ON tasks (workspace_id);
CREATE INDEX IF NOT EXISTS tasks_owner_id_idx ON tasks (owner_id);
CREATE INDEX IF NOT EXISTS tasks_organization_id_idx ON tasks (organization_id);
|
||||
|
||||
-- Lifecycle states surfaced by the tasks_with_status view.
CREATE TYPE task_status AS ENUM (
    'pending',
    'initializing',
    'active',
    'paused',
    'unknown',
    'error'
);
|
||||
|
||||
-- Derives a task_status for every live task from its latest tracked build,
-- the build's provisioner job, and the health of its agent and app.
CREATE VIEW
    tasks_with_status
AS
SELECT
    tasks.*,
    CASE
        -- No workspace or no tracked build yet.
        WHEN tasks.workspace_id IS NULL OR latest_build.job_status IS NULL THEN 'pending'::task_status
        WHEN latest_build.job_status = 'failed' THEN 'error'::task_status
        WHEN latest_build.transition IN ('stop', 'delete')
            AND latest_build.job_status = 'succeeded' THEN 'paused'::task_status
        WHEN latest_build.transition = 'start'
            AND latest_build.job_status = 'pending' THEN 'initializing'::task_status
        -- Workspace is (coming) up: refine by agent, then app health.
        WHEN latest_build.transition = 'start' AND latest_build.job_status IN ('running', 'succeeded') THEN
            CASE
                WHEN agent_status.none THEN 'initializing'::task_status
                WHEN agent_status.connecting THEN 'initializing'::task_status
                WHEN agent_status.connected THEN
                    CASE
                        WHEN app_status.any_unhealthy THEN 'error'::task_status
                        WHEN app_status.any_initializing THEN 'initializing'::task_status
                        WHEN app_status.all_healthy_or_disabled THEN 'active'::task_status
                        ELSE 'unknown'::task_status
                    END
                ELSE 'unknown'::task_status
            END
        ELSE 'unknown'::task_status
    END AS status
FROM
    tasks
    -- Most recent tracked app row per task (highest build number).
    LEFT JOIN LATERAL (
        SELECT workspace_build_number, workspace_agent_id, workspace_app_id
        FROM task_workspace_apps task_app
        WHERE task_id = tasks.id
        ORDER BY workspace_build_number DESC
        LIMIT 1
    ) task_app ON TRUE
    -- The build referenced by that row, with its provisioner job status.
    LEFT JOIN LATERAL (
        SELECT
            workspace_build.transition,
            provisioner_job.job_status,
            workspace_build.job_id
        FROM workspace_builds workspace_build
        JOIN provisioner_jobs provisioner_job ON provisioner_job.id = workspace_build.job_id
        WHERE workspace_build.workspace_id = tasks.workspace_id
            AND workspace_build.build_number = task_app.workspace_build_number
    ) latest_build ON TRUE
    -- Aggregate agent connectivity (aggregates always yield one row).
    CROSS JOIN LATERAL (
        SELECT
            COUNT(*) = 0 AS none,
            bool_or(workspace_agent.lifecycle_state IN ('created', 'starting')) AS connecting,
            bool_and(workspace_agent.lifecycle_state = 'ready') AS connected
        FROM workspace_agents workspace_agent
        WHERE workspace_agent.id = task_app.workspace_agent_id
    ) agent_status
    -- Aggregate app health.
    CROSS JOIN LATERAL (
        SELECT
            bool_or(workspace_app.health = 'unhealthy') AS any_unhealthy,
            bool_or(workspace_app.health = 'initializing') AS any_initializing,
            bool_and(workspace_app.health IN ('healthy', 'disabled')) AS all_healthy_or_disabled
        FROM workspace_apps workspace_app
        WHERE workspace_app.id = task_app.workspace_app_id
    ) app_status
WHERE
    tasks.deleted_at IS NULL;
|
||||
@@ -0,0 +1 @@
|
||||
-- Revert the per-owner unique task name constraint.
DROP INDEX IF EXISTS tasks_owner_id_name_unique_idx;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- Enforce case-insensitive task name uniqueness per owner; soft-deleted
-- rows are excluded so a deleted task's name can be reused.
CREATE UNIQUE INDEX IF NOT EXISTS tasks_owner_id_name_unique_idx ON tasks (owner_id, LOWER(name)) WHERE deleted_at IS NULL;
COMMENT ON INDEX tasks_owner_id_name_unique_idx IS 'Index to ensure uniqueness for task owner/name';
|
||||
@@ -0,0 +1 @@
|
||||
-- No-op: nothing to revert for this migration.
|
||||
@@ -0,0 +1 @@
|
||||
-- Register 'task' as an auditable resource type.
ALTER TYPE resource_type ADD VALUE IF NOT EXISTS 'task';
|
||||
@@ -0,0 +1,72 @@
|
||||
DROP VIEW IF EXISTS tasks_with_status;

-- Restore the view exactly as defined in
-- 00037_add_columns_to_tasks_with_status.up.sql (without task_app columns).
CREATE VIEW
    tasks_with_status
AS
SELECT
    tasks.*,
    CASE
        -- No workspace or no tracked build yet.
        WHEN tasks.workspace_id IS NULL OR latest_build.job_status IS NULL THEN 'pending'::task_status
        WHEN latest_build.job_status = 'failed' THEN 'error'::task_status
        WHEN latest_build.transition IN ('stop', 'delete')
            AND latest_build.job_status = 'succeeded' THEN 'paused'::task_status
        WHEN latest_build.transition = 'start'
            AND latest_build.job_status = 'pending' THEN 'initializing'::task_status
        -- Workspace is (coming) up: refine by agent, then app health.
        WHEN latest_build.transition = 'start' AND latest_build.job_status IN ('running', 'succeeded') THEN
            CASE
                WHEN agent_status.none THEN 'initializing'::task_status
                WHEN agent_status.connecting THEN 'initializing'::task_status
                WHEN agent_status.connected THEN
                    CASE
                        WHEN app_status.any_unhealthy THEN 'error'::task_status
                        WHEN app_status.any_initializing THEN 'initializing'::task_status
                        WHEN app_status.all_healthy_or_disabled THEN 'active'::task_status
                        ELSE 'unknown'::task_status
                    END
                ELSE 'unknown'::task_status
            END
        ELSE 'unknown'::task_status
    END AS status
FROM
    tasks
    -- Most recent tracked app row per task (highest build number).
    LEFT JOIN LATERAL (
        SELECT workspace_build_number, workspace_agent_id, workspace_app_id
        FROM task_workspace_apps task_app
        WHERE task_id = tasks.id
        ORDER BY workspace_build_number DESC
        LIMIT 1
    ) task_app ON TRUE
    -- The build referenced by that row, with its provisioner job status.
    LEFT JOIN LATERAL (
        SELECT
            workspace_build.transition,
            provisioner_job.job_status,
            workspace_build.job_id
        FROM workspace_builds workspace_build
        JOIN provisioner_jobs provisioner_job ON provisioner_job.id = workspace_build.job_id
        WHERE workspace_build.workspace_id = tasks.workspace_id
            AND workspace_build.build_number = task_app.workspace_build_number
    ) latest_build ON TRUE
    -- Aggregate agent connectivity (aggregates always yield one row).
    CROSS JOIN LATERAL (
        SELECT
            COUNT(*) = 0 AS none,
            bool_or(workspace_agent.lifecycle_state IN ('created', 'starting')) AS connecting,
            bool_and(workspace_agent.lifecycle_state = 'ready') AS connected
        FROM workspace_agents workspace_agent
        WHERE workspace_agent.id = task_app.workspace_agent_id
    ) agent_status
    -- Aggregate app health.
    CROSS JOIN LATERAL (
        SELECT
            bool_or(workspace_app.health = 'unhealthy') AS any_unhealthy,
            bool_or(workspace_app.health = 'initializing') AS any_initializing,
            bool_and(workspace_app.health IN ('healthy', 'disabled')) AS all_healthy_or_disabled
        FROM workspace_apps workspace_app
        WHERE workspace_app.id = task_app.workspace_app_id
    ) app_status
WHERE
    tasks.deleted_at IS NULL;
|
||||
@@ -0,0 +1,74 @@
|
||||
-- Drop view from 00037_add_columns_to_tasks_with_status.up.sql.
DROP VIEW IF EXISTS tasks_with_status;

-- Recreate it with the latest task_app columns appended to the output.
CREATE VIEW
    tasks_with_status
AS
SELECT
    tasks.*,
    CASE
        -- No workspace or no tracked build yet.
        WHEN tasks.workspace_id IS NULL OR latest_build.job_status IS NULL THEN 'pending'::task_status
        WHEN latest_build.job_status = 'failed' THEN 'error'::task_status
        WHEN latest_build.transition IN ('stop', 'delete')
            AND latest_build.job_status = 'succeeded' THEN 'paused'::task_status
        WHEN latest_build.transition = 'start'
            AND latest_build.job_status = 'pending' THEN 'initializing'::task_status
        -- Workspace is (coming) up: refine by agent, then app health.
        WHEN latest_build.transition = 'start' AND latest_build.job_status IN ('running', 'succeeded') THEN
            CASE
                WHEN agent_status.none THEN 'initializing'::task_status
                WHEN agent_status.connecting THEN 'initializing'::task_status
                WHEN agent_status.connected THEN
                    CASE
                        WHEN app_status.any_unhealthy THEN 'error'::task_status
                        WHEN app_status.any_initializing THEN 'initializing'::task_status
                        WHEN app_status.all_healthy_or_disabled THEN 'active'::task_status
                        ELSE 'unknown'::task_status
                    END
                ELSE 'unknown'::task_status
            END
        ELSE 'unknown'::task_status
    END AS status,
    -- Expose the latest build number / agent / app IDs alongside the task.
    task_app.*
FROM
    tasks
    -- Most recent tracked app row per task (highest build number).
    LEFT JOIN LATERAL (
        SELECT workspace_build_number, workspace_agent_id, workspace_app_id
        FROM task_workspace_apps task_app
        WHERE task_id = tasks.id
        ORDER BY workspace_build_number DESC
        LIMIT 1
    ) task_app ON TRUE
    -- The build referenced by that row, with its provisioner job status.
    LEFT JOIN LATERAL (
        SELECT
            workspace_build.transition,
            provisioner_job.job_status,
            workspace_build.job_id
        FROM workspace_builds workspace_build
        JOIN provisioner_jobs provisioner_job ON provisioner_job.id = workspace_build.job_id
        WHERE workspace_build.workspace_id = tasks.workspace_id
            AND workspace_build.build_number = task_app.workspace_build_number
    ) latest_build ON TRUE
    -- Aggregate agent connectivity (aggregates always yield one row).
    CROSS JOIN LATERAL (
        SELECT
            COUNT(*) = 0 AS none,
            bool_or(workspace_agent.lifecycle_state IN ('created', 'starting')) AS connecting,
            bool_and(workspace_agent.lifecycle_state = 'ready') AS connected
        FROM workspace_agents workspace_agent
        WHERE workspace_agent.id = task_app.workspace_agent_id
    ) agent_status
    -- Aggregate app health.
    CROSS JOIN LATERAL (
        SELECT
            bool_or(workspace_app.health = 'unhealthy') AS any_unhealthy,
            bool_or(workspace_app.health = 'initializing') AS any_initializing,
            bool_and(workspace_app.health IN ('healthy', 'disabled')) AS all_healthy_or_disabled
        FROM workspace_apps workspace_app
        WHERE workspace_app.id = task_app.workspace_app_id
    ) app_status
WHERE
    tasks.deleted_at IS NULL;
|
||||
+5
@@ -0,0 +1,5 @@
|
||||
-- Remove the Task 'completed' transition notification template.
DELETE FROM notification_templates WHERE id = '8c5a4d12-9f7e-4b3a-a1c8-6e4f2d9b5a7c';

-- Remove the Task 'failed' transition notification template.
DELETE FROM notification_templates WHERE id = '3b7e8f1a-4c2d-49a6-b5e9-7f3a1c8d6b4e';
|
||||
+63
@@ -0,0 +1,63 @@
|
||||
-- Notification template: task transitioned to 'complete' status.
INSERT INTO notification_templates (
    id,
    name,
    title_template,
    body_template,
    actions,
    "group",
    method,
    kind,
    enabled_by_default
) VALUES (
    '8c5a4d12-9f7e-4b3a-a1c8-6e4f2d9b5a7c',
    'Task Completed',
    E'Task ''{{.Labels.workspace}}'' completed',
    E'The task ''{{.Labels.task}}'' has completed successfully.',
    '[
        {
            "label": "View task",
            "url": "{{base_url}}/tasks/{{.UserUsername}}/{{.Labels.workspace}}"
        },
        {
            "label": "View workspace",
            "url": "{{base_url}}/@{{.UserUsername}}/{{.Labels.workspace}}"
        }
    ]'::jsonb,
    'Task Events',
    NULL, -- method: fall back to the deployment default delivery method
    'system'::notification_template_kind,
    true
);

-- Notification template: task transitioned to 'failed' status.
INSERT INTO notification_templates (
    id,
    name,
    title_template,
    body_template,
    actions,
    "group",
    method,
    kind,
    enabled_by_default
) VALUES (
    '3b7e8f1a-4c2d-49a6-b5e9-7f3a1c8d6b4e',
    'Task Failed',
    E'Task ''{{.Labels.workspace}}'' failed',
    E'The task ''{{.Labels.task}}'' has failed. Check the logs for more details.',
    '[
        {
            "label": "View task",
            "url": "{{base_url}}/tasks/{{.UserUsername}}/{{.Labels.workspace}}"
        },
        {
            "label": "View workspace",
            "url": "{{base_url}}/@{{.UserUsername}}/{{.Labels.workspace}}"
        }
    ]'::jsonb,
    'Task Events',
    NULL, -- method: fall back to the deployment default delivery method
    'system'::notification_template_kind,
    true
);
|
||||
+6
@@ -0,0 +1,6 @@
|
||||
-- Fixture row for task_workspace_apps with no agent/app attached yet.
-- Use an explicit column list instead of positional VALUES: the positional
-- form breaks silently (or errors) if the table's column set changes, and
-- the original relied on trailing comments to convey the mapping.
INSERT INTO public.task_workspace_apps (
    task_id,
    workspace_agent_id,
    workspace_app_id,
    workspace_build_number
) VALUES (
    'f5a1c3e4-8b2d-4f6a-9d7e-2a8b5c9e1f3d',
    NULL,
    NULL,
    99
) ON CONFLICT DO NOTHING;
|
||||
@@ -132,6 +132,20 @@ func (w ConnectionLog) RBACObject() rbac.Object {
|
||||
return obj
|
||||
}
|
||||
|
||||
func (t Task) RBACObject() rbac.Object {
|
||||
return rbac.ResourceTask.
|
||||
WithID(t.ID).
|
||||
WithOwner(t.OwnerID.String()).
|
||||
InOrg(t.OrganizationID)
|
||||
}
|
||||
|
||||
func (t TaskTable) RBACObject() rbac.Object {
|
||||
return rbac.ResourceTask.
|
||||
WithID(t.ID).
|
||||
WithOwner(t.OwnerID.String()).
|
||||
InOrg(t.OrganizationID)
|
||||
}
|
||||
|
||||
func (s APIKeyScope) ToRBAC() rbac.ScopeName {
|
||||
switch s {
|
||||
case ApiKeyScopeCoderAll:
|
||||
@@ -145,24 +159,30 @@ func (s APIKeyScope) ToRBAC() rbac.ScopeName {
|
||||
}
|
||||
}
|
||||
|
||||
// APIKeyScopes allows expanding multiple API key scopes into a single
|
||||
// RBAC scope for authorization. This implements rbac.ExpandableScope so
|
||||
// callers can pass the list directly without deriving a single scope.
|
||||
// APIKeyScopes represents a collection of individual API key scope names as
|
||||
// stored in the database. Helper methods on this type are used to derive the
|
||||
// RBAC scope that should be authorized for the key.
|
||||
type APIKeyScopes []APIKeyScope
|
||||
|
||||
var _ rbac.ExpandableScope = APIKeyScopes{}
|
||||
// WithAllowList wraps the scopes with a database allow list, producing an
|
||||
// ExpandableScope that always enforces the allow list overlay when expanded.
|
||||
func (s APIKeyScopes) WithAllowList(list AllowList) APIKeyScopeSet {
|
||||
return APIKeyScopeSet{Scopes: s, AllowList: list}
|
||||
}
|
||||
|
||||
// Has returns true if the slice contains the provided scope.
|
||||
func (s APIKeyScopes) Has(target APIKeyScope) bool {
|
||||
return slices.Contains(s, target)
|
||||
}
|
||||
|
||||
// Expand merges the permissions of all scopes in the list into a single scope.
|
||||
// If the list is empty, it defaults to rbac.ScopeAll.
|
||||
func (s APIKeyScopes) Expand() (rbac.Scope, error) {
|
||||
// expandRBACScope merges the permissions of all scopes in the list into a
|
||||
// single RBAC scope. If the list is empty, it defaults to rbac.ScopeAll for
|
||||
// backward compatibility. This method is internal; use ScopeSet() to combine
|
||||
// scopes with the API key's allow list for authorization.
|
||||
func (s APIKeyScopes) expandRBACScope() (rbac.Scope, error) {
|
||||
// Default to ScopeAll for backward compatibility when no scopes provided.
|
||||
if len(s) == 0 {
|
||||
return rbac.ScopeAll.Expand()
|
||||
return rbac.Scope{}, xerrors.New("no scopes provided")
|
||||
}
|
||||
|
||||
var merged rbac.Scope
|
||||
@@ -170,13 +190,12 @@ func (s APIKeyScopes) Expand() (rbac.Scope, error) {
|
||||
// Identifier is informational; not used in policy evaluation.
|
||||
Identifier: rbac.RoleIdentifier{Name: "Scope_Multiple"},
|
||||
Site: nil,
|
||||
Org: map[string][]rbac.Permission{},
|
||||
User: nil,
|
||||
ByOrgID: map[string]rbac.OrgPermissions{},
|
||||
}
|
||||
|
||||
// Track allow list union, collapsing to wildcard if any child is wildcard.
|
||||
allowAll := false
|
||||
allowSet := make(map[string]rbac.AllowListElement)
|
||||
// Collect allow lists for a union after expanding all scopes.
|
||||
allowLists := make([][]rbac.AllowListElement, 0, len(s))
|
||||
|
||||
for _, s := range s {
|
||||
expanded, err := s.ToRBAC().Expand()
|
||||
@@ -186,39 +205,30 @@ func (s APIKeyScopes) Expand() (rbac.Scope, error) {
|
||||
|
||||
// Merge role permissions: union by simple concatenation.
|
||||
merged.Site = append(merged.Site, expanded.Site...)
|
||||
for orgID, perms := range expanded.Org {
|
||||
merged.Org[orgID] = append(merged.Org[orgID], perms...)
|
||||
for orgID, perms := range expanded.ByOrgID {
|
||||
orgPerms := merged.ByOrgID[orgID]
|
||||
orgPerms.Org = append(orgPerms.Org, perms.Org...)
|
||||
merged.ByOrgID[orgID] = orgPerms
|
||||
}
|
||||
merged.User = append(merged.User, expanded.User...)
|
||||
|
||||
// Merge allow lists.
|
||||
for _, e := range expanded.AllowIDList {
|
||||
if e.ID == policy.WildcardSymbol && e.Type == policy.WildcardSymbol {
|
||||
allowAll = true
|
||||
// No need to track other entries once wildcard is present.
|
||||
continue
|
||||
}
|
||||
key := e.String()
|
||||
allowSet[key] = e
|
||||
}
|
||||
allowLists = append(allowLists, expanded.AllowIDList)
|
||||
}
|
||||
|
||||
// De-duplicate permissions across Site/Org/User
|
||||
merged.Site = rbac.DeduplicatePermissions(merged.Site)
|
||||
for orgID, perms := range merged.Org {
|
||||
merged.Org[orgID] = rbac.DeduplicatePermissions(perms)
|
||||
}
|
||||
merged.User = rbac.DeduplicatePermissions(merged.User)
|
||||
|
||||
if allowAll || len(allowSet) == 0 {
|
||||
merged.AllowIDList = []rbac.AllowListElement{rbac.AllowListAll()}
|
||||
} else {
|
||||
merged.AllowIDList = make([]rbac.AllowListElement, 0, len(allowSet))
|
||||
for _, v := range allowSet {
|
||||
merged.AllowIDList = append(merged.AllowIDList, v)
|
||||
}
|
||||
for orgID, perms := range merged.ByOrgID {
|
||||
perms.Org = rbac.DeduplicatePermissions(perms.Org)
|
||||
merged.ByOrgID[orgID] = perms
|
||||
}
|
||||
|
||||
union, err := rbac.UnionAllowLists(allowLists...)
|
||||
if err != nil {
|
||||
return rbac.Scope{}, err
|
||||
}
|
||||
merged.AllowIDList = union
|
||||
|
||||
return merged, nil
|
||||
}
|
||||
|
||||
@@ -235,6 +245,37 @@ func (s APIKeyScopes) Name() rbac.RoleIdentifier {
|
||||
return rbac.RoleIdentifier{Name: "scopes[" + strings.Join(names, "+") + "]"}
|
||||
}
|
||||
|
||||
// APIKeyScopeSet merges expanded scopes with the API key's DB allow_list. If
|
||||
// the DB allow_list is a wildcard or empty, the merged scope's allow list is
|
||||
// unchanged. Otherwise, the DB allow_list overrides the merged AllowIDList to
|
||||
// enforce the token's resource scoping consistently across all permissions.
|
||||
type APIKeyScopeSet struct {
|
||||
Scopes APIKeyScopes
|
||||
AllowList AllowList
|
||||
}
|
||||
|
||||
var _ rbac.ExpandableScope = APIKeyScopeSet{}
|
||||
|
||||
func (s APIKeyScopeSet) Name() rbac.RoleIdentifier { return s.Scopes.Name() }
|
||||
|
||||
func (s APIKeyScopeSet) Expand() (rbac.Scope, error) {
|
||||
merged, err := s.Scopes.expandRBACScope()
|
||||
if err != nil {
|
||||
return rbac.Scope{}, err
|
||||
}
|
||||
merged.AllowIDList = rbac.IntersectAllowLists(merged.AllowIDList, s.AllowList)
|
||||
return merged, nil
|
||||
}
|
||||
|
||||
// ScopeSet returns the scopes combined with the database allow list. It is the
|
||||
// canonical way to expose an API key's effective scope for authorization.
|
||||
func (k APIKey) ScopeSet() APIKeyScopeSet {
|
||||
return APIKeyScopeSet{
|
||||
Scopes: k.Scopes,
|
||||
AllowList: k.AllowList,
|
||||
}
|
||||
}
|
||||
|
||||
func (k APIKey) RBACObject() rbac.Object {
|
||||
return rbac.ResourceApiKey.WithIDString(k.ID).
|
||||
WithOwner(k.UserID.String())
|
||||
|
||||
@@ -3,6 +3,7 @@ package database
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/coder/coder/v2/coderd/rbac"
|
||||
@@ -38,7 +39,7 @@ func TestAPIKeyScopesExpand(t *testing.T) {
|
||||
for _, tc := range cases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
s, err := tc.scopes.Expand()
|
||||
s, err := tc.scopes.expandRBACScope()
|
||||
require.NoError(t, err)
|
||||
tc.want(t, s)
|
||||
})
|
||||
@@ -59,7 +60,7 @@ func TestAPIKeyScopesExpand(t *testing.T) {
|
||||
for _, tc := range cases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
s, err := tc.scopes.Expand()
|
||||
s, err := tc.scopes.expandRBACScope()
|
||||
require.NoError(t, err)
|
||||
requirePermission(t, s, tc.res, tc.act)
|
||||
requireAllowAll(t, s)
|
||||
@@ -70,7 +71,7 @@ func TestAPIKeyScopesExpand(t *testing.T) {
|
||||
t.Run("merge", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
scopes := APIKeyScopes{ApiKeyScopeCoderApplicationConnect, ApiKeyScopeCoderAll, ApiKeyScopeWorkspaceRead}
|
||||
s, err := scopes.Expand()
|
||||
s, err := scopes.expandRBACScope()
|
||||
require.NoError(t, err)
|
||||
requirePermission(t, s, rbac.ResourceWildcard.Type, policy.Action(policy.WildcardSymbol))
|
||||
requirePermission(t, s, rbac.ResourceWorkspace.Type, policy.ActionApplicationConnect)
|
||||
@@ -78,13 +79,68 @@ func TestAPIKeyScopesExpand(t *testing.T) {
|
||||
requireAllowAll(t, s)
|
||||
})
|
||||
|
||||
t.Run("empty_defaults_to_all", func(t *testing.T) {
|
||||
t.Run("effective_scope_keep_types", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
s, err := (APIKeyScopes{}).Expand()
|
||||
workspaceID := uuid.New()
|
||||
|
||||
effective := APIKeyScopeSet{
|
||||
Scopes: APIKeyScopes{ApiKeyScopeWorkspaceRead},
|
||||
AllowList: AllowList{
|
||||
{Type: rbac.ResourceWorkspace.Type, ID: workspaceID.String()},
|
||||
},
|
||||
}
|
||||
|
||||
expanded, err := effective.Expand()
|
||||
require.NoError(t, err)
|
||||
requirePermission(t, s, rbac.ResourceWildcard.Type, policy.Action(policy.WildcardSymbol))
|
||||
require.Len(t, expanded.AllowIDList, 1)
|
||||
require.Equal(t, "workspace", expanded.AllowIDList[0].Type)
|
||||
require.Equal(t, workspaceID.String(), expanded.AllowIDList[0].ID)
|
||||
})
|
||||
|
||||
t.Run("empty_rejected", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, err := (APIKeyScopes{}).expandRBACScope()
|
||||
require.Error(t, err)
|
||||
require.ErrorContains(t, err, "no scopes provided")
|
||||
})
|
||||
|
||||
t.Run("allow_list_overrides", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
allowID := uuid.NewString()
|
||||
set := APIKeyScopes{ApiKeyScopeWorkspaceRead}.WithAllowList(AllowList{
|
||||
{Type: rbac.ResourceWorkspace.Type, ID: allowID},
|
||||
})
|
||||
s, err := set.Expand()
|
||||
require.NoError(t, err)
|
||||
require.Len(t, s.AllowIDList, 1)
|
||||
require.Equal(t, rbac.AllowListElement{Type: rbac.ResourceWorkspace.Type, ID: allowID}, s.AllowIDList[0])
|
||||
})
|
||||
|
||||
t.Run("allow_list_wildcard_keeps_merged", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
set := APIKeyScopes{ApiKeyScopeWorkspaceRead}.WithAllowList(AllowList{
|
||||
{Type: policy.WildcardSymbol, ID: policy.WildcardSymbol},
|
||||
})
|
||||
s, err := set.Expand()
|
||||
require.NoError(t, err)
|
||||
requirePermission(t, s, rbac.ResourceWorkspace.Type, policy.ActionRead)
|
||||
requireAllowAll(t, s)
|
||||
})
|
||||
|
||||
t.Run("scope_set_helper", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
allowID := uuid.NewString()
|
||||
key := APIKey{
|
||||
Scopes: APIKeyScopes{ApiKeyScopeWorkspaceRead},
|
||||
AllowList: AllowList{
|
||||
{Type: rbac.ResourceWorkspace.Type, ID: allowID},
|
||||
},
|
||||
}
|
||||
s, err := key.ScopeSet().Expand()
|
||||
require.NoError(t, err)
|
||||
require.Len(t, s.AllowIDList, 1)
|
||||
require.Equal(t, rbac.AllowListElement{Type: rbac.ResourceWorkspace.Type, ID: allowID}, s.AllowIDList[0])
|
||||
})
|
||||
}
|
||||
|
||||
// Helpers
|
||||
|
||||
+231
-6
@@ -166,6 +166,51 @@ const (
|
||||
ApiKeyScopeCoderTemplatesbuild APIKeyScope = "coder:templates.build"
|
||||
ApiKeyScopeCoderTemplatesauthor APIKeyScope = "coder:templates.author"
|
||||
ApiKeyScopeCoderApikeysmanageSelf APIKeyScope = "coder:apikeys.manage_self"
|
||||
ApiKeyScopeAibridgeInterception APIKeyScope = "aibridge_interception:*"
|
||||
ApiKeyScopeApiKey APIKeyScope = "api_key:*"
|
||||
ApiKeyScopeAssignOrgRole APIKeyScope = "assign_org_role:*"
|
||||
ApiKeyScopeAssignRole APIKeyScope = "assign_role:*"
|
||||
ApiKeyScopeAuditLog APIKeyScope = "audit_log:*"
|
||||
ApiKeyScopeConnectionLog APIKeyScope = "connection_log:*"
|
||||
ApiKeyScopeCryptoKey APIKeyScope = "crypto_key:*"
|
||||
ApiKeyScopeDebugInfo APIKeyScope = "debug_info:*"
|
||||
ApiKeyScopeDeploymentConfig APIKeyScope = "deployment_config:*"
|
||||
ApiKeyScopeDeploymentStats APIKeyScope = "deployment_stats:*"
|
||||
ApiKeyScopeFile APIKeyScope = "file:*"
|
||||
ApiKeyScopeGroup APIKeyScope = "group:*"
|
||||
ApiKeyScopeGroupMember APIKeyScope = "group_member:*"
|
||||
ApiKeyScopeIdpsyncSettings APIKeyScope = "idpsync_settings:*"
|
||||
ApiKeyScopeInboxNotification APIKeyScope = "inbox_notification:*"
|
||||
ApiKeyScopeLicense APIKeyScope = "license:*"
|
||||
ApiKeyScopeNotificationMessage APIKeyScope = "notification_message:*"
|
||||
ApiKeyScopeNotificationPreference APIKeyScope = "notification_preference:*"
|
||||
ApiKeyScopeNotificationTemplate APIKeyScope = "notification_template:*"
|
||||
ApiKeyScopeOauth2App APIKeyScope = "oauth2_app:*"
|
||||
ApiKeyScopeOauth2AppCodeToken APIKeyScope = "oauth2_app_code_token:*"
|
||||
ApiKeyScopeOauth2AppSecret APIKeyScope = "oauth2_app_secret:*"
|
||||
ApiKeyScopeOrganization APIKeyScope = "organization:*"
|
||||
ApiKeyScopeOrganizationMember APIKeyScope = "organization_member:*"
|
||||
ApiKeyScopePrebuiltWorkspace APIKeyScope = "prebuilt_workspace:*"
|
||||
ApiKeyScopeProvisionerDaemon APIKeyScope = "provisioner_daemon:*"
|
||||
ApiKeyScopeProvisionerJobs APIKeyScope = "provisioner_jobs:*"
|
||||
ApiKeyScopeReplicas APIKeyScope = "replicas:*"
|
||||
ApiKeyScopeSystem APIKeyScope = "system:*"
|
||||
ApiKeyScopeTailnetCoordinator APIKeyScope = "tailnet_coordinator:*"
|
||||
ApiKeyScopeTemplate APIKeyScope = "template:*"
|
||||
ApiKeyScopeUsageEvent APIKeyScope = "usage_event:*"
|
||||
ApiKeyScopeUser APIKeyScope = "user:*"
|
||||
ApiKeyScopeUserSecret APIKeyScope = "user_secret:*"
|
||||
ApiKeyScopeWebpushSubscription APIKeyScope = "webpush_subscription:*"
|
||||
ApiKeyScopeWorkspace APIKeyScope = "workspace:*"
|
||||
ApiKeyScopeWorkspaceAgentDevcontainers APIKeyScope = "workspace_agent_devcontainers:*"
|
||||
ApiKeyScopeWorkspaceAgentResourceMonitor APIKeyScope = "workspace_agent_resource_monitor:*"
|
||||
ApiKeyScopeWorkspaceDormant APIKeyScope = "workspace_dormant:*"
|
||||
ApiKeyScopeWorkspaceProxy APIKeyScope = "workspace_proxy:*"
|
||||
ApiKeyScopeTaskCreate APIKeyScope = "task:create"
|
||||
ApiKeyScopeTaskRead APIKeyScope = "task:read"
|
||||
ApiKeyScopeTaskUpdate APIKeyScope = "task:update"
|
||||
ApiKeyScopeTaskDelete APIKeyScope = "task:delete"
|
||||
ApiKeyScopeTask APIKeyScope = "task:*"
|
||||
)
|
||||
|
||||
func (e *APIKeyScope) Scan(src interface{}) error {
|
||||
@@ -351,7 +396,52 @@ func (e APIKeyScope) Valid() bool {
|
||||
ApiKeyScopeCoderWorkspacesaccess,
|
||||
ApiKeyScopeCoderTemplatesbuild,
|
||||
ApiKeyScopeCoderTemplatesauthor,
|
||||
ApiKeyScopeCoderApikeysmanageSelf:
|
||||
ApiKeyScopeCoderApikeysmanageSelf,
|
||||
ApiKeyScopeAibridgeInterception,
|
||||
ApiKeyScopeApiKey,
|
||||
ApiKeyScopeAssignOrgRole,
|
||||
ApiKeyScopeAssignRole,
|
||||
ApiKeyScopeAuditLog,
|
||||
ApiKeyScopeConnectionLog,
|
||||
ApiKeyScopeCryptoKey,
|
||||
ApiKeyScopeDebugInfo,
|
||||
ApiKeyScopeDeploymentConfig,
|
||||
ApiKeyScopeDeploymentStats,
|
||||
ApiKeyScopeFile,
|
||||
ApiKeyScopeGroup,
|
||||
ApiKeyScopeGroupMember,
|
||||
ApiKeyScopeIdpsyncSettings,
|
||||
ApiKeyScopeInboxNotification,
|
||||
ApiKeyScopeLicense,
|
||||
ApiKeyScopeNotificationMessage,
|
||||
ApiKeyScopeNotificationPreference,
|
||||
ApiKeyScopeNotificationTemplate,
|
||||
ApiKeyScopeOauth2App,
|
||||
ApiKeyScopeOauth2AppCodeToken,
|
||||
ApiKeyScopeOauth2AppSecret,
|
||||
ApiKeyScopeOrganization,
|
||||
ApiKeyScopeOrganizationMember,
|
||||
ApiKeyScopePrebuiltWorkspace,
|
||||
ApiKeyScopeProvisionerDaemon,
|
||||
ApiKeyScopeProvisionerJobs,
|
||||
ApiKeyScopeReplicas,
|
||||
ApiKeyScopeSystem,
|
||||
ApiKeyScopeTailnetCoordinator,
|
||||
ApiKeyScopeTemplate,
|
||||
ApiKeyScopeUsageEvent,
|
||||
ApiKeyScopeUser,
|
||||
ApiKeyScopeUserSecret,
|
||||
ApiKeyScopeWebpushSubscription,
|
||||
ApiKeyScopeWorkspace,
|
||||
ApiKeyScopeWorkspaceAgentDevcontainers,
|
||||
ApiKeyScopeWorkspaceAgentResourceMonitor,
|
||||
ApiKeyScopeWorkspaceDormant,
|
||||
ApiKeyScopeWorkspaceProxy,
|
||||
ApiKeyScopeTaskCreate,
|
||||
ApiKeyScopeTaskRead,
|
||||
ApiKeyScopeTaskUpdate,
|
||||
ApiKeyScopeTaskDelete,
|
||||
ApiKeyScopeTask:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
@@ -506,6 +596,51 @@ func AllAPIKeyScopeValues() []APIKeyScope {
|
||||
ApiKeyScopeCoderTemplatesbuild,
|
||||
ApiKeyScopeCoderTemplatesauthor,
|
||||
ApiKeyScopeCoderApikeysmanageSelf,
|
||||
ApiKeyScopeAibridgeInterception,
|
||||
ApiKeyScopeApiKey,
|
||||
ApiKeyScopeAssignOrgRole,
|
||||
ApiKeyScopeAssignRole,
|
||||
ApiKeyScopeAuditLog,
|
||||
ApiKeyScopeConnectionLog,
|
||||
ApiKeyScopeCryptoKey,
|
||||
ApiKeyScopeDebugInfo,
|
||||
ApiKeyScopeDeploymentConfig,
|
||||
ApiKeyScopeDeploymentStats,
|
||||
ApiKeyScopeFile,
|
||||
ApiKeyScopeGroup,
|
||||
ApiKeyScopeGroupMember,
|
||||
ApiKeyScopeIdpsyncSettings,
|
||||
ApiKeyScopeInboxNotification,
|
||||
ApiKeyScopeLicense,
|
||||
ApiKeyScopeNotificationMessage,
|
||||
ApiKeyScopeNotificationPreference,
|
||||
ApiKeyScopeNotificationTemplate,
|
||||
ApiKeyScopeOauth2App,
|
||||
ApiKeyScopeOauth2AppCodeToken,
|
||||
ApiKeyScopeOauth2AppSecret,
|
||||
ApiKeyScopeOrganization,
|
||||
ApiKeyScopeOrganizationMember,
|
||||
ApiKeyScopePrebuiltWorkspace,
|
||||
ApiKeyScopeProvisionerDaemon,
|
||||
ApiKeyScopeProvisionerJobs,
|
||||
ApiKeyScopeReplicas,
|
||||
ApiKeyScopeSystem,
|
||||
ApiKeyScopeTailnetCoordinator,
|
||||
ApiKeyScopeTemplate,
|
||||
ApiKeyScopeUsageEvent,
|
||||
ApiKeyScopeUser,
|
||||
ApiKeyScopeUserSecret,
|
||||
ApiKeyScopeWebpushSubscription,
|
||||
ApiKeyScopeWorkspace,
|
||||
ApiKeyScopeWorkspaceAgentDevcontainers,
|
||||
ApiKeyScopeWorkspaceAgentResourceMonitor,
|
||||
ApiKeyScopeWorkspaceDormant,
|
||||
ApiKeyScopeWorkspaceProxy,
|
||||
ApiKeyScopeTaskCreate,
|
||||
ApiKeyScopeTaskRead,
|
||||
ApiKeyScopeTaskUpdate,
|
||||
ApiKeyScopeTaskDelete,
|
||||
ApiKeyScopeTask,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2535,6 +2670,7 @@ const (
|
||||
ResourceTypeWorkspaceAgent ResourceType = "workspace_agent"
|
||||
ResourceTypeWorkspaceApp ResourceType = "workspace_app"
|
||||
ResourceTypePrebuildsSettings ResourceType = "prebuilds_settings"
|
||||
ResourceTypeTask ResourceType = "task"
|
||||
)
|
||||
|
||||
func (e *ResourceType) Scan(src interface{}) error {
|
||||
@@ -2598,7 +2734,8 @@ func (e ResourceType) Valid() bool {
|
||||
ResourceTypeIdpSyncSettingsRole,
|
||||
ResourceTypeWorkspaceAgent,
|
||||
ResourceTypeWorkspaceApp,
|
||||
ResourceTypePrebuildsSettings:
|
||||
ResourceTypePrebuildsSettings,
|
||||
ResourceTypeTask:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
@@ -2631,6 +2768,7 @@ func AllResourceTypeValues() []ResourceType {
|
||||
ResourceTypeWorkspaceAgent,
|
||||
ResourceTypeWorkspaceApp,
|
||||
ResourceTypePrebuildsSettings,
|
||||
ResourceTypeTask,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2750,6 +2888,76 @@ func AllTailnetStatusValues() []TailnetStatus {
|
||||
}
|
||||
}
|
||||
|
||||
// TaskStatus mirrors the task_status database enum.
type TaskStatus string

const (
	TaskStatusPending      TaskStatus = "pending"
	TaskStatusInitializing TaskStatus = "initializing"
	TaskStatusActive       TaskStatus = "active"
	TaskStatusPaused       TaskStatus = "paused"
	TaskStatusUnknown      TaskStatus = "unknown"
	TaskStatusError        TaskStatus = "error"
)

// Scan implements sql.Scanner, accepting string or []byte values from the
// database driver.
func (e *TaskStatus) Scan(src interface{}) error {
	switch v := src.(type) {
	case string:
		*e = TaskStatus(v)
	case []byte:
		*e = TaskStatus(v)
	default:
		return fmt.Errorf("unsupported scan type for TaskStatus: %T", src)
	}
	return nil
}

// NullTaskStatus represents a task_status column that may be NULL.
type NullTaskStatus struct {
	TaskStatus TaskStatus `json:"task_status"`
	Valid      bool       `json:"valid"` // Valid is true if TaskStatus is not NULL
}

// Scan implements the Scanner interface.
func (ns *NullTaskStatus) Scan(value interface{}) error {
	if value == nil {
		ns.TaskStatus, ns.Valid = "", false
		return nil
	}
	ns.Valid = true
	return ns.TaskStatus.Scan(value)
}

// Value implements the driver Valuer interface.
func (ns NullTaskStatus) Value() (driver.Value, error) {
	if !ns.Valid {
		return nil, nil
	}
	return string(ns.TaskStatus), nil
}

// Valid reports whether e is one of the declared enum members.
func (e TaskStatus) Valid() bool {
	for _, known := range AllTaskStatusValues() {
		if e == known {
			return true
		}
	}
	return false
}

// AllTaskStatusValues lists every member of the task_status enum, in
// declaration order.
func AllTaskStatusValues() []TaskStatus {
	return []TaskStatus{
		TaskStatusPending,
		TaskStatusInitializing,
		TaskStatusActive,
		TaskStatusPaused,
		TaskStatusUnknown,
		TaskStatusError,
	}
}
|
||||
|
||||
// Defines the users status: active, dormant, or suspended.
|
||||
type UserStatus string
|
||||
|
||||
@@ -3992,6 +4200,23 @@ type TailnetTunnel struct {
|
||||
}
|
||||
|
||||
type Task struct {
|
||||
ID uuid.UUID `db:"id" json:"id"`
|
||||
OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"`
|
||||
OwnerID uuid.UUID `db:"owner_id" json:"owner_id"`
|
||||
Name string `db:"name" json:"name"`
|
||||
WorkspaceID uuid.NullUUID `db:"workspace_id" json:"workspace_id"`
|
||||
TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"`
|
||||
TemplateParameters json.RawMessage `db:"template_parameters" json:"template_parameters"`
|
||||
Prompt string `db:"prompt" json:"prompt"`
|
||||
CreatedAt time.Time `db:"created_at" json:"created_at"`
|
||||
DeletedAt sql.NullTime `db:"deleted_at" json:"deleted_at"`
|
||||
Status TaskStatus `db:"status" json:"status"`
|
||||
WorkspaceBuildNumber sql.NullInt32 `db:"workspace_build_number" json:"workspace_build_number"`
|
||||
WorkspaceAgentID uuid.NullUUID `db:"workspace_agent_id" json:"workspace_agent_id"`
|
||||
WorkspaceAppID uuid.NullUUID `db:"workspace_app_id" json:"workspace_app_id"`
|
||||
}
|
||||
|
||||
type TaskTable struct {
|
||||
ID uuid.UUID `db:"id" json:"id"`
|
||||
OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"`
|
||||
OwnerID uuid.UUID `db:"owner_id" json:"owner_id"`
|
||||
@@ -4005,10 +4230,10 @@ type Task struct {
|
||||
}
|
||||
|
||||
type TaskWorkspaceApp struct {
|
||||
TaskID uuid.UUID `db:"task_id" json:"task_id"`
|
||||
WorkspaceBuildID uuid.UUID `db:"workspace_build_id" json:"workspace_build_id"`
|
||||
WorkspaceAgentID uuid.UUID `db:"workspace_agent_id" json:"workspace_agent_id"`
|
||||
WorkspaceAppID uuid.UUID `db:"workspace_app_id" json:"workspace_app_id"`
|
||||
TaskID uuid.UUID `db:"task_id" json:"task_id"`
|
||||
WorkspaceAgentID uuid.NullUUID `db:"workspace_agent_id" json:"workspace_agent_id"`
|
||||
WorkspaceAppID uuid.NullUUID `db:"workspace_app_id" json:"workspace_app_id"`
|
||||
WorkspaceBuildNumber int32 `db:"workspace_build_number" json:"workspace_build_number"`
|
||||
}
|
||||
|
||||
type TelemetryItem struct {
|
||||
|
||||
@@ -331,6 +331,8 @@ type sqlcQuerier interface {
|
||||
GetTailnetPeers(ctx context.Context, id uuid.UUID) ([]TailnetPeer, error)
|
||||
GetTailnetTunnelPeerBindings(ctx context.Context, srcID uuid.UUID) ([]GetTailnetTunnelPeerBindingsRow, error)
|
||||
GetTailnetTunnelPeerIDs(ctx context.Context, srcID uuid.UUID) ([]GetTailnetTunnelPeerIDsRow, error)
|
||||
GetTaskByID(ctx context.Context, id uuid.UUID) (Task, error)
|
||||
GetTaskByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (Task, error)
|
||||
GetTelemetryItem(ctx context.Context, key string) (TelemetryItem, error)
|
||||
GetTelemetryItems(ctx context.Context) ([]TelemetryItem, error)
|
||||
// GetTemplateAppInsights returns the aggregate usage of each app in a given
|
||||
@@ -550,6 +552,7 @@ type sqlcQuerier interface {
|
||||
InsertProvisionerJobTimings(ctx context.Context, arg InsertProvisionerJobTimingsParams) ([]ProvisionerJobTiming, error)
|
||||
InsertProvisionerKey(ctx context.Context, arg InsertProvisionerKeyParams) (ProvisionerKey, error)
|
||||
InsertReplica(ctx context.Context, arg InsertReplicaParams) (Replica, error)
|
||||
InsertTask(ctx context.Context, arg InsertTaskParams) (TaskTable, error)
|
||||
InsertTelemetryItemIfNotExists(ctx context.Context, arg InsertTelemetryItemIfNotExistsParams) error
|
||||
InsertTemplate(ctx context.Context, arg InsertTemplateParams) error
|
||||
InsertTemplateVersion(ctx context.Context, arg InsertTemplateVersionParams) error
|
||||
@@ -592,6 +595,7 @@ type sqlcQuerier interface {
|
||||
ListAIBridgeUserPromptsByInterceptionIDs(ctx context.Context, interceptionIds []uuid.UUID) ([]AIBridgeUserPrompt, error)
|
||||
ListProvisionerKeysByOrganization(ctx context.Context, organizationID uuid.UUID) ([]ProvisionerKey, error)
|
||||
ListProvisionerKeysByOrganizationExcludeReserved(ctx context.Context, organizationID uuid.UUID) ([]ProvisionerKey, error)
|
||||
ListTasks(ctx context.Context, arg ListTasksParams) ([]Task, error)
|
||||
ListUserSecrets(ctx context.Context, userID uuid.UUID) ([]UserSecret, error)
|
||||
ListWorkspaceAgentPortShares(ctx context.Context, workspaceID uuid.UUID) ([]WorkspaceAgentPortShare, error)
|
||||
MarkAllInboxNotificationsAsRead(ctx context.Context, arg MarkAllInboxNotificationsAsReadParams) error
|
||||
@@ -729,6 +733,7 @@ type sqlcQuerier interface {
|
||||
UpsertTailnetCoordinator(ctx context.Context, id uuid.UUID) (TailnetCoordinator, error)
|
||||
UpsertTailnetPeer(ctx context.Context, arg UpsertTailnetPeerParams) (TailnetPeer, error)
|
||||
UpsertTailnetTunnel(ctx context.Context, arg UpsertTailnetTunnelParams) (TailnetTunnel, error)
|
||||
UpsertTaskWorkspaceApp(ctx context.Context, arg UpsertTaskWorkspaceAppParams) (TaskWorkspaceApp, error)
|
||||
UpsertTelemetryItem(ctx context.Context, arg UpsertTelemetryItemParams) error
|
||||
// This query aggregates the workspace_agent_stats and workspace_app_stats data
|
||||
// into a single table for efficient storage and querying. Half-hour buckets are
|
||||
|
||||
@@ -6653,6 +6653,649 @@ func TestGetLatestWorkspaceBuildsByWorkspaceIDs(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestTasksWithStatusView(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
createProvisionerJob := func(t *testing.T, db database.Store, org database.Organization, user database.User, buildStatus database.ProvisionerJobStatus) database.ProvisionerJob {
|
||||
t.Helper()
|
||||
|
||||
var jobParams database.ProvisionerJob
|
||||
|
||||
switch buildStatus {
|
||||
case database.ProvisionerJobStatusPending:
|
||||
jobParams = database.ProvisionerJob{
|
||||
OrganizationID: org.ID,
|
||||
Type: database.ProvisionerJobTypeWorkspaceBuild,
|
||||
InitiatorID: user.ID,
|
||||
}
|
||||
case database.ProvisionerJobStatusRunning:
|
||||
jobParams = database.ProvisionerJob{
|
||||
OrganizationID: org.ID,
|
||||
Type: database.ProvisionerJobTypeWorkspaceBuild,
|
||||
InitiatorID: user.ID,
|
||||
StartedAt: sql.NullTime{Valid: true, Time: dbtime.Now()},
|
||||
}
|
||||
case database.ProvisionerJobStatusFailed:
|
||||
jobParams = database.ProvisionerJob{
|
||||
OrganizationID: org.ID,
|
||||
Type: database.ProvisionerJobTypeWorkspaceBuild,
|
||||
InitiatorID: user.ID,
|
||||
StartedAt: sql.NullTime{Valid: true, Time: dbtime.Now()},
|
||||
CompletedAt: sql.NullTime{Valid: true, Time: dbtime.Now()},
|
||||
Error: sql.NullString{Valid: true, String: "job failed"},
|
||||
}
|
||||
case database.ProvisionerJobStatusSucceeded:
|
||||
jobParams = database.ProvisionerJob{
|
||||
OrganizationID: org.ID,
|
||||
Type: database.ProvisionerJobTypeWorkspaceBuild,
|
||||
InitiatorID: user.ID,
|
||||
StartedAt: sql.NullTime{Valid: true, Time: dbtime.Now()},
|
||||
CompletedAt: sql.NullTime{Valid: true, Time: dbtime.Now()},
|
||||
}
|
||||
default:
|
||||
t.Errorf("invalid build status: %v", buildStatus)
|
||||
}
|
||||
|
||||
return dbgen.ProvisionerJob(t, db, nil, jobParams)
|
||||
}
|
||||
|
||||
createTask := func(
|
||||
ctx context.Context,
|
||||
t *testing.T,
|
||||
db database.Store,
|
||||
org database.Organization,
|
||||
user database.User,
|
||||
buildStatus database.ProvisionerJobStatus,
|
||||
buildTransition database.WorkspaceTransition,
|
||||
agentState database.WorkspaceAgentLifecycleState,
|
||||
appHealths []database.WorkspaceAppHealth,
|
||||
) database.TaskTable {
|
||||
t.Helper()
|
||||
|
||||
template := dbgen.Template(t, db, database.Template{
|
||||
OrganizationID: org.ID,
|
||||
CreatedBy: user.ID,
|
||||
})
|
||||
templateVersion := dbgen.TemplateVersion(t, db, database.TemplateVersion{
|
||||
TemplateID: uuid.NullUUID{UUID: template.ID, Valid: true},
|
||||
OrganizationID: org.ID,
|
||||
CreatedBy: user.ID,
|
||||
})
|
||||
|
||||
if buildStatus == "" {
|
||||
return dbgen.Task(t, db, database.TaskTable{
|
||||
OrganizationID: org.ID,
|
||||
OwnerID: user.ID,
|
||||
Name: "test-task",
|
||||
TemplateVersionID: templateVersion.ID,
|
||||
Prompt: "Test prompt",
|
||||
})
|
||||
}
|
||||
|
||||
job := createProvisionerJob(t, db, org, user, buildStatus)
|
||||
|
||||
workspace := dbgen.Workspace(t, db, database.WorkspaceTable{
|
||||
OrganizationID: org.ID,
|
||||
TemplateID: template.ID,
|
||||
OwnerID: user.ID,
|
||||
})
|
||||
workspaceID := uuid.NullUUID{Valid: true, UUID: workspace.ID}
|
||||
|
||||
task := dbgen.Task(t, db, database.TaskTable{
|
||||
OrganizationID: org.ID,
|
||||
OwnerID: user.ID,
|
||||
Name: "test-task",
|
||||
WorkspaceID: workspaceID,
|
||||
TemplateVersionID: templateVersion.ID,
|
||||
Prompt: "Test prompt",
|
||||
})
|
||||
|
||||
workspaceBuild := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{
|
||||
WorkspaceID: workspace.ID,
|
||||
TemplateVersionID: templateVersion.ID,
|
||||
BuildNumber: 1,
|
||||
Transition: buildTransition,
|
||||
InitiatorID: user.ID,
|
||||
JobID: job.ID,
|
||||
})
|
||||
workspaceBuildNumber := workspaceBuild.BuildNumber
|
||||
|
||||
_, err := db.UpsertTaskWorkspaceApp(ctx, database.UpsertTaskWorkspaceAppParams{
|
||||
TaskID: task.ID,
|
||||
WorkspaceBuildNumber: workspaceBuildNumber,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
resource := dbgen.WorkspaceResource(t, db, database.WorkspaceResource{
|
||||
JobID: job.ID,
|
||||
})
|
||||
|
||||
if agentState != "" {
|
||||
agent := dbgen.WorkspaceAgent(t, db, database.WorkspaceAgent{
|
||||
ResourceID: resource.ID,
|
||||
})
|
||||
workspaceAgentID := agent.ID
|
||||
|
||||
_, err := db.UpsertTaskWorkspaceApp(ctx, database.UpsertTaskWorkspaceAppParams{
|
||||
TaskID: task.ID,
|
||||
WorkspaceBuildNumber: workspaceBuildNumber,
|
||||
WorkspaceAgentID: uuid.NullUUID{UUID: workspaceAgentID, Valid: true},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
err = db.UpdateWorkspaceAgentLifecycleStateByID(ctx, database.UpdateWorkspaceAgentLifecycleStateByIDParams{
|
||||
ID: agent.ID,
|
||||
LifecycleState: agentState,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
for i, health := range appHealths {
|
||||
app := dbgen.WorkspaceApp(t, db, database.WorkspaceApp{
|
||||
AgentID: workspaceAgentID,
|
||||
Slug: fmt.Sprintf("test-app-%d", i),
|
||||
DisplayName: fmt.Sprintf("Test App %d", i+1),
|
||||
Health: health,
|
||||
})
|
||||
if i == 0 {
|
||||
// Assume the first app is the tasks app.
|
||||
_, err := db.UpsertTaskWorkspaceApp(ctx, database.UpsertTaskWorkspaceAppParams{
|
||||
TaskID: task.ID,
|
||||
WorkspaceBuildNumber: workspaceBuildNumber,
|
||||
WorkspaceAgentID: uuid.NullUUID{UUID: workspaceAgentID, Valid: true},
|
||||
WorkspaceAppID: uuid.NullUUID{UUID: app.ID, Valid: true},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return task
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
buildStatus database.ProvisionerJobStatus
|
||||
buildTransition database.WorkspaceTransition
|
||||
agentState database.WorkspaceAgentLifecycleState
|
||||
appHealths []database.WorkspaceAppHealth
|
||||
expectedStatus database.TaskStatus
|
||||
description string
|
||||
expectBuildNumberValid bool
|
||||
expectBuildNumber int32
|
||||
expectWorkspaceAgentValid bool
|
||||
expectWorkspaceAppValid bool
|
||||
}{
|
||||
{
|
||||
name: "NoWorkspace",
|
||||
expectedStatus: "pending",
|
||||
description: "Task with no workspace assigned",
|
||||
expectBuildNumberValid: false,
|
||||
expectWorkspaceAgentValid: false,
|
||||
expectWorkspaceAppValid: false,
|
||||
},
|
||||
{
|
||||
name: "FailedBuild",
|
||||
buildStatus: database.ProvisionerJobStatusFailed,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
expectedStatus: database.TaskStatusError,
|
||||
description: "Latest workspace build failed",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: false,
|
||||
expectWorkspaceAppValid: false,
|
||||
},
|
||||
{
|
||||
name: "StoppedWorkspace",
|
||||
buildStatus: database.ProvisionerJobStatusSucceeded,
|
||||
buildTransition: database.WorkspaceTransitionStop,
|
||||
expectedStatus: database.TaskStatusPaused,
|
||||
description: "Workspace is stopped",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: false,
|
||||
expectWorkspaceAppValid: false,
|
||||
},
|
||||
{
|
||||
name: "DeletedWorkspace",
|
||||
buildStatus: database.ProvisionerJobStatusSucceeded,
|
||||
buildTransition: database.WorkspaceTransitionDelete,
|
||||
expectedStatus: database.TaskStatusPaused,
|
||||
description: "Workspace is deleted",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: false,
|
||||
expectWorkspaceAppValid: false,
|
||||
},
|
||||
{
|
||||
name: "PendingStart",
|
||||
buildStatus: database.ProvisionerJobStatusPending,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
expectedStatus: database.TaskStatusInitializing,
|
||||
description: "Workspace build is starting (pending)",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: false,
|
||||
expectWorkspaceAppValid: false,
|
||||
},
|
||||
{
|
||||
name: "RunningStart",
|
||||
buildStatus: database.ProvisionerJobStatusRunning,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
expectedStatus: database.TaskStatusInitializing,
|
||||
description: "Workspace build is starting (running)",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: false,
|
||||
expectWorkspaceAppValid: false,
|
||||
},
|
||||
{
|
||||
name: "StartingAgent",
|
||||
buildStatus: database.ProvisionerJobStatusSucceeded,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
agentState: database.WorkspaceAgentLifecycleStateStarting,
|
||||
appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthInitializing},
|
||||
expectedStatus: database.TaskStatusInitializing,
|
||||
description: "Workspace is running but agent is starting",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: true,
|
||||
expectWorkspaceAppValid: true,
|
||||
},
|
||||
{
|
||||
name: "CreatedAgent",
|
||||
buildStatus: database.ProvisionerJobStatusSucceeded,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
agentState: database.WorkspaceAgentLifecycleStateCreated,
|
||||
appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthInitializing},
|
||||
expectedStatus: database.TaskStatusInitializing,
|
||||
description: "Workspace is running but agent is created",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: true,
|
||||
expectWorkspaceAppValid: true,
|
||||
},
|
||||
{
|
||||
name: "ReadyAgentInitializingApp",
|
||||
buildStatus: database.ProvisionerJobStatusSucceeded,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
agentState: database.WorkspaceAgentLifecycleStateReady,
|
||||
appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthInitializing},
|
||||
expectedStatus: database.TaskStatusInitializing,
|
||||
description: "Agent is ready but app is initializing",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: true,
|
||||
expectWorkspaceAppValid: true,
|
||||
},
|
||||
{
|
||||
name: "ReadyAgentHealthyApp",
|
||||
buildStatus: database.ProvisionerJobStatusSucceeded,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
agentState: database.WorkspaceAgentLifecycleStateReady,
|
||||
appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthHealthy},
|
||||
expectedStatus: database.TaskStatusActive,
|
||||
description: "Agent is ready and app is healthy",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: true,
|
||||
expectWorkspaceAppValid: true,
|
||||
},
|
||||
{
|
||||
name: "ReadyAgentDisabledApp",
|
||||
buildStatus: database.ProvisionerJobStatusSucceeded,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
agentState: database.WorkspaceAgentLifecycleStateReady,
|
||||
appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthDisabled},
|
||||
expectedStatus: database.TaskStatusActive,
|
||||
description: "Agent is ready and app health checking is disabled",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: true,
|
||||
expectWorkspaceAppValid: true,
|
||||
},
|
||||
{
|
||||
name: "ReadyAgentUnhealthyApp",
|
||||
buildStatus: database.ProvisionerJobStatusSucceeded,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
agentState: database.WorkspaceAgentLifecycleStateReady,
|
||||
appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthUnhealthy},
|
||||
expectedStatus: database.TaskStatusError,
|
||||
description: "Agent is ready but app is unhealthy",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: true,
|
||||
expectWorkspaceAppValid: true,
|
||||
},
|
||||
{
|
||||
name: "AgentStartTimeout",
|
||||
buildStatus: database.ProvisionerJobStatusSucceeded,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
agentState: database.WorkspaceAgentLifecycleStateStartTimeout,
|
||||
expectedStatus: database.TaskStatusUnknown,
|
||||
description: "Agent start timed out",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: true,
|
||||
expectWorkspaceAppValid: false,
|
||||
},
|
||||
{
|
||||
name: "AgentStartError",
|
||||
buildStatus: database.ProvisionerJobStatusSucceeded,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
agentState: database.WorkspaceAgentLifecycleStateStartError,
|
||||
expectedStatus: database.TaskStatusUnknown,
|
||||
description: "Agent failed to start",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: true,
|
||||
expectWorkspaceAppValid: false,
|
||||
},
|
||||
{
|
||||
name: "AgentShuttingDown",
|
||||
buildStatus: database.ProvisionerJobStatusSucceeded,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
agentState: database.WorkspaceAgentLifecycleStateShuttingDown,
|
||||
expectedStatus: database.TaskStatusUnknown,
|
||||
description: "Agent is shutting down",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: true,
|
||||
expectWorkspaceAppValid: false,
|
||||
},
|
||||
{
|
||||
name: "AgentOff",
|
||||
buildStatus: database.ProvisionerJobStatusSucceeded,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
agentState: database.WorkspaceAgentLifecycleStateOff,
|
||||
expectedStatus: database.TaskStatusUnknown,
|
||||
description: "Agent is off",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: true,
|
||||
expectWorkspaceAppValid: false,
|
||||
},
|
||||
{
|
||||
name: "RunningJobReadyAgentHealthyApp",
|
||||
buildStatus: database.ProvisionerJobStatusRunning,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
agentState: database.WorkspaceAgentLifecycleStateReady,
|
||||
appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthHealthy},
|
||||
expectedStatus: database.TaskStatusActive,
|
||||
description: "Running job with ready agent and healthy app should be active",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: true,
|
||||
expectWorkspaceAppValid: true,
|
||||
},
|
||||
{
|
||||
name: "RunningJobReadyAgentInitializingApp",
|
||||
buildStatus: database.ProvisionerJobStatusRunning,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
agentState: database.WorkspaceAgentLifecycleStateReady,
|
||||
appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthInitializing},
|
||||
expectedStatus: database.TaskStatusInitializing,
|
||||
description: "Running job with ready agent but initializing app should be initializing",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: true,
|
||||
expectWorkspaceAppValid: true,
|
||||
},
|
||||
{
|
||||
name: "RunningJobReadyAgentUnhealthyApp",
|
||||
buildStatus: database.ProvisionerJobStatusRunning,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
agentState: database.WorkspaceAgentLifecycleStateReady,
|
||||
appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthUnhealthy},
|
||||
expectedStatus: database.TaskStatusError,
|
||||
description: "Running job with ready agent but unhealthy app should be error",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: true,
|
||||
expectWorkspaceAppValid: true,
|
||||
},
|
||||
{
|
||||
name: "RunningJobConnectingAgent",
|
||||
buildStatus: database.ProvisionerJobStatusRunning,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
agentState: database.WorkspaceAgentLifecycleStateStarting,
|
||||
appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthInitializing},
|
||||
expectedStatus: database.TaskStatusInitializing,
|
||||
description: "Running job with connecting agent should be initializing",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: true,
|
||||
expectWorkspaceAppValid: true,
|
||||
},
|
||||
{
|
||||
name: "RunningJobReadyAgentDisabledApp",
|
||||
buildStatus: database.ProvisionerJobStatusRunning,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
agentState: database.WorkspaceAgentLifecycleStateReady,
|
||||
appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthDisabled},
|
||||
expectedStatus: database.TaskStatusActive,
|
||||
description: "Running job with ready agent and disabled app health checking should be active",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: true,
|
||||
expectWorkspaceAppValid: true,
|
||||
},
|
||||
{
|
||||
name: "RunningJobReadyAgentHealthyTaskAppUnhealthyOtherAppIsOK",
|
||||
buildStatus: database.ProvisionerJobStatusRunning,
|
||||
buildTransition: database.WorkspaceTransitionStart,
|
||||
agentState: database.WorkspaceAgentLifecycleStateReady,
|
||||
appHealths: []database.WorkspaceAppHealth{database.WorkspaceAppHealthHealthy, database.WorkspaceAppHealthUnhealthy},
|
||||
expectedStatus: database.TaskStatusActive,
|
||||
description: "Running job with ready agent and multiple healthy apps should be active",
|
||||
expectBuildNumberValid: true,
|
||||
expectBuildNumber: 1,
|
||||
expectWorkspaceAgentValid: true,
|
||||
expectWorkspaceAppValid: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
db, _ := dbtestutil.NewDB(t)
|
||||
ctx := testutil.Context(t, testutil.WaitLong)
|
||||
|
||||
org := dbgen.Organization(t, db, database.Organization{})
|
||||
user := dbgen.User(t, db, database.User{})
|
||||
|
||||
task := createTask(ctx, t, db, org, user, tt.buildStatus, tt.buildTransition, tt.agentState, tt.appHealths)
|
||||
|
||||
got, err := db.GetTaskByID(ctx, task.ID)
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, tt.expectedStatus, got.Status)
|
||||
|
||||
require.Equal(t, tt.expectBuildNumberValid, got.WorkspaceBuildNumber.Valid)
|
||||
if tt.expectBuildNumberValid {
|
||||
require.Equal(t, tt.expectBuildNumber, got.WorkspaceBuildNumber.Int32)
|
||||
}
|
||||
|
||||
require.Equal(t, tt.expectWorkspaceAgentValid, got.WorkspaceAgentID.Valid)
|
||||
if tt.expectWorkspaceAgentValid {
|
||||
require.NotEqual(t, uuid.Nil, got.WorkspaceAgentID.UUID)
|
||||
}
|
||||
|
||||
require.Equal(t, tt.expectWorkspaceAppValid, got.WorkspaceAppID.Valid)
|
||||
if tt.expectWorkspaceAppValid {
|
||||
require.NotEqual(t, uuid.Nil, got.WorkspaceAppID.UUID)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetTaskByWorkspaceID(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
setupTask func(t *testing.T, db database.Store, org database.Organization, user database.User, templateVersion database.TemplateVersion, workspace database.WorkspaceTable)
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "task doesn't exist",
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "task with no workspace id",
|
||||
setupTask: func(t *testing.T, db database.Store, org database.Organization, user database.User, templateVersion database.TemplateVersion, workspace database.WorkspaceTable) {
|
||||
dbgen.Task(t, db, database.TaskTable{
|
||||
OrganizationID: org.ID,
|
||||
OwnerID: user.ID,
|
||||
Name: "test-task",
|
||||
TemplateVersionID: templateVersion.ID,
|
||||
Prompt: "Test prompt",
|
||||
})
|
||||
},
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "task with workspace id",
|
||||
setupTask: func(t *testing.T, db database.Store, org database.Organization, user database.User, templateVersion database.TemplateVersion, workspace database.WorkspaceTable) {
|
||||
workspaceID := uuid.NullUUID{Valid: true, UUID: workspace.ID}
|
||||
dbgen.Task(t, db, database.TaskTable{
|
||||
OrganizationID: org.ID,
|
||||
OwnerID: user.ID,
|
||||
Name: "test-task",
|
||||
WorkspaceID: workspaceID,
|
||||
TemplateVersionID: templateVersion.ID,
|
||||
Prompt: "Test prompt",
|
||||
})
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
}
|
||||
|
||||
db, _ := dbtestutil.NewDB(t)
|
||||
|
||||
for _, tt := range tests {
|
||||
tt := tt
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
org := dbgen.Organization(t, db, database.Organization{})
|
||||
user := dbgen.User(t, db, database.User{})
|
||||
template := dbgen.Template(t, db, database.Template{
|
||||
OrganizationID: org.ID,
|
||||
CreatedBy: user.ID,
|
||||
})
|
||||
templateVersion := dbgen.TemplateVersion(t, db, database.TemplateVersion{
|
||||
OrganizationID: org.ID,
|
||||
TemplateID: uuid.NullUUID{Valid: true, UUID: template.ID},
|
||||
CreatedBy: user.ID,
|
||||
})
|
||||
workspace := dbgen.Workspace(t, db, database.WorkspaceTable{
|
||||
OrganizationID: org.ID,
|
||||
OwnerID: user.ID,
|
||||
TemplateID: template.ID,
|
||||
})
|
||||
|
||||
if tt.setupTask != nil {
|
||||
tt.setupTask(t, db, org, user, templateVersion, workspace)
|
||||
}
|
||||
|
||||
ctx := testutil.Context(t, testutil.WaitLong)
|
||||
|
||||
task, err := db.GetTaskByWorkspaceID(ctx, workspace.ID)
|
||||
if tt.wantErr {
|
||||
require.Error(t, err)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
require.False(t, task.WorkspaceBuildNumber.Valid)
|
||||
require.False(t, task.WorkspaceAgentID.Valid)
|
||||
require.False(t, task.WorkspaceAppID.Valid)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestTaskNameUniqueness(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
db, _ := dbtestutil.NewDB(t)
|
||||
|
||||
org := dbgen.Organization(t, db, database.Organization{})
|
||||
user1 := dbgen.User(t, db, database.User{})
|
||||
user2 := dbgen.User(t, db, database.User{})
|
||||
template := dbgen.Template(t, db, database.Template{
|
||||
OrganizationID: org.ID,
|
||||
CreatedBy: user1.ID,
|
||||
})
|
||||
tv := dbgen.TemplateVersion(t, db, database.TemplateVersion{
|
||||
TemplateID: uuid.NullUUID{UUID: template.ID, Valid: true},
|
||||
OrganizationID: org.ID,
|
||||
CreatedBy: user1.ID,
|
||||
})
|
||||
|
||||
taskName := "my-task"
|
||||
|
||||
// Create initial task for user1.
|
||||
task1 := dbgen.Task(t, db, database.TaskTable{
|
||||
OrganizationID: org.ID,
|
||||
OwnerID: user1.ID,
|
||||
Name: taskName,
|
||||
TemplateVersionID: tv.ID,
|
||||
Prompt: "Test prompt",
|
||||
})
|
||||
require.NotEqual(t, uuid.Nil, task1.ID)
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
ownerID uuid.UUID
|
||||
taskName string
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "duplicate task name same user",
|
||||
ownerID: user1.ID,
|
||||
taskName: taskName,
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "duplicate task name different case same user",
|
||||
ownerID: user1.ID,
|
||||
taskName: "MY-TASK",
|
||||
wantErr: true,
|
||||
},
|
||||
{
|
||||
name: "same task name different user",
|
||||
ownerID: user2.ID,
|
||||
taskName: taskName,
|
||||
wantErr: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
ctx := testutil.Context(t, testutil.WaitShort)
|
||||
|
||||
task, err := db.InsertTask(ctx, database.InsertTaskParams{
|
||||
OrganizationID: org.ID,
|
||||
OwnerID: tt.ownerID,
|
||||
Name: tt.taskName,
|
||||
TemplateVersionID: tv.ID,
|
||||
TemplateParameters: json.RawMessage("{}"),
|
||||
Prompt: "Test prompt",
|
||||
CreatedAt: dbtime.Now(),
|
||||
})
|
||||
if tt.wantErr {
|
||||
require.Error(t, err)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
require.NotEqual(t, uuid.Nil, task.ID)
|
||||
require.NotEqual(t, task1.ID, task.ID)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestUsageEventsTrigger(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
@@ -6780,3 +7423,148 @@ func TestUsageEventsTrigger(t *testing.T) {
|
||||
require.Len(t, rows, 0)
|
||||
})
|
||||
}
|
||||
|
||||
func TestListTasks(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
db, ps := dbtestutil.NewDB(t)
|
||||
|
||||
// Given: two organizations and two users, one of which is a member of both
|
||||
org1 := dbgen.Organization(t, db, database.Organization{})
|
||||
org2 := dbgen.Organization(t, db, database.Organization{})
|
||||
user1 := dbgen.User(t, db, database.User{})
|
||||
user2 := dbgen.User(t, db, database.User{})
|
||||
_ = dbgen.OrganizationMember(t, db, database.OrganizationMember{
|
||||
OrganizationID: org1.ID,
|
||||
UserID: user1.ID,
|
||||
})
|
||||
_ = dbgen.OrganizationMember(t, db, database.OrganizationMember{
|
||||
OrganizationID: org2.ID,
|
||||
UserID: user2.ID,
|
||||
})
|
||||
|
||||
// Given: a template with an active version
|
||||
tv := dbgen.TemplateVersion(t, db, database.TemplateVersion{
|
||||
CreatedBy: user1.ID,
|
||||
OrganizationID: org1.ID,
|
||||
})
|
||||
tpl := dbgen.Template(t, db, database.Template{
|
||||
CreatedBy: user1.ID,
|
||||
OrganizationID: org1.ID,
|
||||
ActiveVersionID: tv.ID,
|
||||
})
|
||||
|
||||
// Helper function to create a task
|
||||
createTask := func(orgID, ownerID uuid.UUID) database.TaskTable {
|
||||
ws := dbgen.Workspace(t, db, database.WorkspaceTable{
|
||||
OrganizationID: orgID,
|
||||
OwnerID: ownerID,
|
||||
TemplateID: tpl.ID,
|
||||
})
|
||||
pj := dbgen.ProvisionerJob(t, db, ps, database.ProvisionerJob{})
|
||||
sidebarAppID := uuid.New()
|
||||
wb := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{
|
||||
JobID: pj.ID,
|
||||
TemplateVersionID: tv.ID,
|
||||
WorkspaceID: ws.ID,
|
||||
})
|
||||
wr := dbgen.WorkspaceResource(t, db, database.WorkspaceResource{
|
||||
JobID: pj.ID,
|
||||
})
|
||||
agt := dbgen.WorkspaceAgent(t, db, database.WorkspaceAgent{
|
||||
ResourceID: wr.ID,
|
||||
})
|
||||
wa := dbgen.WorkspaceApp(t, db, database.WorkspaceApp{
|
||||
ID: sidebarAppID,
|
||||
AgentID: agt.ID,
|
||||
})
|
||||
tsk := dbgen.Task(t, db, database.TaskTable{
|
||||
OrganizationID: orgID,
|
||||
OwnerID: ownerID,
|
||||
Prompt: testutil.GetRandomName(t),
|
||||
TemplateVersionID: tv.ID,
|
||||
WorkspaceID: uuid.NullUUID{UUID: ws.ID, Valid: true},
|
||||
})
|
||||
_ = dbgen.TaskWorkspaceApp(t, db, database.TaskWorkspaceApp{
|
||||
TaskID: tsk.ID,
|
||||
WorkspaceBuildNumber: wb.BuildNumber,
|
||||
WorkspaceAgentID: uuid.NullUUID{Valid: true, UUID: agt.ID},
|
||||
WorkspaceAppID: uuid.NullUUID{Valid: true, UUID: wa.ID},
|
||||
})
|
||||
t.Logf("task_id:%s owner_id:%s org_id:%s", tsk.ID, ownerID, orgID)
|
||||
return tsk
|
||||
}
|
||||
|
||||
// Given: user1 has one task, user2 has one task, user3 has two tasks (one in each org)
|
||||
task1 := createTask(org1.ID, user1.ID)
|
||||
task2 := createTask(org1.ID, user2.ID)
|
||||
task3 := createTask(org2.ID, user2.ID)
|
||||
|
||||
// Then: run various filters and assert expected results
|
||||
for _, tc := range []struct {
|
||||
name string
|
||||
filter database.ListTasksParams
|
||||
expectIDs []uuid.UUID
|
||||
}{
|
||||
{
|
||||
name: "no filter",
|
||||
filter: database.ListTasksParams{
|
||||
OwnerID: uuid.Nil,
|
||||
OrganizationID: uuid.Nil,
|
||||
},
|
||||
expectIDs: []uuid.UUID{task3.ID, task2.ID, task1.ID},
|
||||
},
|
||||
{
|
||||
name: "filter by user ID",
|
||||
filter: database.ListTasksParams{
|
||||
OwnerID: user1.ID,
|
||||
OrganizationID: uuid.Nil,
|
||||
},
|
||||
expectIDs: []uuid.UUID{task1.ID},
|
||||
},
|
||||
{
|
||||
name: "filter by organization ID",
|
||||
filter: database.ListTasksParams{
|
||||
OwnerID: uuid.Nil,
|
||||
OrganizationID: org1.ID,
|
||||
},
|
||||
expectIDs: []uuid.UUID{task2.ID, task1.ID},
|
||||
},
|
||||
{
|
||||
name: "filter by user and organization ID",
|
||||
filter: database.ListTasksParams{
|
||||
OwnerID: user2.ID,
|
||||
OrganizationID: org2.ID,
|
||||
},
|
||||
expectIDs: []uuid.UUID{task3.ID},
|
||||
},
|
||||
{
|
||||
name: "no results",
|
||||
filter: database.ListTasksParams{
|
||||
OwnerID: user1.ID,
|
||||
OrganizationID: org2.ID,
|
||||
},
|
||||
expectIDs: nil,
|
||||
},
|
||||
} {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitShort)
|
||||
tasks, err := db.ListTasks(ctx, tc.filter)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, tasks, len(tc.expectIDs))
|
||||
|
||||
for idx, eid := range tc.expectIDs {
|
||||
task := tasks[idx]
|
||||
assert.Equal(t, eid, task.ID, "task ID mismatch at index %d", idx)
|
||||
|
||||
require.True(t, task.WorkspaceBuildNumber.Valid)
|
||||
require.Greater(t, task.WorkspaceBuildNumber.Int32, int32(0))
|
||||
require.True(t, task.WorkspaceAgentID.Valid)
|
||||
require.NotEqual(t, uuid.Nil, task.WorkspaceAgentID.UUID)
|
||||
require.True(t, task.WorkspaceAppID.Valid)
|
||||
require.NotEqual(t, uuid.Nil, task.WorkspaceAppID.UUID)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
+205
-17
@@ -8966,7 +8966,7 @@ WHERE
|
||||
-- Filter by max age if provided
|
||||
AND (
|
||||
$7::bigint IS NULL
|
||||
OR pd.last_seen_at IS NULL
|
||||
OR pd.last_seen_at IS NULL
|
||||
OR pd.last_seen_at >= (NOW() - ($7::bigint || ' ms')::interval)
|
||||
)
|
||||
AND (
|
||||
@@ -9291,11 +9291,11 @@ func (q *sqlQuerier) InsertProvisionerJobLogs(ctx context.Context, arg InsertPro
|
||||
}
|
||||
|
||||
const updateProvisionerJobLogsLength = `-- name: UpdateProvisionerJobLogsLength :exec
|
||||
UPDATE
|
||||
UPDATE
|
||||
provisioner_jobs
|
||||
SET
|
||||
SET
|
||||
logs_length = logs_length + $2
|
||||
WHERE
|
||||
WHERE
|
||||
id = $1
|
||||
`
|
||||
|
||||
@@ -9310,11 +9310,11 @@ func (q *sqlQuerier) UpdateProvisionerJobLogsLength(ctx context.Context, arg Upd
|
||||
}
|
||||
|
||||
const updateProvisionerJobLogsOverflowed = `-- name: UpdateProvisionerJobLogsOverflowed :exec
|
||||
UPDATE
|
||||
UPDATE
|
||||
provisioner_jobs
|
||||
SET
|
||||
SET
|
||||
logs_overflowed = $2
|
||||
WHERE
|
||||
WHERE
|
||||
id = $1
|
||||
`
|
||||
|
||||
@@ -9834,6 +9834,7 @@ WHERE
|
||||
AND (COALESCE(array_length($2::uuid[], 1), 0) = 0 OR pj.id = ANY($2::uuid[]))
|
||||
AND (COALESCE(array_length($3::provisioner_job_status[], 1), 0) = 0 OR pj.job_status = ANY($3::provisioner_job_status[]))
|
||||
AND ($4::tagset = 'null'::tagset OR provisioner_tagset_contains(pj.tags::tagset, $4::tagset))
|
||||
AND ($5::uuid = '00000000-0000-0000-0000-000000000000'::uuid OR pj.initiator_id = $5::uuid)
|
||||
GROUP BY
|
||||
pj.id,
|
||||
qp.queue_position,
|
||||
@@ -9849,7 +9850,7 @@ GROUP BY
|
||||
ORDER BY
|
||||
pj.created_at DESC
|
||||
LIMIT
|
||||
$5::int
|
||||
$6::int
|
||||
`
|
||||
|
||||
type GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerParams struct {
|
||||
@@ -9857,6 +9858,7 @@ type GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerPar
|
||||
IDs []uuid.UUID `db:"ids" json:"ids"`
|
||||
Status []ProvisionerJobStatus `db:"status" json:"status"`
|
||||
Tags StringMap `db:"tags" json:"tags"`
|
||||
InitiatorID uuid.UUID `db:"initiator_id" json:"initiator_id"`
|
||||
Limit sql.NullInt32 `db:"limit" json:"limit"`
|
||||
}
|
||||
|
||||
@@ -9881,6 +9883,7 @@ func (q *sqlQuerier) GetProvisionerJobsByOrganizationAndStatusWithQueuePositionA
|
||||
pq.Array(arg.IDs),
|
||||
pq.Array(arg.Status),
|
||||
arg.Tags,
|
||||
arg.InitiatorID,
|
||||
arg.Limit,
|
||||
)
|
||||
if err != nil {
|
||||
@@ -10373,7 +10376,7 @@ FROM
|
||||
provisioner_keys
|
||||
WHERE
|
||||
organization_id = $1
|
||||
AND
|
||||
AND
|
||||
lower(name) = lower($2)
|
||||
`
|
||||
|
||||
@@ -10489,10 +10492,10 @@ WHERE
|
||||
AND
|
||||
-- exclude reserved built-in key
|
||||
id != '00000000-0000-0000-0000-000000000001'::uuid
|
||||
AND
|
||||
AND
|
||||
-- exclude reserved user-auth key
|
||||
id != '00000000-0000-0000-0000-000000000002'::uuid
|
||||
AND
|
||||
AND
|
||||
-- exclude reserved psk key
|
||||
id != '00000000-0000-0000-0000-000000000003'::uuid
|
||||
`
|
||||
@@ -12504,6 +12507,191 @@ func (q *sqlQuerier) UpsertTailnetTunnel(ctx context.Context, arg UpsertTailnetT
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getTaskByID = `-- name: GetTaskByID :one
|
||||
SELECT id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at, status, workspace_build_number, workspace_agent_id, workspace_app_id FROM tasks_with_status WHERE id = $1::uuid
|
||||
`
|
||||
|
||||
func (q *sqlQuerier) GetTaskByID(ctx context.Context, id uuid.UUID) (Task, error) {
|
||||
row := q.db.QueryRowContext(ctx, getTaskByID, id)
|
||||
var i Task
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.OrganizationID,
|
||||
&i.OwnerID,
|
||||
&i.Name,
|
||||
&i.WorkspaceID,
|
||||
&i.TemplateVersionID,
|
||||
&i.TemplateParameters,
|
||||
&i.Prompt,
|
||||
&i.CreatedAt,
|
||||
&i.DeletedAt,
|
||||
&i.Status,
|
||||
&i.WorkspaceBuildNumber,
|
||||
&i.WorkspaceAgentID,
|
||||
&i.WorkspaceAppID,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getTaskByWorkspaceID = `-- name: GetTaskByWorkspaceID :one
|
||||
SELECT id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at, status, workspace_build_number, workspace_agent_id, workspace_app_id FROM tasks_with_status WHERE workspace_id = $1::uuid
|
||||
`
|
||||
|
||||
func (q *sqlQuerier) GetTaskByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (Task, error) {
|
||||
row := q.db.QueryRowContext(ctx, getTaskByWorkspaceID, workspaceID)
|
||||
var i Task
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.OrganizationID,
|
||||
&i.OwnerID,
|
||||
&i.Name,
|
||||
&i.WorkspaceID,
|
||||
&i.TemplateVersionID,
|
||||
&i.TemplateParameters,
|
||||
&i.Prompt,
|
||||
&i.CreatedAt,
|
||||
&i.DeletedAt,
|
||||
&i.Status,
|
||||
&i.WorkspaceBuildNumber,
|
||||
&i.WorkspaceAgentID,
|
||||
&i.WorkspaceAppID,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const insertTask = `-- name: InsertTask :one
|
||||
INSERT INTO tasks
|
||||
(id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at)
|
||||
VALUES
|
||||
(gen_random_uuid(), $1, $2, $3, $4, $5, $6, $7, $8)
|
||||
RETURNING id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at
|
||||
`
|
||||
|
||||
type InsertTaskParams struct {
|
||||
OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"`
|
||||
OwnerID uuid.UUID `db:"owner_id" json:"owner_id"`
|
||||
Name string `db:"name" json:"name"`
|
||||
WorkspaceID uuid.NullUUID `db:"workspace_id" json:"workspace_id"`
|
||||
TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"`
|
||||
TemplateParameters json.RawMessage `db:"template_parameters" json:"template_parameters"`
|
||||
Prompt string `db:"prompt" json:"prompt"`
|
||||
CreatedAt time.Time `db:"created_at" json:"created_at"`
|
||||
}
|
||||
|
||||
func (q *sqlQuerier) InsertTask(ctx context.Context, arg InsertTaskParams) (TaskTable, error) {
|
||||
row := q.db.QueryRowContext(ctx, insertTask,
|
||||
arg.OrganizationID,
|
||||
arg.OwnerID,
|
||||
arg.Name,
|
||||
arg.WorkspaceID,
|
||||
arg.TemplateVersionID,
|
||||
arg.TemplateParameters,
|
||||
arg.Prompt,
|
||||
arg.CreatedAt,
|
||||
)
|
||||
var i TaskTable
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.OrganizationID,
|
||||
&i.OwnerID,
|
||||
&i.Name,
|
||||
&i.WorkspaceID,
|
||||
&i.TemplateVersionID,
|
||||
&i.TemplateParameters,
|
||||
&i.Prompt,
|
||||
&i.CreatedAt,
|
||||
&i.DeletedAt,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const listTasks = `-- name: ListTasks :many
|
||||
SELECT id, organization_id, owner_id, name, workspace_id, template_version_id, template_parameters, prompt, created_at, deleted_at, status, workspace_build_number, workspace_agent_id, workspace_app_id FROM tasks_with_status tws
|
||||
WHERE tws.deleted_at IS NULL
|
||||
AND CASE WHEN $1::UUID != '00000000-0000-0000-0000-000000000000' THEN tws.owner_id = $1::UUID ELSE TRUE END
|
||||
AND CASE WHEN $2::UUID != '00000000-0000-0000-0000-000000000000' THEN tws.organization_id = $2::UUID ELSE TRUE END
|
||||
ORDER BY tws.created_at DESC
|
||||
`
|
||||
|
||||
type ListTasksParams struct {
|
||||
OwnerID uuid.UUID `db:"owner_id" json:"owner_id"`
|
||||
OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"`
|
||||
}
|
||||
|
||||
func (q *sqlQuerier) ListTasks(ctx context.Context, arg ListTasksParams) ([]Task, error) {
|
||||
rows, err := q.db.QueryContext(ctx, listTasks, arg.OwnerID, arg.OrganizationID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []Task
|
||||
for rows.Next() {
|
||||
var i Task
|
||||
if err := rows.Scan(
|
||||
&i.ID,
|
||||
&i.OrganizationID,
|
||||
&i.OwnerID,
|
||||
&i.Name,
|
||||
&i.WorkspaceID,
|
||||
&i.TemplateVersionID,
|
||||
&i.TemplateParameters,
|
||||
&i.Prompt,
|
||||
&i.CreatedAt,
|
||||
&i.DeletedAt,
|
||||
&i.Status,
|
||||
&i.WorkspaceBuildNumber,
|
||||
&i.WorkspaceAgentID,
|
||||
&i.WorkspaceAppID,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const upsertTaskWorkspaceApp = `-- name: UpsertTaskWorkspaceApp :one
|
||||
INSERT INTO task_workspace_apps
|
||||
(task_id, workspace_build_number, workspace_agent_id, workspace_app_id)
|
||||
VALUES
|
||||
($1, $2, $3, $4)
|
||||
ON CONFLICT (task_id, workspace_build_number)
|
||||
DO UPDATE SET
|
||||
workspace_agent_id = EXCLUDED.workspace_agent_id,
|
||||
workspace_app_id = EXCLUDED.workspace_app_id
|
||||
RETURNING task_id, workspace_agent_id, workspace_app_id, workspace_build_number
|
||||
`
|
||||
|
||||
type UpsertTaskWorkspaceAppParams struct {
|
||||
TaskID uuid.UUID `db:"task_id" json:"task_id"`
|
||||
WorkspaceBuildNumber int32 `db:"workspace_build_number" json:"workspace_build_number"`
|
||||
WorkspaceAgentID uuid.NullUUID `db:"workspace_agent_id" json:"workspace_agent_id"`
|
||||
WorkspaceAppID uuid.NullUUID `db:"workspace_app_id" json:"workspace_app_id"`
|
||||
}
|
||||
|
||||
func (q *sqlQuerier) UpsertTaskWorkspaceApp(ctx context.Context, arg UpsertTaskWorkspaceAppParams) (TaskWorkspaceApp, error) {
|
||||
row := q.db.QueryRowContext(ctx, upsertTaskWorkspaceApp,
|
||||
arg.TaskID,
|
||||
arg.WorkspaceBuildNumber,
|
||||
arg.WorkspaceAgentID,
|
||||
arg.WorkspaceAppID,
|
||||
)
|
||||
var i TaskWorkspaceApp
|
||||
err := row.Scan(
|
||||
&i.TaskID,
|
||||
&i.WorkspaceAgentID,
|
||||
&i.WorkspaceAppID,
|
||||
&i.WorkspaceBuildNumber,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getTelemetryItem = `-- name: GetTelemetryItem :one
|
||||
SELECT key, value, created_at, updated_at FROM telemetry_items WHERE key = $1
|
||||
`
|
||||
@@ -14286,14 +14474,14 @@ DO $$
|
||||
DECLARE
|
||||
table_record record;
|
||||
BEGIN
|
||||
FOR table_record IN
|
||||
SELECT table_schema, table_name
|
||||
FROM information_schema.tables
|
||||
FOR table_record IN
|
||||
SELECT table_schema, table_name
|
||||
FROM information_schema.tables
|
||||
WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
|
||||
AND table_type = 'BASE TABLE'
|
||||
LOOP
|
||||
EXECUTE format('ALTER TABLE %I.%I DISABLE TRIGGER ALL',
|
||||
table_record.table_schema,
|
||||
EXECUTE format('ALTER TABLE %I.%I DISABLE TRIGGER ALL',
|
||||
table_record.table_schema,
|
||||
table_record.table_name);
|
||||
END LOOP;
|
||||
END;
|
||||
@@ -18278,7 +18466,7 @@ WITH agent_stats AS (
|
||||
coalesce((PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY connection_median_latency_ms)), -1)::FLOAT AS workspace_connection_latency_95
|
||||
FROM workspace_agent_stats
|
||||
-- The greater than 0 is to support legacy agents that don't report connection_median_latency_ms.
|
||||
WHERE workspace_agent_stats.created_at > $1 AND connection_median_latency_ms > 0
|
||||
WHERE workspace_agent_stats.created_at > $1 AND connection_median_latency_ms > 0
|
||||
GROUP BY user_id, agent_id, workspace_id, template_id
|
||||
), latest_agent_stats AS (
|
||||
SELECT
|
||||
|
||||
@@ -113,7 +113,7 @@ WHERE
|
||||
-- Filter by max age if provided
|
||||
AND (
|
||||
sqlc.narg('max_age_ms')::bigint IS NULL
|
||||
OR pd.last_seen_at IS NULL
|
||||
OR pd.last_seen_at IS NULL
|
||||
OR pd.last_seen_at >= (NOW() - (sqlc.narg('max_age_ms')::bigint || ' ms')::interval)
|
||||
)
|
||||
AND (
|
||||
|
||||
@@ -19,19 +19,19 @@ SELECT
|
||||
unnest(@level :: log_level [ ]) AS LEVEL,
|
||||
unnest(@stage :: VARCHAR(128) [ ]) AS stage,
|
||||
unnest(@output :: VARCHAR(1024) [ ]) AS output RETURNING *;
|
||||
|
||||
|
||||
-- name: UpdateProvisionerJobLogsOverflowed :exec
|
||||
UPDATE
|
||||
UPDATE
|
||||
provisioner_jobs
|
||||
SET
|
||||
SET
|
||||
logs_overflowed = $2
|
||||
WHERE
|
||||
WHERE
|
||||
id = $1;
|
||||
|
||||
|
||||
-- name: UpdateProvisionerJobLogsLength :exec
|
||||
UPDATE
|
||||
UPDATE
|
||||
provisioner_jobs
|
||||
SET
|
||||
SET
|
||||
logs_length = logs_length + $2
|
||||
WHERE
|
||||
WHERE
|
||||
id = $1;
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user