Compare commits
7 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| f3f628d5e8 | |||
| b110888a3b | |||
| d73aab31e8 | |||
| 2d2b866a3e | |||
| a27108b33a | |||
| d10e9f0caa | |||
| 689a7ed59d | |||
@@ -18,35 +18,35 @@ The 5.x era resolves years of module system ambiguity and cleans house on legacy
|
||||
|
||||
The left column reflects patterns still common before TypeScript 5.x. Write the right column instead. The "Since" column tells you the minimum TypeScript version required.
|
||||
|
||||
| Old pattern | Modern replacement | Since |
|
||||
| ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | ------ |
|
||||
| `--experimentalDecorators` + legacy decorator signatures | Standard decorators (TC39): `function dec(target, context: ClassMethodDecoratorContext)` — no flag needed | 5.0 |
|
||||
| Requiring callers to add `as const` at call sites | `<const T extends HasNames>(arg: T)` — `const` modifier on type parameter | 5.0 |
|
||||
| `--importsNotUsedAsValues` + `--preserveValueImports` | `--verbatimModuleSyntax` | 5.0 |
|
||||
| `import { Foo } from "..."` when `Foo` is only used as a type | `import { type Foo } from "..."` or `import type { Foo } from "..."` | 5.0 |
|
||||
| `"extends": "@tsconfig/strictest/tsconfig.json"` chain | `"extends": ["@tsconfig/strictest/tsconfig.json", "./tsconfig.base.json"]` (array form) | 5.0 |
|
||||
| `try { ... } finally { resource.close(); resource.delete(); }` | `using resource = acquireResource()` — calls `[Symbol.dispose]()` automatically | 5.2 |
|
||||
| `try { ... } finally { await resource.close() }` | `await using resource = acquireAsyncResource()` | 5.2 |
|
||||
| Ad-hoc cleanup with multiple `try/finally` blocks | `using cleanup = new DisposableStack(); cleanup.defer(() => ...)` | 5.2 |
|
||||
| `import data from "./data.json" assert { type: "json" }` | `import data from "./data.json" with { type: "json" }` | 5.3 |
|
||||
| `.filter(Boolean)` or `.filter(x => !!x)` to remove nulls | `.filter(x => x !== undefined)` or `.filter(x => x !== null)` (infers type predicate) | 5.5 |
|
||||
| Extra phantom type param to block inference bleed: `<C extends string, D extends C>` | `NoInfer<C>` on the parameter you don't want to drive inference | 5.4 |
|
||||
| `/** @typedef {import("./types").Foo} Foo */` in JS files | `/** @import { Foo } from "./types" */` (JSDoc `@import` tag) | 5.5 |
|
||||
| `myArray.reverse()` mutating in place | `myArray.toReversed()` (returns new array) | 5.2 |
|
||||
| `myArray.sort(cmp)` mutating in place | `myArray.toSorted(cmp)` (returns new array) | 5.2 |
|
||||
| `const copy = [...arr]; copy[i] = v` | `arr.with(i, v)` (returns new array) | 5.2 |
|
||||
| Manual `has`/`get`/`set` pattern on `Map` | `map.getOrInsert(key, defaultValue)` or `getOrInsertComputed(key, fn)` | 6.0 RC |
|
||||
| `new RegExp(str.replace(/[.\*+?^${}()\[\]\\]/g, '\\$&'))` | `new RegExp(RegExp.escape(str))` | 6.0 RC |
|
||||
| `--moduleResolution node` (node10) | `--moduleResolution nodenext` (Node.js) or `--moduleResolution bundler` (bundlers/Bun) | 6.0 RC |
|
||||
| `"baseUrl": "./src"` + `"@app/*": ["app/*"]` in paths | Remove `baseUrl`; use `"@app/*": ["./src/app/*"]` in paths directly | 6.0 RC |
|
||||
| `module Foo { export const x = 1; }` | `namespace Foo { export const x = 1; }` | 6.0 RC |
|
||||
| `export * from "..."` when all re-exported members are types | `export type * from "..."` (or `export type * as ns from "..."`) | 5.0 |
|
||||
| `function f(): undefined { return undefined; }` — explicit return required in `: undefined`-returning function | Remove the `return` entirely; `undefined`-returning functions no longer require any return statement | 5.1 |
|
||||
| Manual type predicate annotation on a simple arrow: `(x: T \| undefined): x is T => x !== undefined` | Remove the annotation; TypeScript infers `x is T` from `!== null/undefined` and `instanceof` checks automatically | 5.5 |
|
||||
| `const val = obj[key]; if (typeof val === "string") { use(val); }` — extract to const to narrow indexed access | `if (typeof obj[key] === "string") { obj[key].toUpperCase(); }` directly — both `obj` and `key` must be effectively constant | 5.5 |
|
||||
| Copy narrowed `let`/param to a `const`, or restructure code to escape stale closure narrowing after reassignment | Remove the copy; narrowing survives into closures created after the last assignment to the variable | 5.4 |
|
||||
| `(arr as string[]).filter(...)` or restructure to avoid "not callable" errors on `string[] \| number[]` | Call `.filter`, `.find`, `.some`, `.every`, `.reduce` directly on union-of-array types | 5.2 |
|
||||
| `if`/`else` chain used to work around lack of narrowing inside a `switch (true)` body | `switch (true)` — each `case` condition now narrows the tested variable in its clause | 5.3 |
|
||||
| Old pattern | Modern replacement | Since |
|
||||
| ---------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------- | ------ |
|
||||
| `--experimentalDecorators` + legacy decorator signatures | Standard decorators (TC39): `function dec(target, context: ClassMethodDecoratorContext)` — no flag needed | 5.0 |
|
||||
| Requiring callers to add `as const` at call sites | `<const T extends HasNames>(arg: T)` — `const` modifier on type parameter | 5.0 |
|
||||
| `--importsNotUsedAsValues` + `--preserveValueImports` | `--verbatimModuleSyntax` | 5.0 |
|
||||
| `import { Foo } from "..."` when `Foo` is only used as a type | `import { type Foo } from "..."` or `import type { Foo } from "..."` | 5.0 |
|
||||
| `"extends": "@tsconfig/strictest/tsconfig.json"` chain | `"extends": ["@tsconfig/strictest/tsconfig.json", "./tsconfig.base.json"]` (array form) | 5.0 |
|
||||
| `try { ... } finally { resource.close(); resource.delete(); }` | `using resource = acquireResource()` — calls `[Symbol.dispose]()` automatically | 5.2 |
|
||||
| `try { ... } finally { await resource.close() }` | `await using resource = acquireAsyncResource()` | 5.2 |
|
||||
| Ad-hoc cleanup with multiple `try/finally` blocks | `using cleanup = new DisposableStack(); cleanup.defer(() => ...)` | 5.2 |
|
||||
| `import data from "./data.json" assert { type: "json" }` | `import data from "./data.json" with { type: "json" }` | 5.3 |
|
||||
| `.filter(Boolean)` or `.filter(x => !!x)` to remove nulls | `.filter(x => x !== undefined)` or `.filter(x => x !== null)` (infers type predicate) | 5.5 |
|
||||
| Extra phantom type param to block inference bleed: `<C extends string, D extends C>` | `NoInfer<C>` on the parameter you don't want to drive inference | 5.4 |
|
||||
| `/** @typedef {import("./types").Foo} Foo */` in JS files | `/** @import { Foo } from "./types" */` (JSDoc `@import` tag) | 5.5 |
|
||||
| `myArray.reverse()` mutating in place | `myArray.toReversed()` (returns new array) | 5.2 |
|
||||
| `myArray.sort(cmp)` mutating in place | `myArray.toSorted(cmp)` (returns new array) | 5.2 |
|
||||
| `const copy = [...arr]; copy[i] = v` | `arr.with(i, v)` (returns new array) | 5.2 |
|
||||
| Manual `has`/`get`/`set` pattern on `Map` | `map.getOrInsert(key, defaultValue)` or `getOrInsertComputed(key, fn)` | 6.0 RC |
|
||||
| `new RegExp(str.replace(/[.\*+?^${}()\[\]\\]/g, '\\$&'))` | `new RegExp(RegExp.escape(str))` | 6.0 RC |
|
||||
| `--moduleResolution node` (node10) | `--moduleResolution nodenext` (Node.js) or `--moduleResolution bundler` (bundlers/Bun) | 6.0 RC |
|
||||
| `"baseUrl": "./src"` + `"@app/*": ["app/*"]` in paths | Remove `baseUrl`; use `"@app/*": ["./src/app/*"]` in paths directly | 6.0 RC |
|
||||
| `module Foo { export const x = 1; }` | `namespace Foo { export const x = 1; }` | 6.0 RC |
|
||||
| `export * from "..."` when all re-exported members are types | `export type * from "..."` (or `export type * as ns from "..."`) | 5.0 |
|
||||
| `function f(): undefined { return undefined; }` — explicit return required in `: undefined`-returning function | Remove the `return` entirely; `undefined`-returning functions no longer require any return statement | 5.1 |
|
||||
| Manual type predicate annotation on a simple arrow: `(x: T \| undefined): x is T => x !== undefined` | Remove the annotation; TypeScript infers `x is T` from `!== null/undefined` and `instanceof` checks automatically | 5.5 |
|
||||
| `const val = obj[key]; if (typeof val === "string") { use(val); }` — extract to const to narrow indexed access | `if (typeof obj[key] === "string") { obj[key].toUpperCase(); }` directly — both `obj` and `key` must be effectively constant | 5.5 |
|
||||
| Copy narrowed `let`/param to a `const`, or restructure code to escape stale closure narrowing after reassignment | Remove the copy; narrowing survives into closures created after the last assignment to the variable | 5.4 |
|
||||
| `(arr as string[]).filter(...)` or restructure to avoid "not callable" errors on `string[] \| number[]` | Call `.filter`, `.find`, `.some`, `.every`, `.reduce` directly on union-of-array types | 5.2 |
|
||||
| `if`/`else` chain used to work around lack of narrowing inside a `switch (true)` body | `switch (true)` — each `case` condition now narrows the tested variable in its clause | 5.3 |
|
||||
|
||||
## New capabilities
|
||||
|
||||
|
||||
@@ -91,6 +91,12 @@ updates:
|
||||
emotion:
|
||||
patterns:
|
||||
- "@emotion*"
|
||||
exclude-patterns:
|
||||
- "jest-runner-eslint"
|
||||
jest:
|
||||
patterns:
|
||||
- "jest"
|
||||
- "@types/jest"
|
||||
vite:
|
||||
patterns:
|
||||
- "vite*"
|
||||
|
||||
@@ -134,19 +134,10 @@ jobs:
|
||||
exit 0
|
||||
fi
|
||||
|
||||
NEW_PR_URL=$(
|
||||
gh pr create \
|
||||
--base "$RELEASE_BRANCH" \
|
||||
--head "$BACKPORT_BRANCH" \
|
||||
--title "$TITLE" \
|
||||
--body "$BODY" \
|
||||
--assignee "$SENDER" \
|
||||
--reviewer "$SENDER"
|
||||
)
|
||||
|
||||
# Comment on the original PR to notify the author.
|
||||
COMMENT="Cherry-pick PR created: ${NEW_PR_URL}"
|
||||
if [ "$CONFLICT" = true ]; then
|
||||
COMMENT="${COMMENT} (⚠️ conflicts need manual resolution)"
|
||||
fi
|
||||
gh pr comment "$PR_NUMBER" --body "$COMMENT"
|
||||
gh pr create \
|
||||
--base "$RELEASE_BRANCH" \
|
||||
--head "$BACKPORT_BRANCH" \
|
||||
--title "$TITLE" \
|
||||
--body "$BODY" \
|
||||
--assignee "$SENDER" \
|
||||
--reviewer "$SENDER"
|
||||
|
||||
@@ -2862,126 +2862,6 @@ func TestAPI(t *testing.T) {
|
||||
"rebuilt agent should include updated display apps")
|
||||
})
|
||||
|
||||
// Verify that when a terraform-managed subagent is injected into
|
||||
// a devcontainer, the Directory field sent to Create reflects
|
||||
// the container-internal workspaceFolder from devcontainer
|
||||
// read-configuration, not the host-side workspace_folder from
|
||||
// the terraform resource. This is the scenario described in
|
||||
// https://linear.app/codercom/issue/PRODUCT-259:
|
||||
// 1. Non-terraform subagent → directory = /workspaces/foo (correct)
|
||||
// 2. Terraform subagent → directory was stuck on host path (bug)
|
||||
t.Run("TerraformDefinedSubAgentUsesContainerInternalDirectory", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
if runtime.GOOS == "windows" {
|
||||
t.Skip("Dev Container tests are not supported on Windows (this test uses mocks but fails due to Windows paths)")
|
||||
}
|
||||
|
||||
var (
|
||||
ctx = testutil.Context(t, testutil.WaitMedium)
|
||||
logger = slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug)
|
||||
mCtrl = gomock.NewController(t)
|
||||
|
||||
terraformAgentID = uuid.New()
|
||||
containerID = "test-container-id"
|
||||
|
||||
// Given: A container with a host-side workspace folder.
|
||||
terraformContainer = codersdk.WorkspaceAgentContainer{
|
||||
ID: containerID,
|
||||
FriendlyName: "test-container",
|
||||
Image: "test-image",
|
||||
Running: true,
|
||||
CreatedAt: time.Now(),
|
||||
Labels: map[string]string{
|
||||
agentcontainers.DevcontainerLocalFolderLabel: "/home/coder/project",
|
||||
agentcontainers.DevcontainerConfigFileLabel: "/home/coder/project/.devcontainer/devcontainer.json",
|
||||
},
|
||||
}
|
||||
|
||||
// Given: A terraform-defined devcontainer whose
|
||||
// workspace_folder is the HOST-side path (set by provisioner).
|
||||
terraformDevcontainer = codersdk.WorkspaceAgentDevcontainer{
|
||||
ID: uuid.New(),
|
||||
Name: "terraform-devcontainer",
|
||||
WorkspaceFolder: "/home/coder/project",
|
||||
ConfigPath: "/home/coder/project/.devcontainer/devcontainer.json",
|
||||
SubagentID: uuid.NullUUID{UUID: terraformAgentID, Valid: true},
|
||||
}
|
||||
|
||||
fCCLI = &fakeContainerCLI{
|
||||
containers: codersdk.WorkspaceAgentListContainersResponse{
|
||||
Containers: []codersdk.WorkspaceAgentContainer{terraformContainer},
|
||||
},
|
||||
arch: runtime.GOARCH,
|
||||
}
|
||||
|
||||
// Given: devcontainer read-configuration returns the
|
||||
// CONTAINER-INTERNAL workspace folder.
|
||||
fDCCLI = &fakeDevcontainerCLI{
|
||||
upID: containerID,
|
||||
readConfig: agentcontainers.DevcontainerConfig{
|
||||
Workspace: agentcontainers.DevcontainerWorkspace{
|
||||
WorkspaceFolder: "/workspaces/project",
|
||||
},
|
||||
MergedConfiguration: agentcontainers.DevcontainerMergedConfiguration{
|
||||
Customizations: agentcontainers.DevcontainerMergedCustomizations{
|
||||
Coder: []agentcontainers.CoderCustomization{{}},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
mSAC = acmock.NewMockSubAgentClient(mCtrl)
|
||||
createCalls = make(chan agentcontainers.SubAgent, 1)
|
||||
closed bool
|
||||
)
|
||||
|
||||
mSAC.EXPECT().List(gomock.Any()).Return([]agentcontainers.SubAgent{}, nil).AnyTimes()
|
||||
|
||||
mSAC.EXPECT().Create(gomock.Any(), gomock.Any()).DoAndReturn(
|
||||
func(_ context.Context, agent agentcontainers.SubAgent) (agentcontainers.SubAgent, error) {
|
||||
agent.AuthToken = uuid.New()
|
||||
createCalls <- agent
|
||||
return agent, nil
|
||||
},
|
||||
).Times(1)
|
||||
|
||||
mSAC.EXPECT().Delete(gomock.Any(), gomock.Any()).DoAndReturn(func(_ context.Context, _ uuid.UUID) error {
|
||||
assert.True(t, closed, "Delete should only be called after Close")
|
||||
return nil
|
||||
}).AnyTimes()
|
||||
|
||||
api := agentcontainers.NewAPI(logger,
|
||||
agentcontainers.WithContainerCLI(fCCLI),
|
||||
agentcontainers.WithDevcontainerCLI(fDCCLI),
|
||||
agentcontainers.WithDevcontainers(
|
||||
[]codersdk.WorkspaceAgentDevcontainer{terraformDevcontainer},
|
||||
[]codersdk.WorkspaceAgentScript{{ID: terraformDevcontainer.ID, LogSourceID: uuid.New()}},
|
||||
),
|
||||
agentcontainers.WithSubAgentClient(mSAC),
|
||||
agentcontainers.WithSubAgentURL("test-subagent-url"),
|
||||
agentcontainers.WithWatcher(watcher.NewNoop()),
|
||||
)
|
||||
api.Start()
|
||||
defer func() {
|
||||
closed = true
|
||||
api.Close()
|
||||
}()
|
||||
|
||||
// When: The devcontainer is created (triggering injection).
|
||||
err := api.CreateDevcontainer(terraformDevcontainer.WorkspaceFolder, terraformDevcontainer.ConfigPath)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Then: The subagent sent to Create has the correct
|
||||
// container-internal directory, not the host path.
|
||||
createdAgent := testutil.RequireReceive(ctx, t, createCalls)
|
||||
assert.Equal(t, terraformAgentID, createdAgent.ID,
|
||||
"agent should use terraform-defined ID")
|
||||
assert.Equal(t, "/workspaces/project", createdAgent.Directory,
|
||||
"directory should be the container-internal path from devcontainer "+
|
||||
"read-configuration, not the host-side workspace_folder")
|
||||
})
|
||||
|
||||
t.Run("Error", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
|
||||
+1036
-1139
File diff suppressed because it is too large
Load Diff
@@ -98,21 +98,6 @@ message Manifest {
|
||||
repeated WorkspaceApp apps = 11;
|
||||
repeated WorkspaceAgentMetadata.Description metadata = 12;
|
||||
repeated WorkspaceAgentDevcontainer devcontainers = 17;
|
||||
repeated WorkspaceSecret secrets = 19;
|
||||
}
|
||||
|
||||
// WorkspaceSecret is a secret included in the agent manifest
|
||||
// for injection into a workspace.
|
||||
message WorkspaceSecret {
|
||||
// Environment variable name to inject (e.g. "GITHUB_TOKEN").
|
||||
// Empty string means this secret is not injected as an env var.
|
||||
string env_name = 1;
|
||||
// File path to write the secret value to (e.g.
|
||||
// "~/.aws/credentials"). Empty string means this secret is not
|
||||
// written to a file.
|
||||
string file_path = 2;
|
||||
// The decrypted secret value.
|
||||
bytes value = 3;
|
||||
}
|
||||
|
||||
message WorkspaceAgentDevcontainer {
|
||||
|
||||
@@ -812,18 +812,12 @@ func TestPortableDesktop_IdleTimeout_StopsRecordings(t *testing.T) {
|
||||
stopTrap := clk.Trap().NewTimer("agentdesktop", "stop_timeout")
|
||||
|
||||
// Advance past idle timeout to trigger the stop-all.
|
||||
clk.Advance(idleTimeout).MustWait(ctx)
|
||||
clk.Advance(idleTimeout)
|
||||
|
||||
// Wait for the stop timer to be created, then release it.
|
||||
stopTrap.MustWait(ctx).MustRelease(ctx)
|
||||
stopTrap.Close()
|
||||
|
||||
// Advance past the 15s stop timeout so the process is
|
||||
// forcibly killed. Without this the test depends on the real
|
||||
// shell handling SIGINT promptly, which is unreliable on
|
||||
// macOS CI runners (the flake in #1461).
|
||||
clk.Advance(15 * time.Second).MustWait(ctx)
|
||||
|
||||
// The recording process should now be stopped.
|
||||
require.Eventually(t, func() bool {
|
||||
pd.mu.Lock()
|
||||
@@ -945,17 +939,11 @@ func TestPortableDesktop_IdleTimeout_MultipleRecordings(t *testing.T) {
|
||||
stopTrap := clk.Trap().NewTimer("agentdesktop", "stop_timeout")
|
||||
|
||||
// Advance past idle timeout.
|
||||
clk.Advance(idleTimeout).MustWait(ctx)
|
||||
clk.Advance(idleTimeout)
|
||||
|
||||
// Each idle monitor goroutine serializes on p.mu, so the
|
||||
// second stop timer is only created after the first stop
|
||||
// completes. Advance past the 15s stop timeout after each
|
||||
// release so the process is forcibly killed instead of
|
||||
// depending on SIGINT (unreliable on macOS — see #1461).
|
||||
// Wait for both stop timers.
|
||||
stopTrap.MustWait(ctx).MustRelease(ctx)
|
||||
clk.Advance(15 * time.Second).MustWait(ctx)
|
||||
stopTrap.MustWait(ctx).MustRelease(ctx)
|
||||
clk.Advance(15 * time.Second).MustWait(ctx)
|
||||
stopTrap.Close()
|
||||
|
||||
// Both recordings should be stopped.
|
||||
|
||||
@@ -87,12 +87,6 @@ func IsDevVersion(v string) bool {
|
||||
return strings.Contains(v, "-"+develPreRelease)
|
||||
}
|
||||
|
||||
// IsRCVersion returns true if the version has a release candidate
|
||||
// pre-release tag, e.g. "v2.31.0-rc.0".
|
||||
func IsRCVersion(v string) bool {
|
||||
return strings.Contains(v, "-rc.")
|
||||
}
|
||||
|
||||
// IsDev returns true if this is a development build.
|
||||
// CI builds are also considered development builds.
|
||||
func IsDev() bool {
|
||||
|
||||
@@ -102,29 +102,3 @@ func TestBuildInfo(t *testing.T) {
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestIsRCVersion(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
cases := []struct {
|
||||
name string
|
||||
version string
|
||||
expected bool
|
||||
}{
|
||||
{"RC0", "v2.31.0-rc.0", true},
|
||||
{"RC1WithBuild", "v2.31.0-rc.1+abc123", true},
|
||||
{"RC10", "v2.31.0-rc.10", true},
|
||||
{"RCDevel", "v2.33.0-rc.1-devel+727ec00f7", true},
|
||||
{"DevelVersion", "v2.31.0-devel+abc123", false},
|
||||
{"StableVersion", "v2.31.0", false},
|
||||
{"DevNoVersion", "v0.0.0-devel+abc123", false},
|
||||
{"BetaVersion", "v2.31.0-beta.1", false},
|
||||
}
|
||||
|
||||
for _, c := range cases {
|
||||
t.Run(c.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
require.Equal(t, c.expected, buildinfo.IsRCVersion(c.version))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
+5
-28
@@ -7,7 +7,6 @@ import (
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
@@ -712,7 +711,7 @@ func (r *RootCmd) createHTTPClient(ctx context.Context, serverURL *url.URL, inv
|
||||
transport = wrapTransportWithTelemetryHeader(transport, inv)
|
||||
transport = wrapTransportWithUserAgentHeader(transport, inv)
|
||||
if !r.noVersionCheck {
|
||||
transport = wrapTransportWithVersionCheck(transport, inv, buildinfo.Version(), func(ctx context.Context) (codersdk.BuildInfoResponse, error) {
|
||||
transport = wrapTransportWithVersionMismatchCheck(transport, inv, buildinfo.Version(), func(ctx context.Context) (codersdk.BuildInfoResponse, error) {
|
||||
// Create a new client without any wrapped transport
|
||||
// otherwise it creates an infinite loop!
|
||||
basicClient := codersdk.New(serverURL)
|
||||
@@ -1436,21 +1435,6 @@ func defaultUpgradeMessage(version string) string {
|
||||
return fmt.Sprintf("download the server version with: 'curl -L https://coder.com/install.sh | sh -s -- --version %s'", version)
|
||||
}
|
||||
|
||||
// serverVersionMessage returns a warning message if the server version
|
||||
// is a release candidate or development build. Returns empty string
|
||||
// for stable versions. RC is checked before devel because RC dev
|
||||
// builds (e.g. v2.33.0-rc.1-devel+hash) contain both tags.
|
||||
func serverVersionMessage(serverVersion string) string {
|
||||
switch {
|
||||
case buildinfo.IsRCVersion(serverVersion):
|
||||
return fmt.Sprintf("the server is running a release candidate of Coder (%s)", serverVersion)
|
||||
case buildinfo.IsDevVersion(serverVersion):
|
||||
return fmt.Sprintf("the server is running a development version of Coder (%s)", serverVersion)
|
||||
default:
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
||||
// wrapTransportWithEntitlementsCheck adds a middleware to the HTTP transport
|
||||
// that checks for entitlement warnings and prints them to the user.
|
||||
func wrapTransportWithEntitlementsCheck(rt http.RoundTripper, w io.Writer) http.RoundTripper {
|
||||
@@ -1469,10 +1453,10 @@ func wrapTransportWithEntitlementsCheck(rt http.RoundTripper, w io.Writer) http.
|
||||
})
|
||||
}
|
||||
|
||||
// wrapTransportWithVersionCheck adds a middleware to the HTTP transport
|
||||
// that checks the server version and warns about development builds,
|
||||
// release candidates, and client/server version mismatches.
|
||||
func wrapTransportWithVersionCheck(rt http.RoundTripper, inv *serpent.Invocation, clientVersion string, getBuildInfo func(ctx context.Context) (codersdk.BuildInfoResponse, error)) http.RoundTripper {
|
||||
// wrapTransportWithVersionMismatchCheck adds a middleware to the HTTP transport
|
||||
// that checks for version mismatches between the client and server. If a mismatch
|
||||
// is detected, a warning is printed to the user.
|
||||
func wrapTransportWithVersionMismatchCheck(rt http.RoundTripper, inv *serpent.Invocation, clientVersion string, getBuildInfo func(ctx context.Context) (codersdk.BuildInfoResponse, error)) http.RoundTripper {
|
||||
var once sync.Once
|
||||
return roundTripper(func(req *http.Request) (*http.Response, error) {
|
||||
res, err := rt.RoundTrip(req)
|
||||
@@ -1484,16 +1468,9 @@ func wrapTransportWithVersionCheck(rt http.RoundTripper, inv *serpent.Invocation
|
||||
if serverVersion == "" {
|
||||
return
|
||||
}
|
||||
// Warn about non-stable server versions. Skip
|
||||
// during tests to avoid polluting golden files.
|
||||
if msg := serverVersionMessage(serverVersion); msg != "" && flag.Lookup("test.v") == nil {
|
||||
warning := pretty.Sprint(cliui.DefaultStyles.Warn, msg)
|
||||
_, _ = fmt.Fprintln(inv.Stderr, warning)
|
||||
}
|
||||
if buildinfo.VersionsMatch(clientVersion, serverVersion) {
|
||||
return
|
||||
}
|
||||
|
||||
upgradeMessage := defaultUpgradeMessage(semver.Canonical(serverVersion))
|
||||
if serverInfo, err := getBuildInfo(inv.Context()); err == nil {
|
||||
switch {
|
||||
|
||||
@@ -91,7 +91,7 @@ func Test_formatExamples(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func Test_wrapTransportWithVersionCheck(t *testing.T) {
|
||||
func Test_wrapTransportWithVersionMismatchCheck(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
t.Run("NoOutput", func(t *testing.T) {
|
||||
@@ -102,7 +102,7 @@ func Test_wrapTransportWithVersionCheck(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
inv := cmd.Invoke()
|
||||
inv.Stderr = &buf
|
||||
rt := wrapTransportWithVersionCheck(roundTripper(func(req *http.Request) (*http.Response, error) {
|
||||
rt := wrapTransportWithVersionMismatchCheck(roundTripper(func(req *http.Request) (*http.Response, error) {
|
||||
return &http.Response{
|
||||
StatusCode: http.StatusOK,
|
||||
Header: http.Header{
|
||||
@@ -131,7 +131,7 @@ func Test_wrapTransportWithVersionCheck(t *testing.T) {
|
||||
inv := cmd.Invoke()
|
||||
inv.Stderr = &buf
|
||||
expectedUpgradeMessage := "My custom upgrade message"
|
||||
rt := wrapTransportWithVersionCheck(roundTripper(func(req *http.Request) (*http.Response, error) {
|
||||
rt := wrapTransportWithVersionMismatchCheck(roundTripper(func(req *http.Request) (*http.Response, error) {
|
||||
return &http.Response{
|
||||
StatusCode: http.StatusOK,
|
||||
Header: http.Header{
|
||||
@@ -159,53 +159,6 @@ func Test_wrapTransportWithVersionCheck(t *testing.T) {
|
||||
expectedOutput := fmt.Sprintln(pretty.Sprint(cliui.DefaultStyles.Warn, fmtOutput))
|
||||
require.Equal(t, expectedOutput, buf.String())
|
||||
})
|
||||
|
||||
t.Run("ServerStableVersion", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
r := &RootCmd{}
|
||||
cmd, err := r.Command(nil)
|
||||
require.NoError(t, err)
|
||||
var buf bytes.Buffer
|
||||
inv := cmd.Invoke()
|
||||
inv.Stderr = &buf
|
||||
rt := wrapTransportWithVersionCheck(roundTripper(func(req *http.Request) (*http.Response, error) {
|
||||
return &http.Response{
|
||||
StatusCode: http.StatusOK,
|
||||
Header: http.Header{
|
||||
codersdk.BuildVersionHeader: []string{"v2.31.0"},
|
||||
},
|
||||
Body: io.NopCloser(nil),
|
||||
}, nil
|
||||
}), inv, "v2.31.0", nil)
|
||||
req := httptest.NewRequest(http.MethodGet, "http://example.com", nil)
|
||||
res, err := rt.RoundTrip(req)
|
||||
require.NoError(t, err)
|
||||
defer res.Body.Close()
|
||||
require.Empty(t, buf.String())
|
||||
})
|
||||
}
|
||||
|
||||
func Test_serverVersionMessage(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
cases := []struct {
|
||||
name string
|
||||
version string
|
||||
expected string
|
||||
}{
|
||||
{"Stable", "v2.31.0", ""},
|
||||
{"Dev", "v0.0.0-devel+abc123", "the server is running a development version of Coder (v0.0.0-devel+abc123)"},
|
||||
{"RC", "v2.31.0-rc.1", "the server is running a release candidate of Coder (v2.31.0-rc.1)"},
|
||||
{"RCDevel", "v2.33.0-rc.1-devel+727ec00f7", "the server is running a release candidate of Coder (v2.33.0-rc.1-devel+727ec00f7)"},
|
||||
{"Empty", "", ""},
|
||||
}
|
||||
|
||||
for _, c := range cases {
|
||||
t.Run(c.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
require.Equal(t, c.expected, serverVersionMessage(c.version))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_wrapTransportWithTelemetryHeader(t *testing.T) {
|
||||
|
||||
+117
@@ -56,6 +56,7 @@ import (
|
||||
|
||||
"cdr.dev/slog/v3"
|
||||
"cdr.dev/slog/v3/sloggers/sloghuman"
|
||||
"github.com/coder/aibridge"
|
||||
"github.com/coder/coder/v2/buildinfo"
|
||||
"github.com/coder/coder/v2/cli/clilog"
|
||||
"github.com/coder/coder/v2/cli/cliui"
|
||||
@@ -842,6 +843,12 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd.
|
||||
)
|
||||
}
|
||||
|
||||
aibridgeProviders, err := ReadAIBridgeProvidersFromEnv(logger, os.Environ())
|
||||
if err != nil {
|
||||
return xerrors.Errorf("read aibridge providers from env: %w", err)
|
||||
}
|
||||
vals.AI.BridgeConfig.Providers = append(vals.AI.BridgeConfig.Providers, aibridgeProviders...)
|
||||
|
||||
// Manage push notifications.
|
||||
experiments := coderd.ReadExperiments(options.Logger, options.DeploymentValues.Experiments.Value())
|
||||
if experiments.Enabled(codersdk.ExperimentWebPush) || buildinfo.IsDev() {
|
||||
@@ -2914,6 +2921,116 @@ func parseExternalAuthProvidersFromEnv(prefix string, environ []string) ([]coder
|
||||
return providers, nil
|
||||
}
|
||||
|
||||
// ReadAIBridgeProvidersFromEnv parses CODER_AIBRIDGE_PROVIDER_<N>_<KEY>
|
||||
// environment variables into a slice of AIBridgeProviderConfig.
|
||||
// This follows the same indexed pattern as ReadExternalAuthProvidersFromEnv.
|
||||
func ReadAIBridgeProvidersFromEnv(logger slog.Logger, environ []string) ([]codersdk.AIBridgeProviderConfig, error) {
|
||||
parsed := serpent.ParseEnviron(environ, "CODER_AIBRIDGE_PROVIDER_")
|
||||
|
||||
// Sort by numeric index so that PROVIDER_2 comes before PROVIDER_10.
|
||||
slices.SortFunc(parsed, func(a, b serpent.EnvVar) int {
|
||||
aIdx, _ := strconv.Atoi(strings.SplitN(a.Name, "_", 2)[0])
|
||||
bIdx, _ := strconv.Atoi(strings.SplitN(b.Name, "_", 2)[0])
|
||||
if aIdx != bIdx {
|
||||
return aIdx - bIdx
|
||||
}
|
||||
return strings.Compare(a.Name, b.Name)
|
||||
})
|
||||
|
||||
var providers []codersdk.AIBridgeProviderConfig
|
||||
for _, v := range parsed {
|
||||
tokens := strings.SplitN(v.Name, "_", 2)
|
||||
if len(tokens) != 2 {
|
||||
return nil, xerrors.Errorf("invalid env var: %s", v.Name)
|
||||
}
|
||||
|
||||
providerNum, err := strconv.Atoi(tokens[0])
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("parse number: %s", v.Name)
|
||||
}
|
||||
|
||||
var provider codersdk.AIBridgeProviderConfig
|
||||
switch {
|
||||
case len(providers) < providerNum:
|
||||
return nil, xerrors.Errorf(
|
||||
"provider num %v skipped: %s",
|
||||
len(providers),
|
||||
v.Name,
|
||||
)
|
||||
case len(providers) == providerNum: // First observation of this index, create a new provider.
|
||||
providers = append(providers, provider)
|
||||
case len(providers) == providerNum+1: // Provider already exists at this index, update it.
|
||||
provider = providers[providerNum]
|
||||
}
|
||||
|
||||
key := tokens[1]
|
||||
switch key {
|
||||
case "TYPE":
|
||||
provider.Type = v.Value
|
||||
case "NAME":
|
||||
provider.Name = v.Value
|
||||
case "KEY":
|
||||
provider.Key = v.Value
|
||||
case "BASE_URL":
|
||||
provider.BaseURL = v.Value
|
||||
case "BEDROCK_BASE_URL":
|
||||
provider.BedrockBaseURL = v.Value
|
||||
case "BEDROCK_REGION":
|
||||
provider.BedrockRegion = v.Value
|
||||
case "BEDROCK_ACCESS_KEY":
|
||||
provider.BedrockAccessKey = v.Value
|
||||
case "BEDROCK_ACCESS_KEY_SECRET":
|
||||
provider.BedrockAccessKeySecret = v.Value
|
||||
case "BEDROCK_MODEL":
|
||||
provider.BedrockModel = v.Value
|
||||
case "BEDROCK_SMALL_FAST_MODEL":
|
||||
provider.BedrockSmallFastModel = v.Value
|
||||
default:
|
||||
logger.Warn(context.Background(), "ignoring unknown aibridge provider field (check for typos)",
|
||||
slog.F("env", fmt.Sprintf("CODER_AIBRIDGE_PROVIDER_%d_%s", providerNum, key)),
|
||||
)
|
||||
}
|
||||
providers[providerNum] = provider
|
||||
}
|
||||
|
||||
// Post-parse validation.
|
||||
names := make(map[string]int, len(providers))
|
||||
for i := range providers {
|
||||
p := &providers[i]
|
||||
if p.Type == "" {
|
||||
return nil, xerrors.Errorf("provider %d: TYPE is required", i)
|
||||
}
|
||||
|
||||
switch p.Type {
|
||||
case aibridge.ProviderOpenAI, aibridge.ProviderAnthropic, aibridge.ProviderCopilot:
|
||||
default:
|
||||
return nil, xerrors.Errorf("provider %d: unknown TYPE %q (must be %s, %s, or %s)",
|
||||
i, p.Type, aibridge.ProviderOpenAI, aibridge.ProviderAnthropic, aibridge.ProviderCopilot)
|
||||
}
|
||||
|
||||
if p.Type != aibridge.ProviderAnthropic && hasBedrockFields(*p) {
|
||||
return nil, xerrors.Errorf("provider %d (%s): BEDROCK_* fields are only supported with TYPE %q",
|
||||
i, p.Type, aibridge.ProviderAnthropic)
|
||||
}
|
||||
|
||||
if p.Name == "" {
|
||||
p.Name = p.Type
|
||||
}
|
||||
if other, exists := names[p.Name]; exists {
|
||||
return nil, xerrors.Errorf("providers %d and %d have duplicate NAME %q (multiple providers of the same type require unique NAME values)", other, i, p.Name)
|
||||
}
|
||||
names[p.Name] = i
|
||||
}
|
||||
|
||||
return providers, nil
|
||||
}
|
||||
|
||||
func hasBedrockFields(p codersdk.AIBridgeProviderConfig) bool {
|
||||
return p.BedrockBaseURL != "" || p.BedrockRegion != "" ||
|
||||
p.BedrockAccessKey != "" || p.BedrockAccessKeySecret != "" ||
|
||||
p.BedrockModel != "" || p.BedrockSmallFastModel != ""
|
||||
}
|
||||
|
||||
var reInvalidPortAfterHost = regexp.MustCompile(`invalid port ".+" after host`)
|
||||
|
||||
// If the user provides a postgres URL with a password that contains special
|
||||
|
||||
@@ -0,0 +1,249 @@
|
||||
package cli
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"cdr.dev/slog/v3"
|
||||
"cdr.dev/slog/v3/sloggers/slogtest"
|
||||
"github.com/coder/aibridge"
|
||||
"github.com/coder/coder/v2/testutil"
|
||||
)
|
||||
|
||||
func TestReadAIBridgeProvidersFromEnv(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
t.Run("Empty", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
providers, err := ReadAIBridgeProvidersFromEnv(slogtest.Make(t, nil), []string{
|
||||
"HOME=/home/frodo",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.Empty(t, providers)
|
||||
})
|
||||
|
||||
t.Run("SingleProvider", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
providers, err := ReadAIBridgeProvidersFromEnv(slogtest.Make(t, nil), []string{
|
||||
"CODER_AIBRIDGE_PROVIDER_0_TYPE=anthropic",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_NAME=anthropic-zdr",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_KEY=sk-ant-xxx",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_BASE_URL=https://api.anthropic.com/",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.Len(t, providers, 1)
|
||||
assert.Equal(t, aibridge.ProviderAnthropic, providers[0].Type)
|
||||
assert.Equal(t, "anthropic-zdr", providers[0].Name)
|
||||
assert.Equal(t, "sk-ant-xxx", providers[0].Key)
|
||||
assert.Equal(t, "https://api.anthropic.com/", providers[0].BaseURL)
|
||||
})
|
||||
|
||||
t.Run("MultipleProvidersSameType", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
providers, err := ReadAIBridgeProvidersFromEnv(slogtest.Make(t, nil), []string{
|
||||
"CODER_AIBRIDGE_PROVIDER_0_TYPE=anthropic",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_NAME=anthropic-us",
|
||||
"CODER_AIBRIDGE_PROVIDER_1_TYPE=anthropic",
|
||||
"CODER_AIBRIDGE_PROVIDER_1_NAME=anthropic-eu",
|
||||
"CODER_AIBRIDGE_PROVIDER_1_BASE_URL=https://eu.api.anthropic.com/",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.Len(t, providers, 2)
|
||||
assert.Equal(t, "anthropic-us", providers[0].Name)
|
||||
assert.Equal(t, "anthropic-eu", providers[1].Name)
|
||||
assert.Equal(t, "https://eu.api.anthropic.com/", providers[1].BaseURL)
|
||||
})
|
||||
|
||||
t.Run("DefaultName", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
providers, err := ReadAIBridgeProvidersFromEnv(slogtest.Make(t, nil), []string{
|
||||
"CODER_AIBRIDGE_PROVIDER_0_TYPE=openai",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.Len(t, providers, 1)
|
||||
assert.Equal(t, aibridge.ProviderOpenAI, providers[0].Name)
|
||||
})
|
||||
|
||||
t.Run("MixedTypes", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
providers, err := ReadAIBridgeProvidersFromEnv(slogtest.Make(t, nil), []string{
|
||||
"CODER_AIBRIDGE_PROVIDER_0_TYPE=anthropic",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_NAME=anthropic-main",
|
||||
"CODER_AIBRIDGE_PROVIDER_1_TYPE=openai",
|
||||
"CODER_AIBRIDGE_PROVIDER_2_TYPE=copilot",
|
||||
"CODER_AIBRIDGE_PROVIDER_2_NAME=copilot-custom",
|
||||
"CODER_AIBRIDGE_PROVIDER_2_BASE_URL=https://custom.copilot.com",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.Len(t, providers, 3)
|
||||
assert.Equal(t, aibridge.ProviderAnthropic, providers[0].Type)
|
||||
assert.Equal(t, aibridge.ProviderOpenAI, providers[1].Type)
|
||||
assert.Equal(t, aibridge.ProviderCopilot, providers[2].Type)
|
||||
assert.Equal(t, "copilot-custom", providers[2].Name)
|
||||
})
|
||||
|
||||
t.Run("BedrockFields", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
providers, err := ReadAIBridgeProvidersFromEnv(slogtest.Make(t, nil), []string{
|
||||
"CODER_AIBRIDGE_PROVIDER_0_TYPE=anthropic",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_NAME=anthropic-bedrock",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_BEDROCK_REGION=us-west-2",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_BEDROCK_ACCESS_KEY=AKID",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_BEDROCK_ACCESS_KEY_SECRET=secret",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_BEDROCK_MODEL=anthropic.claude-3-sonnet",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_BEDROCK_SMALL_FAST_MODEL=anthropic.claude-3-haiku",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_BEDROCK_BASE_URL=https://bedrock.us-west-2.amazonaws.com",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.Len(t, providers, 1)
|
||||
assert.Equal(t, "us-west-2", providers[0].BedrockRegion)
|
||||
assert.Equal(t, "AKID", providers[0].BedrockAccessKey)
|
||||
assert.Equal(t, "secret", providers[0].BedrockAccessKeySecret)
|
||||
assert.Equal(t, "anthropic.claude-3-sonnet", providers[0].BedrockModel)
|
||||
assert.Equal(t, "anthropic.claude-3-haiku", providers[0].BedrockSmallFastModel)
|
||||
assert.Equal(t, "https://bedrock.us-west-2.amazonaws.com", providers[0].BedrockBaseURL)
|
||||
})
|
||||
|
||||
t.Run("OutOfOrderIndices", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
providers, err := ReadAIBridgeProvidersFromEnv(slogtest.Make(t, nil), []string{
|
||||
"CODER_AIBRIDGE_PROVIDER_1_TYPE=anthropic",
|
||||
"CODER_AIBRIDGE_PROVIDER_1_NAME=second",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_TYPE=openai",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_NAME=first",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.Len(t, providers, 2)
|
||||
assert.Equal(t, "first", providers[0].Name)
|
||||
assert.Equal(t, aibridge.ProviderOpenAI, providers[0].Type)
|
||||
assert.Equal(t, "second", providers[1].Name)
|
||||
assert.Equal(t, aibridge.ProviderAnthropic, providers[1].Type)
|
||||
})
|
||||
|
||||
t.Run("MultiDigitIndices", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
// Indices 0, 1, 2, ..., 10 — verifies that 10 sorts after 2,
|
||||
// not between 1 and 2 as a lexicographic sort would do.
|
||||
env := []string{}
|
||||
for i := range 11 {
|
||||
env = append(env,
|
||||
fmt.Sprintf("CODER_AIBRIDGE_PROVIDER_%d_TYPE=openai", i),
|
||||
fmt.Sprintf("CODER_AIBRIDGE_PROVIDER_%d_KEY=sk-%d", i, i),
|
||||
fmt.Sprintf("CODER_AIBRIDGE_PROVIDER_%d_NAME=p%d", i, i),
|
||||
)
|
||||
}
|
||||
providers, err := ReadAIBridgeProvidersFromEnv(slogtest.Make(t, nil), env)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, providers, 11)
|
||||
for i, p := range providers {
|
||||
assert.Equal(t, fmt.Sprintf("p%d", i), p.Name, "provider at index %d", i)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("SkippedIndex", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, err := ReadAIBridgeProvidersFromEnv(slogtest.Make(t, nil), []string{
|
||||
"CODER_AIBRIDGE_PROVIDER_0_TYPE=openai",
|
||||
"CODER_AIBRIDGE_PROVIDER_2_TYPE=anthropic",
|
||||
})
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "skipped")
|
||||
})
|
||||
|
||||
t.Run("InvalidKey", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, err := ReadAIBridgeProvidersFromEnv(slogtest.Make(t, nil), []string{
|
||||
"CODER_AIBRIDGE_PROVIDER_XXX_TYPE=openai",
|
||||
})
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "parse number")
|
||||
})
|
||||
|
||||
t.Run("MissingType", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, err := ReadAIBridgeProvidersFromEnv(slogtest.Make(t, nil), []string{
|
||||
"CODER_AIBRIDGE_PROVIDER_0_NAME=my-provider",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_KEY=sk-xxx",
|
||||
})
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "TYPE is required")
|
||||
})
|
||||
|
||||
t.Run("InvalidType", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, err := ReadAIBridgeProvidersFromEnv(slogtest.Make(t, nil), []string{
|
||||
"CODER_AIBRIDGE_PROVIDER_0_TYPE=gemini",
|
||||
})
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "unknown TYPE")
|
||||
})
|
||||
|
||||
t.Run("DuplicateExplicitNames", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, err := ReadAIBridgeProvidersFromEnv(slogtest.Make(t, nil), []string{
|
||||
"CODER_AIBRIDGE_PROVIDER_0_TYPE=anthropic",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_NAME=my-provider",
|
||||
"CODER_AIBRIDGE_PROVIDER_1_TYPE=openai",
|
||||
"CODER_AIBRIDGE_PROVIDER_1_NAME=my-provider",
|
||||
})
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "duplicate NAME")
|
||||
})
|
||||
|
||||
t.Run("DuplicateDefaultNames", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, err := ReadAIBridgeProvidersFromEnv(slogtest.Make(t, nil), []string{
|
||||
"CODER_AIBRIDGE_PROVIDER_0_TYPE=anthropic",
|
||||
"CODER_AIBRIDGE_PROVIDER_1_TYPE=anthropic",
|
||||
})
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "duplicate NAME")
|
||||
})
|
||||
|
||||
t.Run("BedrockFieldsOnNonAnthropic", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
_, err := ReadAIBridgeProvidersFromEnv(slogtest.Make(t, nil), []string{
|
||||
"CODER_AIBRIDGE_PROVIDER_0_TYPE=openai",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_BEDROCK_REGION=us-west-2",
|
||||
})
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "BEDROCK_* fields are only supported with TYPE")
|
||||
})
|
||||
|
||||
t.Run("IgnoresUnrelatedEnvVars", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
providers, err := ReadAIBridgeProvidersFromEnv(slogtest.Make(t, nil), []string{
|
||||
"CODER_AIBRIDGE_OPENAI_KEY=should-be-ignored",
|
||||
"CODER_AIBRIDGE_ANTHROPIC_KEY=also-ignored",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_TYPE=openai",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_KEY=sk-xxx",
|
||||
"SOME_OTHER_VAR=hello",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.Len(t, providers, 1)
|
||||
assert.Equal(t, "sk-xxx", providers[0].Key)
|
||||
})
|
||||
|
||||
t.Run("UnknownFieldWarnsButSucceeds", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
// A typo like TPYE instead of TYPE should not prevent startup;
|
||||
// the function logs a warning and continues.
|
||||
sink := testutil.NewFakeSink(t)
|
||||
providers, err := ReadAIBridgeProvidersFromEnv(sink.Logger(), []string{
|
||||
"CODER_AIBRIDGE_PROVIDER_0_TYPE=openai",
|
||||
"CODER_AIBRIDGE_PROVIDER_0_TPYE=openai",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.Len(t, providers, 1)
|
||||
|
||||
warnings := sink.Entries(func(e slog.SinkEntry) bool {
|
||||
return e.Message == "ignoring unknown aibridge provider field (check for typos)"
|
||||
})
|
||||
require.Len(t, warnings, 1)
|
||||
require.Len(t, warnings[0].Fields, 1)
|
||||
assert.Equal(t, "CODER_AIBRIDGE_PROVIDER_0_TPYE", warnings[0].Fields[0].Value)
|
||||
})
|
||||
}
|
||||
+4
-6
@@ -69,17 +69,15 @@ var (
|
||||
// isRetryableError checks for transient connection errors worth
|
||||
// retrying: DNS failures, connection refused, and server 5xx.
|
||||
func isRetryableError(err error) bool {
|
||||
if err == nil || xerrors.Is(err, context.Canceled) {
|
||||
if err == nil {
|
||||
return false
|
||||
}
|
||||
if xerrors.Is(err, context.Canceled) || xerrors.Is(err, context.DeadlineExceeded) {
|
||||
return false
|
||||
}
|
||||
// Check connection errors before context.DeadlineExceeded because
|
||||
// net.Dialer.Timeout produces *net.OpError that matches both.
|
||||
if codersdk.IsConnectionError(err) {
|
||||
return true
|
||||
}
|
||||
if xerrors.Is(err, context.DeadlineExceeded) {
|
||||
return false
|
||||
}
|
||||
var sdkErr *codersdk.Error
|
||||
if xerrors.As(err, &sdkErr) {
|
||||
return sdkErr.StatusCode() >= 500
|
||||
|
||||
@@ -516,23 +516,6 @@ func TestIsRetryableError(t *testing.T) {
|
||||
assert.Equal(t, tt.retryable, isRetryableError(tt.err))
|
||||
})
|
||||
}
|
||||
|
||||
// net.Dialer.Timeout produces *net.OpError that matches both
|
||||
// IsConnectionError and context.DeadlineExceeded. Verify it is retryable.
|
||||
t.Run("DialTimeout", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx, cancel := context.WithDeadline(context.Background(), time.Now())
|
||||
defer cancel()
|
||||
<-ctx.Done() // ensure deadline has fired
|
||||
_, err := (&net.Dialer{}).DialContext(ctx, "tcp", "127.0.0.1:1")
|
||||
require.Error(t, err)
|
||||
// Proves the ambiguity: this error matches BOTH checks.
|
||||
require.ErrorIs(t, err, context.DeadlineExceeded)
|
||||
require.ErrorAs(t, err, new(*net.OpError))
|
||||
assert.True(t, isRetryableError(err))
|
||||
// Also when wrapped, as runCoderConnectStdio does.
|
||||
assert.True(t, isRetryableError(xerrors.Errorf("dial coder connect: %w", err)))
|
||||
})
|
||||
}
|
||||
|
||||
func TestRetryWithInterval(t *testing.T) {
|
||||
|
||||
+1
-1
@@ -11,7 +11,7 @@ OPTIONS:
|
||||
-O, --org string, $CODER_ORGANIZATION
|
||||
Select which organization (uuid or name) to use.
|
||||
|
||||
-c, --column [id|created at|started at|completed at|canceled at|error|error code|status|worker id|worker name|file id|tags|queue position|queue size|organization id|initiator id|template version id|workspace build id|type|available workers|template version name|template id|template name|template display name|template icon|workspace id|workspace name|workspace build transition|logs overflowed|organization|queue] (default: created at,id,type,template display name,status,queue,tags)
|
||||
-c, --column [id|created at|started at|completed at|canceled at|error|error code|status|worker id|worker name|file id|tags|queue position|queue size|organization id|initiator id|template version id|workspace build id|type|available workers|template version name|template id|template name|template display name|template icon|workspace id|workspace name|logs overflowed|organization|queue] (default: created at,id,type,template display name,status,queue,tags)
|
||||
Columns to display in table output.
|
||||
|
||||
-i, --initiator string, $CODER_PROVISIONER_JOB_LIST_INITIATOR
|
||||
|
||||
@@ -58,8 +58,7 @@
|
||||
"template_display_name": "",
|
||||
"template_icon": "",
|
||||
"workspace_id": "===========[workspace ID]===========",
|
||||
"workspace_name": "test-workspace",
|
||||
"workspace_build_transition": "start"
|
||||
"workspace_name": "test-workspace"
|
||||
},
|
||||
"logs_overflowed": false,
|
||||
"organization_name": "Coder"
|
||||
|
||||
-7
@@ -211,13 +211,6 @@ AI BRIDGE PROXY OPTIONS:
|
||||
certificates not trusted by the system. If not provided, the system
|
||||
certificate pool is used.
|
||||
|
||||
CHAT OPTIONS:
|
||||
Configure the background chat processing daemon.
|
||||
|
||||
--chat-debug-logging-enabled bool, $CODER_CHAT_DEBUG_LOGGING_ENABLED (default: false)
|
||||
Force chat debug logging on for every chat, bypassing the runtime
|
||||
admin and user opt-in settings.
|
||||
|
||||
CLIENT OPTIONS:
|
||||
These options change the behavior of how clients interact with the Coder.
|
||||
Clients include the Coder CLI, Coder Desktop, IDE extensions, and the web UI.
|
||||
|
||||
+4
-19
@@ -757,10 +757,6 @@ chat:
|
||||
# How many pending chats a worker should acquire per polling cycle.
|
||||
# (default: 10, type: int)
|
||||
acquireBatchSize: 10
|
||||
# Force chat debug logging on for every chat, bypassing the runtime admin and user
|
||||
# opt-in settings.
|
||||
# (default: false, type: bool)
|
||||
debugLoggingEnabled: false
|
||||
aibridge:
|
||||
# Whether to start an in-memory aibridged instance.
|
||||
# (default: false, type: bool)
|
||||
@@ -858,21 +854,10 @@ aibridgeproxy:
|
||||
# clients.
|
||||
# (default: <unset>, type: string)
|
||||
key_file: ""
|
||||
# Comma-separated list of AI provider domains for which HTTPS traffic will be
|
||||
# decrypted and routed through AI Bridge. Requests to other domains will be
|
||||
# tunneled directly without decryption. Supported domains: api.anthropic.com,
|
||||
# api.openai.com, api.individual.githubcopilot.com,
|
||||
# api.business.githubcopilot.com, api.enterprise.githubcopilot.com, chatgpt.com.
|
||||
# (default:
|
||||
# api.anthropic.com,api.openai.com,api.individual.githubcopilot.com,api.business.githubcopilot.com,api.enterprise.githubcopilot.com,chatgpt.com,
|
||||
# type: string-array)
|
||||
domain_allowlist:
|
||||
- api.anthropic.com
|
||||
- api.openai.com
|
||||
- api.individual.githubcopilot.com
|
||||
- api.business.githubcopilot.com
|
||||
- api.enterprise.githubcopilot.com
|
||||
- chatgpt.com
|
||||
# Deprecated: This value is now derived automatically from the configured AI
|
||||
# Bridge providers' base URLs. This option will be removed in a future release.
|
||||
# (default: <unset>, type: string-array)
|
||||
domain_allowlist: []
|
||||
# URL of an upstream HTTP proxy to chain tunneled (non-allowlisted) requests
|
||||
# through. Format: http://[user:pass@]host:port or https://[user:pass@]host:port.
|
||||
# (default: <unset>, type: string)
|
||||
|
||||
@@ -71,7 +71,7 @@ func (a *SubAgentAPI) CreateSubAgent(ctx context.Context, req *agentproto.Create
|
||||
// An ID is only given in the request when it is a terraform-defined devcontainer
|
||||
// that has attached resources. These subagents are pre-provisioned by terraform
|
||||
// (the agent record already exists), so we update configurable fields like
|
||||
// display_apps and directory rather than creating a new agent.
|
||||
// display_apps rather than creating a new agent.
|
||||
if req.Id != nil {
|
||||
id, err := uuid.FromBytes(req.Id)
|
||||
if err != nil {
|
||||
@@ -97,16 +97,6 @@ func (a *SubAgentAPI) CreateSubAgent(ctx context.Context, req *agentproto.Create
|
||||
return nil, xerrors.Errorf("update workspace agent display apps: %w", err)
|
||||
}
|
||||
|
||||
if req.Directory != "" {
|
||||
if err := a.Database.UpdateWorkspaceAgentDirectoryByID(ctx, database.UpdateWorkspaceAgentDirectoryByIDParams{
|
||||
ID: id,
|
||||
Directory: req.Directory,
|
||||
UpdatedAt: createdAt,
|
||||
}); err != nil {
|
||||
return nil, xerrors.Errorf("update workspace agent directory: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
return &agentproto.CreateSubAgentResponse{
|
||||
Agent: &agentproto.SubAgent{
|
||||
Name: subAgent.Name,
|
||||
|
||||
@@ -1267,11 +1267,11 @@ func TestSubAgentAPI(t *testing.T) {
|
||||
agentID, err := uuid.FromBytes(resp.Agent.Id)
|
||||
require.NoError(t, err)
|
||||
|
||||
// And: The database agent's name, architecture, and OS are unchanged.
|
||||
// And: The database agent's other fields are unchanged.
|
||||
updatedAgent, err := db.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), agentID)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, baseChildAgent.Name, updatedAgent.Name)
|
||||
require.Equal(t, "/different/path", updatedAgent.Directory)
|
||||
require.Equal(t, baseChildAgent.Directory, updatedAgent.Directory)
|
||||
require.Equal(t, baseChildAgent.Architecture, updatedAgent.Architecture)
|
||||
require.Equal(t, baseChildAgent.OperatingSystem, updatedAgent.OperatingSystem)
|
||||
|
||||
@@ -1280,42 +1280,6 @@ func TestSubAgentAPI(t *testing.T) {
|
||||
require.Equal(t, database.DisplayAppWebTerminal, updatedAgent.DisplayApps[0])
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "OK_DirectoryUpdated",
|
||||
setup: func(t *testing.T, db database.Store, agent database.WorkspaceAgent) *proto.CreateSubAgentRequest {
|
||||
// Given: An existing child agent with a stale host-side
|
||||
// directory (as set by the provisioner at build time).
|
||||
childAgent := dbgen.WorkspaceAgent(t, db, database.WorkspaceAgent{
|
||||
ParentID: uuid.NullUUID{Valid: true, UUID: agent.ID},
|
||||
ResourceID: agent.ResourceID,
|
||||
Name: baseChildAgent.Name,
|
||||
Directory: "/home/coder/project",
|
||||
Architecture: baseChildAgent.Architecture,
|
||||
OperatingSystem: baseChildAgent.OperatingSystem,
|
||||
DisplayApps: baseChildAgent.DisplayApps,
|
||||
})
|
||||
|
||||
// When: Agent injection sends the correct
|
||||
// container-internal path.
|
||||
return &proto.CreateSubAgentRequest{
|
||||
Id: childAgent.ID[:],
|
||||
Directory: "/workspaces/project",
|
||||
DisplayApps: []proto.CreateSubAgentRequest_DisplayApp{
|
||||
proto.CreateSubAgentRequest_WEB_TERMINAL,
|
||||
},
|
||||
}
|
||||
},
|
||||
check: func(t *testing.T, ctx context.Context, db database.Store, resp *proto.CreateSubAgentResponse, agent database.WorkspaceAgent) {
|
||||
agentID, err := uuid.FromBytes(resp.Agent.Id)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Then: Directory is updated to the container-internal
|
||||
// path.
|
||||
updatedAgent, err := db.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), agentID)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, "/workspaces/project", updatedAgent.Directory)
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Error/MalformedID",
|
||||
setup: func(t *testing.T, db database.Store, agent database.WorkspaceAgent) *proto.CreateSubAgentRequest {
|
||||
|
||||
Generated
+51
-287
@@ -9514,212 +9514,6 @@ const docTemplate = `{
|
||||
]
|
||||
}
|
||||
},
|
||||
"/users/{user}/secrets": {
|
||||
"get": {
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Secrets"
|
||||
],
|
||||
"summary": "List user secrets",
|
||||
"operationId": "list-user-secrets",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "User ID, username, or me",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/codersdk.UserSecret"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"post": {
|
||||
"consumes": [
|
||||
"application/json"
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Secrets"
|
||||
],
|
||||
"summary": "Create a new user secret",
|
||||
"operationId": "create-a-new-user-secret",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "User ID, username, or me",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"description": "Create secret request",
|
||||
"name": "request",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.CreateUserSecretRequest"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "Created",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.UserSecret"
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/users/{user}/secrets/{name}": {
|
||||
"get": {
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Secrets"
|
||||
],
|
||||
"summary": "Get a user secret by name",
|
||||
"operationId": "get-a-user-secret-by-name",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "User ID, username, or me",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Secret name",
|
||||
"name": "name",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.UserSecret"
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"delete": {
|
||||
"tags": [
|
||||
"Secrets"
|
||||
],
|
||||
"summary": "Delete a user secret",
|
||||
"operationId": "delete-a-user-secret",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "User ID, username, or me",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Secret name",
|
||||
"name": "name",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"204": {
|
||||
"description": "No Content"
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"patch": {
|
||||
"consumes": [
|
||||
"application/json"
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Secrets"
|
||||
],
|
||||
"summary": "Update a user secret",
|
||||
"operationId": "update-a-user-secret",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "User ID, username, or me",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Secret name",
|
||||
"name": "name",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"description": "Update secret request",
|
||||
"name": "request",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.UpdateUserSecretRequest"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.UserSecret"
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/users/{user}/status/activate": {
|
||||
"put": {
|
||||
"produces": [
|
||||
@@ -13103,10 +12897,20 @@ const docTemplate = `{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"anthropic": {
|
||||
"$ref": "#/definitions/codersdk.AIBridgeAnthropicConfig"
|
||||
"description": "Deprecated: Use Providers with indexed CODER_AIBRIDGE_PROVIDER_\u003cN\u003e_* env vars instead.",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/codersdk.AIBridgeAnthropicConfig"
|
||||
}
|
||||
]
|
||||
},
|
||||
"bedrock": {
|
||||
"$ref": "#/definitions/codersdk.AIBridgeBedrockConfig"
|
||||
"description": "Deprecated: Use Providers with indexed CODER_AIBRIDGE_PROVIDER_\u003cN\u003e_* env vars instead.",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/codersdk.AIBridgeBedrockConfig"
|
||||
}
|
||||
]
|
||||
},
|
||||
"circuit_breaker_enabled": {
|
||||
"description": "Circuit breaker protects against cascading failures from upstream AI\nprovider rate limits (429, 503, 529 overloaded).",
|
||||
@@ -13135,7 +12939,19 @@ const docTemplate = `{
|
||||
"type": "integer"
|
||||
},
|
||||
"openai": {
|
||||
"$ref": "#/definitions/codersdk.AIBridgeOpenAIConfig"
|
||||
"description": "Deprecated: Use Providers with indexed CODER_AIBRIDGE_PROVIDER_\u003cN\u003e_* env vars instead.",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/codersdk.AIBridgeOpenAIConfig"
|
||||
}
|
||||
]
|
||||
},
|
||||
"providers": {
|
||||
"description": "Providers holds provider instances populated from CODER_AIBRIDGE_PROVIDER_\u003cN\u003e_\u003cKEY\u003e\nenv vars and/or the deprecated LegacyOpenAI/LegacyAnthropic/LegacyBedrock fields above.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/codersdk.AIBridgeProviderConfig"
|
||||
}
|
||||
},
|
||||
"rate_limit": {
|
||||
"type": "integer"
|
||||
@@ -13255,6 +13071,32 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.AIBridgeProviderConfig": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"base_url": {
|
||||
"description": "BaseURL is the base URL of the upstream provider API.",
|
||||
"type": "string"
|
||||
},
|
||||
"bedrock_model": {
|
||||
"type": "string"
|
||||
},
|
||||
"bedrock_region": {
|
||||
"type": "string"
|
||||
},
|
||||
"bedrock_small_fast_model": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"description": "Name is the unique instance identifier used for routing.\nDefaults to Type if not provided.",
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"description": "Type is the provider type: \"openai\", \"anthropic\", or \"copilot\".",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.AIBridgeProxyConfig": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -13445,12 +13287,6 @@ const docTemplate = `{
|
||||
"$ref": "#/definitions/codersdk.AIBridgeAgenticAction"
|
||||
}
|
||||
},
|
||||
"credential_hint": {
|
||||
"type": "string"
|
||||
},
|
||||
"credential_kind": {
|
||||
"type": "string"
|
||||
},
|
||||
"ended_at": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
@@ -14691,9 +14527,6 @@ const docTemplate = `{
|
||||
"properties": {
|
||||
"acquire_batch_size": {
|
||||
"type": "integer"
|
||||
},
|
||||
"debug_logging_enabled": {
|
||||
"type": "boolean"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -15357,26 +15190,6 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.CreateUserSecretRequest": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"env_name": {
|
||||
"type": "string"
|
||||
},
|
||||
"file_path": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"value": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.CreateWorkspaceBuildReason": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
@@ -19152,9 +18965,6 @@ const docTemplate = `{
|
||||
"template_version_name": {
|
||||
"type": "string"
|
||||
},
|
||||
"workspace_build_transition": {
|
||||
"$ref": "#/definitions/codersdk.WorkspaceTransition"
|
||||
},
|
||||
"workspace_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
@@ -21509,23 +21319,6 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.UpdateUserSecretRequest": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"env_name": {
|
||||
"type": "string"
|
||||
},
|
||||
"file_path": {
|
||||
"type": "string"
|
||||
},
|
||||
"value": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.UpdateWorkspaceACL": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -21981,35 +21774,6 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.UserSecret": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"created_at": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"env_name": {
|
||||
"type": "string"
|
||||
},
|
||||
"file_path": {
|
||||
"type": "string"
|
||||
},
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"updated_at": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.UserStatus": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
|
||||
Generated
+51
-265
@@ -8431,190 +8431,6 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"/users/{user}/secrets": {
|
||||
"get": {
|
||||
"produces": ["application/json"],
|
||||
"tags": ["Secrets"],
|
||||
"summary": "List user secrets",
|
||||
"operationId": "list-user-secrets",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "User ID, username, or me",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/codersdk.UserSecret"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"post": {
|
||||
"consumes": ["application/json"],
|
||||
"produces": ["application/json"],
|
||||
"tags": ["Secrets"],
|
||||
"summary": "Create a new user secret",
|
||||
"operationId": "create-a-new-user-secret",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "User ID, username, or me",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"description": "Create secret request",
|
||||
"name": "request",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.CreateUserSecretRequest"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "Created",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.UserSecret"
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/users/{user}/secrets/{name}": {
|
||||
"get": {
|
||||
"produces": ["application/json"],
|
||||
"tags": ["Secrets"],
|
||||
"summary": "Get a user secret by name",
|
||||
"operationId": "get-a-user-secret-by-name",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "User ID, username, or me",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Secret name",
|
||||
"name": "name",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.UserSecret"
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"delete": {
|
||||
"tags": ["Secrets"],
|
||||
"summary": "Delete a user secret",
|
||||
"operationId": "delete-a-user-secret",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "User ID, username, or me",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Secret name",
|
||||
"name": "name",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"204": {
|
||||
"description": "No Content"
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"patch": {
|
||||
"consumes": ["application/json"],
|
||||
"produces": ["application/json"],
|
||||
"tags": ["Secrets"],
|
||||
"summary": "Update a user secret",
|
||||
"operationId": "update-a-user-secret",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"description": "User ID, username, or me",
|
||||
"name": "user",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Secret name",
|
||||
"name": "name",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"description": "Update secret request",
|
||||
"name": "request",
|
||||
"in": "body",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.UpdateUserSecretRequest"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.UserSecret"
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/users/{user}/status/activate": {
|
||||
"put": {
|
||||
"produces": ["application/json"],
|
||||
@@ -11651,10 +11467,20 @@
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"anthropic": {
|
||||
"$ref": "#/definitions/codersdk.AIBridgeAnthropicConfig"
|
||||
"description": "Deprecated: Use Providers with indexed CODER_AIBRIDGE_PROVIDER_\u003cN\u003e_* env vars instead.",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/codersdk.AIBridgeAnthropicConfig"
|
||||
}
|
||||
]
|
||||
},
|
||||
"bedrock": {
|
||||
"$ref": "#/definitions/codersdk.AIBridgeBedrockConfig"
|
||||
"description": "Deprecated: Use Providers with indexed CODER_AIBRIDGE_PROVIDER_\u003cN\u003e_* env vars instead.",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/codersdk.AIBridgeBedrockConfig"
|
||||
}
|
||||
]
|
||||
},
|
||||
"circuit_breaker_enabled": {
|
||||
"description": "Circuit breaker protects against cascading failures from upstream AI\nprovider rate limits (429, 503, 529 overloaded).",
|
||||
@@ -11683,7 +11509,19 @@
|
||||
"type": "integer"
|
||||
},
|
||||
"openai": {
|
||||
"$ref": "#/definitions/codersdk.AIBridgeOpenAIConfig"
|
||||
"description": "Deprecated: Use Providers with indexed CODER_AIBRIDGE_PROVIDER_\u003cN\u003e_* env vars instead.",
|
||||
"allOf": [
|
||||
{
|
||||
"$ref": "#/definitions/codersdk.AIBridgeOpenAIConfig"
|
||||
}
|
||||
]
|
||||
},
|
||||
"providers": {
|
||||
"description": "Providers holds provider instances populated from CODER_AIBRIDGE_PROVIDER_\u003cN\u003e_\u003cKEY\u003e\nenv vars and/or the deprecated LegacyOpenAI/LegacyAnthropic/LegacyBedrock fields above.",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/codersdk.AIBridgeProviderConfig"
|
||||
}
|
||||
},
|
||||
"rate_limit": {
|
||||
"type": "integer"
|
||||
@@ -11803,6 +11641,32 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.AIBridgeProviderConfig": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"base_url": {
|
||||
"description": "BaseURL is the base URL of the upstream provider API.",
|
||||
"type": "string"
|
||||
},
|
||||
"bedrock_model": {
|
||||
"type": "string"
|
||||
},
|
||||
"bedrock_region": {
|
||||
"type": "string"
|
||||
},
|
||||
"bedrock_small_fast_model": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"description": "Name is the unique instance identifier used for routing.\nDefaults to Type if not provided.",
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"description": "Type is the provider type: \"openai\", \"anthropic\", or \"copilot\".",
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.AIBridgeProxyConfig": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -11993,12 +11857,6 @@
|
||||
"$ref": "#/definitions/codersdk.AIBridgeAgenticAction"
|
||||
}
|
||||
},
|
||||
"credential_hint": {
|
||||
"type": "string"
|
||||
},
|
||||
"credential_kind": {
|
||||
"type": "string"
|
||||
},
|
||||
"ended_at": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
@@ -13204,9 +13062,6 @@
|
||||
"properties": {
|
||||
"acquire_batch_size": {
|
||||
"type": "integer"
|
||||
},
|
||||
"debug_logging_enabled": {
|
||||
"type": "boolean"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -13836,26 +13691,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.CreateUserSecretRequest": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"env_name": {
|
||||
"type": "string"
|
||||
},
|
||||
"file_path": {
|
||||
"type": "string"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"value": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.CreateWorkspaceBuildReason": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
@@ -17512,9 +17347,6 @@
|
||||
"template_version_name": {
|
||||
"type": "string"
|
||||
},
|
||||
"workspace_build_transition": {
|
||||
"$ref": "#/definitions/codersdk.WorkspaceTransition"
|
||||
},
|
||||
"workspace_id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
@@ -19761,23 +19593,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.UpdateUserSecretRequest": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"env_name": {
|
||||
"type": "string"
|
||||
},
|
||||
"file_path": {
|
||||
"type": "string"
|
||||
},
|
||||
"value": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.UpdateWorkspaceACL": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -20208,35 +20023,6 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.UserSecret": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"created_at": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"env_name": {
|
||||
"type": "string"
|
||||
},
|
||||
"file_path": {
|
||||
"type": "string"
|
||||
},
|
||||
"id": {
|
||||
"type": "string",
|
||||
"format": "uuid"
|
||||
},
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"updated_at": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
}
|
||||
}
|
||||
},
|
||||
"codersdk.UserStatus": {
|
||||
"type": "string",
|
||||
"enum": ["active", "dormant", "suspended"],
|
||||
|
||||
@@ -1608,15 +1608,6 @@ func New(options *Options) *API {
|
||||
|
||||
r.Get("/gitsshkey", api.gitSSHKey)
|
||||
r.Put("/gitsshkey", api.regenerateGitSSHKey)
|
||||
r.Route("/secrets", func(r chi.Router) {
|
||||
r.Post("/", api.postUserSecret)
|
||||
r.Get("/", api.getUserSecrets)
|
||||
r.Route("/{name}", func(r chi.Router) {
|
||||
r.Get("/", api.getUserSecret)
|
||||
r.Patch("/", api.patchUserSecret)
|
||||
r.Delete("/", api.deleteUserSecret)
|
||||
})
|
||||
})
|
||||
r.Route("/notifications", func(r chi.Router) {
|
||||
r.Route("/preferences", func(r chi.Router) {
|
||||
r.Get("/", api.userNotificationPreferences)
|
||||
|
||||
@@ -1240,8 +1240,6 @@ func buildAIBridgeThread(
|
||||
if rootIntc != nil {
|
||||
thread.Model = rootIntc.Model
|
||||
thread.Provider = rootIntc.Provider
|
||||
thread.CredentialKind = string(rootIntc.CredentialKind)
|
||||
thread.CredentialHint = rootIntc.CredentialHint
|
||||
// Get first user prompt from root interception.
|
||||
// A thread can only have one prompt, by definition, since we currently
|
||||
// only store the last prompt observed in an interception.
|
||||
@@ -1533,22 +1531,6 @@ func nullInt64Ptr(v sql.NullInt64) *int64 {
|
||||
return &value
|
||||
}
|
||||
|
||||
func nullStringPtr(v sql.NullString) *string {
|
||||
if !v.Valid {
|
||||
return nil
|
||||
}
|
||||
value := v.String
|
||||
return &value
|
||||
}
|
||||
|
||||
func nullTimePtr(v sql.NullTime) *time.Time {
|
||||
if !v.Valid {
|
||||
return nil
|
||||
}
|
||||
value := v.Time
|
||||
return &value
|
||||
}
|
||||
|
||||
// Chat converts a database.Chat to a codersdk.Chat. It coalesces
|
||||
// nil slices and maps to empty values for JSON serialization and
|
||||
// derives RootChatID from the parent chain when not explicitly set.
|
||||
@@ -1635,88 +1617,6 @@ func Chat(c database.Chat, diffStatus *database.ChatDiffStatus, files []database
|
||||
return chat
|
||||
}
|
||||
|
||||
func chatDebugAttempts(raw json.RawMessage) []map[string]any {
|
||||
if len(raw) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var attempts []map[string]any
|
||||
if err := json.Unmarshal(raw, &attempts); err != nil {
|
||||
return []map[string]any{{
|
||||
"error": "malformed attempts payload",
|
||||
"raw": string(raw),
|
||||
}}
|
||||
}
|
||||
return attempts
|
||||
}
|
||||
|
||||
// rawJSONObject deserializes a JSON object payload for debug display.
|
||||
// If the payload is malformed, it returns a map with "error" and "raw"
|
||||
// keys preserving the original content for diagnostics. Callers that
|
||||
// consume the result programmatically should check for the "error" key.
|
||||
func rawJSONObject(raw json.RawMessage) map[string]any {
|
||||
if len(raw) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var object map[string]any
|
||||
if err := json.Unmarshal(raw, &object); err != nil {
|
||||
return map[string]any{
|
||||
"error": "malformed debug payload",
|
||||
"raw": string(raw),
|
||||
}
|
||||
}
|
||||
return object
|
||||
}
|
||||
|
||||
func nullRawJSONObject(raw pqtype.NullRawMessage) map[string]any {
|
||||
if !raw.Valid {
|
||||
return nil
|
||||
}
|
||||
return rawJSONObject(raw.RawMessage)
|
||||
}
|
||||
|
||||
// ChatDebugRunSummary converts a database.ChatDebugRun to a
|
||||
// codersdk.ChatDebugRunSummary.
|
||||
func ChatDebugRunSummary(r database.ChatDebugRun) codersdk.ChatDebugRunSummary {
|
||||
return codersdk.ChatDebugRunSummary{
|
||||
ID: r.ID,
|
||||
ChatID: r.ChatID,
|
||||
Kind: codersdk.ChatDebugRunKind(r.Kind),
|
||||
Status: codersdk.ChatDebugStatus(r.Status),
|
||||
Provider: nullStringPtr(r.Provider),
|
||||
Model: nullStringPtr(r.Model),
|
||||
Summary: rawJSONObject(r.Summary),
|
||||
StartedAt: r.StartedAt,
|
||||
UpdatedAt: r.UpdatedAt,
|
||||
FinishedAt: nullTimePtr(r.FinishedAt),
|
||||
}
|
||||
}
|
||||
|
||||
// ChatDebugStep converts a database.ChatDebugStep to a
|
||||
// codersdk.ChatDebugStep.
|
||||
func ChatDebugStep(s database.ChatDebugStep) codersdk.ChatDebugStep {
|
||||
return codersdk.ChatDebugStep{
|
||||
ID: s.ID,
|
||||
RunID: s.RunID,
|
||||
ChatID: s.ChatID,
|
||||
StepNumber: s.StepNumber,
|
||||
Operation: codersdk.ChatDebugStepOperation(s.Operation),
|
||||
Status: codersdk.ChatDebugStatus(s.Status),
|
||||
HistoryTipMessageID: nullInt64Ptr(s.HistoryTipMessageID),
|
||||
AssistantMessageID: nullInt64Ptr(s.AssistantMessageID),
|
||||
NormalizedRequest: rawJSONObject(s.NormalizedRequest),
|
||||
NormalizedResponse: nullRawJSONObject(s.NormalizedResponse),
|
||||
Usage: nullRawJSONObject(s.Usage),
|
||||
Attempts: chatDebugAttempts(s.Attempts),
|
||||
Error: nullRawJSONObject(s.Error),
|
||||
Metadata: rawJSONObject(s.Metadata),
|
||||
StartedAt: s.StartedAt,
|
||||
UpdatedAt: s.UpdatedAt,
|
||||
FinishedAt: nullTimePtr(s.FinishedAt),
|
||||
}
|
||||
}
|
||||
|
||||
// ChatRows converts a slice of database.GetChatsRow (which embeds
|
||||
// Chat plus HasUnread) to codersdk.Chat, looking up diff statuses
|
||||
// from the provided map. When diffStatusesByChatID is non-nil,
|
||||
|
||||
@@ -210,231 +210,6 @@ func TestTemplateVersionParameter_BadDescription(t *testing.T) {
|
||||
req.NotEmpty(sdk.DescriptionPlaintext, "broke the markdown parser with %v", desc)
|
||||
}
|
||||
|
||||
func TestChatDebugRunSummary(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
startedAt := time.Now().UTC().Round(time.Second)
|
||||
finishedAt := startedAt.Add(5 * time.Second)
|
||||
|
||||
run := database.ChatDebugRun{
|
||||
ID: uuid.New(),
|
||||
ChatID: uuid.New(),
|
||||
Kind: "chat_turn",
|
||||
Status: "completed",
|
||||
Provider: sql.NullString{String: "openai", Valid: true},
|
||||
Model: sql.NullString{String: "gpt-4o", Valid: true},
|
||||
Summary: json.RawMessage(`{"step_count":3,"has_error":false}`),
|
||||
StartedAt: startedAt,
|
||||
UpdatedAt: finishedAt,
|
||||
FinishedAt: sql.NullTime{Time: finishedAt, Valid: true},
|
||||
}
|
||||
|
||||
sdk := db2sdk.ChatDebugRunSummary(run)
|
||||
|
||||
require.Equal(t, run.ID, sdk.ID)
|
||||
require.Equal(t, run.ChatID, sdk.ChatID)
|
||||
require.Equal(t, codersdk.ChatDebugRunKindChatTurn, sdk.Kind)
|
||||
require.Equal(t, codersdk.ChatDebugStatusCompleted, sdk.Status)
|
||||
require.NotNil(t, sdk.Provider)
|
||||
require.Equal(t, "openai", *sdk.Provider)
|
||||
require.NotNil(t, sdk.Model)
|
||||
require.Equal(t, "gpt-4o", *sdk.Model)
|
||||
require.Equal(t, map[string]any{"step_count": float64(3), "has_error": false}, sdk.Summary)
|
||||
require.Equal(t, startedAt, sdk.StartedAt)
|
||||
require.Equal(t, finishedAt, sdk.UpdatedAt)
|
||||
require.NotNil(t, sdk.FinishedAt)
|
||||
require.Equal(t, finishedAt, *sdk.FinishedAt)
|
||||
}
|
||||
|
||||
func TestChatDebugRunSummary_NullableFieldsNil(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
run := database.ChatDebugRun{
|
||||
ID: uuid.New(),
|
||||
ChatID: uuid.New(),
|
||||
Kind: "title_generation",
|
||||
Status: "in_progress",
|
||||
Summary: json.RawMessage(`{}`),
|
||||
StartedAt: time.Now().UTC(),
|
||||
UpdatedAt: time.Now().UTC(),
|
||||
}
|
||||
|
||||
sdk := db2sdk.ChatDebugRunSummary(run)
|
||||
|
||||
require.Nil(t, sdk.Provider, "NULL Provider should map to nil")
|
||||
require.Nil(t, sdk.Model, "NULL Model should map to nil")
|
||||
require.Nil(t, sdk.FinishedAt, "NULL FinishedAt should map to nil")
|
||||
}
|
||||
|
||||
func TestChatDebugStep(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
startedAt := time.Now().UTC().Round(time.Second)
|
||||
finishedAt := startedAt.Add(2 * time.Second)
|
||||
attempts := json.RawMessage(`[
|
||||
{
|
||||
"attempt_number": 1,
|
||||
"status": "completed",
|
||||
"raw_request": {"url": "https://example.com"},
|
||||
"raw_response": {"status": "200"},
|
||||
"duration_ms": 123,
|
||||
"started_at": "2026-03-01T10:00:01Z",
|
||||
"finished_at": "2026-03-01T10:00:02Z"
|
||||
}
|
||||
]`)
|
||||
step := database.ChatDebugStep{
|
||||
ID: uuid.New(),
|
||||
RunID: uuid.New(),
|
||||
ChatID: uuid.New(),
|
||||
StepNumber: 1,
|
||||
Operation: "stream",
|
||||
Status: "completed",
|
||||
NormalizedRequest: json.RawMessage(`{"messages":[]}`),
|
||||
Attempts: attempts,
|
||||
Metadata: json.RawMessage(`{"provider":"openai"}`),
|
||||
StartedAt: startedAt,
|
||||
UpdatedAt: finishedAt,
|
||||
FinishedAt: sql.NullTime{Time: finishedAt, Valid: true},
|
||||
}
|
||||
|
||||
sdk := db2sdk.ChatDebugStep(step)
|
||||
|
||||
// Verify all scalar fields are mapped correctly.
|
||||
require.Equal(t, step.ID, sdk.ID)
|
||||
require.Equal(t, step.RunID, sdk.RunID)
|
||||
require.Equal(t, step.ChatID, sdk.ChatID)
|
||||
require.Equal(t, step.StepNumber, sdk.StepNumber)
|
||||
require.Equal(t, codersdk.ChatDebugStepOperationStream, sdk.Operation)
|
||||
require.Equal(t, codersdk.ChatDebugStatusCompleted, sdk.Status)
|
||||
require.Equal(t, startedAt, sdk.StartedAt)
|
||||
require.Equal(t, finishedAt, sdk.UpdatedAt)
|
||||
require.Equal(t, &finishedAt, sdk.FinishedAt)
|
||||
|
||||
// Verify JSON object fields are deserialized.
|
||||
require.NotNil(t, sdk.NormalizedRequest)
|
||||
require.Equal(t, map[string]any{"messages": []any{}}, sdk.NormalizedRequest)
|
||||
require.NotNil(t, sdk.Metadata)
|
||||
require.Equal(t, map[string]any{"provider": "openai"}, sdk.Metadata)
|
||||
|
||||
// Verify nullable fields are nil when the DB row has NULL values.
|
||||
require.Nil(t, sdk.HistoryTipMessageID, "NULL HistoryTipMessageID should map to nil")
|
||||
require.Nil(t, sdk.AssistantMessageID, "NULL AssistantMessageID should map to nil")
|
||||
require.Nil(t, sdk.NormalizedResponse, "NULL NormalizedResponse should map to nil")
|
||||
require.Nil(t, sdk.Usage, "NULL Usage should map to nil")
|
||||
require.Nil(t, sdk.Error, "NULL Error should map to nil")
|
||||
|
||||
// Verify attempts are preserved with all fields.
|
||||
require.Len(t, sdk.Attempts, 1)
|
||||
require.Equal(t, float64(1), sdk.Attempts[0]["attempt_number"])
|
||||
require.Equal(t, "completed", sdk.Attempts[0]["status"])
|
||||
require.Equal(t, float64(123), sdk.Attempts[0]["duration_ms"])
|
||||
require.Equal(t, map[string]any{"url": "https://example.com"}, sdk.Attempts[0]["raw_request"])
|
||||
require.Equal(t, map[string]any{"status": "200"}, sdk.Attempts[0]["raw_response"])
|
||||
}
|
||||
|
||||
func TestChatDebugStep_NullableFieldsPopulated(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
tipID := int64(42)
|
||||
asstID := int64(99)
|
||||
step := database.ChatDebugStep{
|
||||
ID: uuid.New(),
|
||||
RunID: uuid.New(),
|
||||
ChatID: uuid.New(),
|
||||
StepNumber: 2,
|
||||
Operation: "generate",
|
||||
Status: "completed",
|
||||
HistoryTipMessageID: sql.NullInt64{Int64: tipID, Valid: true},
|
||||
AssistantMessageID: sql.NullInt64{Int64: asstID, Valid: true},
|
||||
NormalizedRequest: json.RawMessage(`{}`),
|
||||
NormalizedResponse: pqtype.NullRawMessage{RawMessage: json.RawMessage(`{"text":"hi"}`), Valid: true},
|
||||
Usage: pqtype.NullRawMessage{RawMessage: json.RawMessage(`{"tokens":10}`), Valid: true},
|
||||
Error: pqtype.NullRawMessage{RawMessage: json.RawMessage(`{"code":"rate_limit"}`), Valid: true},
|
||||
Attempts: json.RawMessage(`[]`),
|
||||
Metadata: json.RawMessage(`{}`),
|
||||
StartedAt: time.Now().UTC(),
|
||||
UpdatedAt: time.Now().UTC(),
|
||||
}
|
||||
|
||||
sdk := db2sdk.ChatDebugStep(step)
|
||||
|
||||
require.NotNil(t, sdk.HistoryTipMessageID)
|
||||
require.Equal(t, tipID, *sdk.HistoryTipMessageID)
|
||||
require.NotNil(t, sdk.AssistantMessageID)
|
||||
require.Equal(t, asstID, *sdk.AssistantMessageID)
|
||||
require.NotNil(t, sdk.NormalizedResponse)
|
||||
require.Equal(t, map[string]any{"text": "hi"}, sdk.NormalizedResponse)
|
||||
require.NotNil(t, sdk.Usage)
|
||||
require.Equal(t, map[string]any{"tokens": float64(10)}, sdk.Usage)
|
||||
require.NotNil(t, sdk.Error)
|
||||
require.Equal(t, map[string]any{"code": "rate_limit"}, sdk.Error)
|
||||
}
|
||||
|
||||
func TestChatDebugStep_PreservesMalformedAttempts(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
step := database.ChatDebugStep{
|
||||
ID: uuid.New(),
|
||||
RunID: uuid.New(),
|
||||
ChatID: uuid.New(),
|
||||
StepNumber: 1,
|
||||
Operation: "stream",
|
||||
Status: "completed",
|
||||
NormalizedRequest: json.RawMessage(`{"messages":[]}`),
|
||||
Attempts: json.RawMessage(`{"bad":true}`),
|
||||
Metadata: json.RawMessage(`{"provider":"openai"}`),
|
||||
StartedAt: time.Now().UTC(),
|
||||
UpdatedAt: time.Now().UTC(),
|
||||
}
|
||||
|
||||
sdk := db2sdk.ChatDebugStep(step)
|
||||
require.Len(t, sdk.Attempts, 1)
|
||||
require.Equal(t, "malformed attempts payload", sdk.Attempts[0]["error"])
|
||||
require.Equal(t, `{"bad":true}`, sdk.Attempts[0]["raw"])
|
||||
}
|
||||
|
||||
func TestChatDebugRunSummary_PreservesMalformedSummary(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
run := database.ChatDebugRun{
|
||||
ID: uuid.New(),
|
||||
ChatID: uuid.New(),
|
||||
Kind: "chat_turn",
|
||||
Status: "completed",
|
||||
Summary: json.RawMessage(`not-an-object`),
|
||||
StartedAt: time.Now().UTC(),
|
||||
UpdatedAt: time.Now().UTC(),
|
||||
}
|
||||
|
||||
sdk := db2sdk.ChatDebugRunSummary(run)
|
||||
require.Equal(t, "malformed debug payload", sdk.Summary["error"])
|
||||
require.Equal(t, "not-an-object", sdk.Summary["raw"])
|
||||
}
|
||||
|
||||
func TestChatDebugStep_PreservesMalformedRequest(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
step := database.ChatDebugStep{
|
||||
ID: uuid.New(),
|
||||
RunID: uuid.New(),
|
||||
ChatID: uuid.New(),
|
||||
StepNumber: 1,
|
||||
Operation: "stream",
|
||||
Status: "completed",
|
||||
NormalizedRequest: json.RawMessage(`[1,2,3]`),
|
||||
Attempts: json.RawMessage(`[]`),
|
||||
Metadata: json.RawMessage(`"just-a-string"`),
|
||||
StartedAt: time.Now().UTC(),
|
||||
UpdatedAt: time.Now().UTC(),
|
||||
}
|
||||
|
||||
sdk := db2sdk.ChatDebugStep(step)
|
||||
require.Equal(t, "malformed debug payload", sdk.NormalizedRequest["error"])
|
||||
require.Equal(t, "[1,2,3]", sdk.NormalizedRequest["raw"])
|
||||
require.Equal(t, "malformed debug payload", sdk.Metadata["error"])
|
||||
require.Equal(t, `"just-a-string"`, sdk.Metadata["raw"])
|
||||
}
|
||||
|
||||
func TestAIBridgeInterception(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
|
||||
@@ -1860,28 +1860,6 @@ func (q *querier) DeleteApplicationConnectAPIKeysByUserID(ctx context.Context, u
|
||||
return q.db.DeleteApplicationConnectAPIKeysByUserID(ctx, userID)
|
||||
}
|
||||
|
||||
func (q *querier) DeleteChatDebugDataAfterMessageID(ctx context.Context, arg database.DeleteChatDebugDataAfterMessageIDParams) (int64, error) {
|
||||
chat, err := q.db.GetChatByID(ctx, arg.ChatID)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if err := q.authorizeContext(ctx, policy.ActionUpdate, chat); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return q.db.DeleteChatDebugDataAfterMessageID(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) DeleteChatDebugDataByChatID(ctx context.Context, chatID uuid.UUID) (int64, error) {
|
||||
chat, err := q.db.GetChatByID(ctx, chatID)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if err := q.authorizeContext(ctx, policy.ActionUpdate, chat); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return q.db.DeleteChatDebugDataByChatID(ctx, chatID)
|
||||
}
|
||||
|
||||
func (q *querier) DeleteChatModelConfigByID(ctx context.Context, id uuid.UUID) error {
|
||||
if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceDeploymentConfig); err != nil {
|
||||
return err
|
||||
@@ -2202,10 +2180,10 @@ func (q *querier) DeleteUserChatProviderKey(ctx context.Context, arg database.De
|
||||
return q.db.DeleteUserChatProviderKey(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) DeleteUserSecretByUserIDAndName(ctx context.Context, arg database.DeleteUserSecretByUserIDAndNameParams) (int64, error) {
|
||||
func (q *querier) DeleteUserSecretByUserIDAndName(ctx context.Context, arg database.DeleteUserSecretByUserIDAndNameParams) error {
|
||||
obj := rbac.ResourceUserSecret.WithOwner(arg.UserID.String())
|
||||
if err := q.authorizeContext(ctx, policy.ActionDelete, obj); err != nil {
|
||||
return 0, err
|
||||
return err
|
||||
}
|
||||
return q.db.DeleteUserSecretByUserIDAndName(ctx, arg)
|
||||
}
|
||||
@@ -2369,14 +2347,6 @@ func (q *querier) FetchVolumesResourceMonitorsUpdatedAfter(ctx context.Context,
|
||||
return q.db.FetchVolumesResourceMonitorsUpdatedAfter(ctx, updatedAt)
|
||||
}
|
||||
|
||||
func (q *querier) FinalizeStaleChatDebugRows(ctx context.Context, updatedBefore time.Time) (database.FinalizeStaleChatDebugRowsRow, error) {
|
||||
// Background sweep operates across all chats.
|
||||
if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceChat); err != nil {
|
||||
return database.FinalizeStaleChatDebugRowsRow{}, err
|
||||
}
|
||||
return q.db.FinalizeStaleChatDebugRows(ctx, updatedBefore)
|
||||
}
|
||||
|
||||
func (q *querier) FindMatchingPresetID(ctx context.Context, arg database.FindMatchingPresetIDParams) (uuid.UUID, error) {
|
||||
_, err := q.GetTemplateVersionByID(ctx, arg.TemplateVersionID)
|
||||
if err != nil {
|
||||
@@ -2585,59 +2555,6 @@ func (q *querier) GetChatCostSummary(ctx context.Context, arg database.GetChatCo
|
||||
return q.db.GetChatCostSummary(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) GetChatDebugLoggingAllowUsers(ctx context.Context) (bool, error) {
|
||||
// The allow-users flag is a deployment-wide setting read by any
|
||||
// authenticated chat user. We only require that an explicit actor
|
||||
// is present in the context so unauthenticated calls fail closed.
|
||||
if _, ok := ActorFromContext(ctx); !ok {
|
||||
return false, ErrNoActor
|
||||
}
|
||||
return q.db.GetChatDebugLoggingAllowUsers(ctx)
|
||||
}
|
||||
|
||||
func (q *querier) GetChatDebugRunByID(ctx context.Context, id uuid.UUID) (database.ChatDebugRun, error) {
|
||||
run, err := q.db.GetChatDebugRunByID(ctx, id)
|
||||
if err != nil {
|
||||
return database.ChatDebugRun{}, err
|
||||
}
|
||||
// Authorize via the owning chat.
|
||||
chat, err := q.db.GetChatByID(ctx, run.ChatID)
|
||||
if err != nil {
|
||||
return database.ChatDebugRun{}, err
|
||||
}
|
||||
if err := q.authorizeContext(ctx, policy.ActionRead, chat); err != nil {
|
||||
return database.ChatDebugRun{}, err
|
||||
}
|
||||
return run, nil
|
||||
}
|
||||
|
||||
func (q *querier) GetChatDebugRunsByChatID(ctx context.Context, arg database.GetChatDebugRunsByChatIDParams) ([]database.ChatDebugRun, error) {
|
||||
chat, err := q.db.GetChatByID(ctx, arg.ChatID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := q.authorizeContext(ctx, policy.ActionRead, chat); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return q.db.GetChatDebugRunsByChatID(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) GetChatDebugStepsByRunID(ctx context.Context, runID uuid.UUID) ([]database.ChatDebugStep, error) {
|
||||
run, err := q.db.GetChatDebugRunByID(ctx, runID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// Authorize via the owning chat.
|
||||
chat, err := q.db.GetChatByID(ctx, run.ChatID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := q.authorizeContext(ctx, policy.ActionRead, chat); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return q.db.GetChatDebugStepsByRunID(ctx, runID)
|
||||
}
|
||||
|
||||
func (q *querier) GetChatDesktopEnabled(ctx context.Context) (bool, error) {
|
||||
// The desktop-enabled flag is a deployment-wide setting read by any
|
||||
// authenticated chat user and by chatd when deciding whether to expose
|
||||
@@ -3484,11 +3401,11 @@ func (q *querier) GetPRInsightsPerModel(ctx context.Context, arg database.GetPRI
|
||||
return q.db.GetPRInsightsPerModel(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) GetPRInsightsPullRequests(ctx context.Context, arg database.GetPRInsightsPullRequestsParams) ([]database.GetPRInsightsPullRequestsRow, error) {
|
||||
func (q *querier) GetPRInsightsRecentPRs(ctx context.Context, arg database.GetPRInsightsRecentPRsParams) ([]database.GetPRInsightsRecentPRsRow, error) {
|
||||
if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceDeploymentConfig); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return q.db.GetPRInsightsPullRequests(ctx, arg)
|
||||
return q.db.GetPRInsightsRecentPRs(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) GetPRInsightsSummary(ctx context.Context, arg database.GetPRInsightsSummaryParams) (database.GetPRInsightsSummaryRow, error) {
|
||||
@@ -4186,17 +4103,6 @@ func (q *querier) GetUserChatCustomPrompt(ctx context.Context, userID uuid.UUID)
|
||||
return q.db.GetUserChatCustomPrompt(ctx, userID)
|
||||
}
|
||||
|
||||
func (q *querier) GetUserChatDebugLoggingEnabled(ctx context.Context, userID uuid.UUID) (bool, error) {
|
||||
u, err := q.db.GetUserByID(ctx, userID)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
if err := q.authorizeContext(ctx, policy.ActionReadPersonal, u); err != nil {
|
||||
return false, err
|
||||
}
|
||||
return q.db.GetUserChatDebugLoggingEnabled(ctx, userID)
|
||||
}
|
||||
|
||||
func (q *querier) GetUserChatProviderKeys(ctx context.Context, userID uuid.UUID) ([]database.UserChatProviderKey, error) {
|
||||
u, err := q.db.GetUserByID(ctx, userID)
|
||||
if err != nil {
|
||||
@@ -4943,33 +4849,6 @@ func (q *querier) InsertChat(ctx context.Context, arg database.InsertChatParams)
|
||||
return insert(q.log, q.auth, rbac.ResourceChat.WithOwner(arg.OwnerID.String()), q.db.InsertChat)(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) InsertChatDebugRun(ctx context.Context, arg database.InsertChatDebugRunParams) (database.ChatDebugRun, error) {
|
||||
chat, err := q.db.GetChatByID(ctx, arg.ChatID)
|
||||
if err != nil {
|
||||
return database.ChatDebugRun{}, err
|
||||
}
|
||||
if err := q.authorizeContext(ctx, policy.ActionUpdate, chat); err != nil {
|
||||
return database.ChatDebugRun{}, err
|
||||
}
|
||||
return q.db.InsertChatDebugRun(ctx, arg)
|
||||
}
|
||||
|
||||
// InsertChatDebugStep creates a new step in a debug run. The underlying
|
||||
// SQL uses INSERT ... SELECT ... FROM chat_debug_runs to enforce that the
|
||||
// run exists and belongs to the specified chat. If the run_id is invalid
|
||||
// or the chat_id doesn't match, the INSERT produces 0 rows and SQLC
|
||||
// returns sql.ErrNoRows.
|
||||
func (q *querier) InsertChatDebugStep(ctx context.Context, arg database.InsertChatDebugStepParams) (database.ChatDebugStep, error) {
|
||||
chat, err := q.db.GetChatByID(ctx, arg.ChatID)
|
||||
if err != nil {
|
||||
return database.ChatDebugStep{}, err
|
||||
}
|
||||
if err := q.authorizeContext(ctx, policy.ActionUpdate, chat); err != nil {
|
||||
return database.ChatDebugStep{}, err
|
||||
}
|
||||
return q.db.InsertChatDebugStep(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) InsertChatFile(ctx context.Context, arg database.InsertChatFileParams) (database.InsertChatFileRow, error) {
|
||||
// Authorize create on chat resource scoped to the owner and org.
|
||||
return insert(q.log, q.auth, rbac.ResourceChat.WithOwner(arg.OwnerID.String()).InOrg(arg.OrganizationID), q.db.InsertChatFile)(ctx, arg)
|
||||
@@ -5968,28 +5847,6 @@ func (q *querier) UpdateChatByID(ctx context.Context, arg database.UpdateChatByI
|
||||
return q.db.UpdateChatByID(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) UpdateChatDebugRun(ctx context.Context, arg database.UpdateChatDebugRunParams) (database.ChatDebugRun, error) {
|
||||
chat, err := q.db.GetChatByID(ctx, arg.ChatID)
|
||||
if err != nil {
|
||||
return database.ChatDebugRun{}, err
|
||||
}
|
||||
if err := q.authorizeContext(ctx, policy.ActionUpdate, chat); err != nil {
|
||||
return database.ChatDebugRun{}, err
|
||||
}
|
||||
return q.db.UpdateChatDebugRun(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) UpdateChatDebugStep(ctx context.Context, arg database.UpdateChatDebugStepParams) (database.ChatDebugStep, error) {
|
||||
chat, err := q.db.GetChatByID(ctx, arg.ChatID)
|
||||
if err != nil {
|
||||
return database.ChatDebugStep{}, err
|
||||
}
|
||||
if err := q.authorizeContext(ctx, policy.ActionUpdate, chat); err != nil {
|
||||
return database.ChatDebugStep{}, err
|
||||
}
|
||||
return q.db.UpdateChatDebugStep(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) UpdateChatHeartbeats(ctx context.Context, arg database.UpdateChatHeartbeatsParams) ([]uuid.UUID, error) {
|
||||
// The batch heartbeat is a system-level operation filtered by
|
||||
// worker_id. Authorization is enforced by the AsChatd context
|
||||
@@ -6926,19 +6783,6 @@ func (q *querier) UpdateWorkspaceAgentConnectionByID(ctx context.Context, arg da
|
||||
return q.db.UpdateWorkspaceAgentConnectionByID(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) UpdateWorkspaceAgentDirectoryByID(ctx context.Context, arg database.UpdateWorkspaceAgentDirectoryByIDParams) error {
|
||||
workspace, err := q.db.GetWorkspaceByAgentID(ctx, arg.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := q.authorizeContext(ctx, policy.ActionUpdateAgent, workspace); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return q.db.UpdateWorkspaceAgentDirectoryByID(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) UpdateWorkspaceAgentDisplayAppsByID(ctx context.Context, arg database.UpdateWorkspaceAgentDisplayAppsByIDParams) error {
|
||||
workspace, err := q.db.GetWorkspaceByAgentID(ctx, arg.ID)
|
||||
if err != nil {
|
||||
@@ -7222,13 +7066,6 @@ func (q *querier) UpsertBoundaryUsageStats(ctx context.Context, arg database.Ups
|
||||
return q.db.UpsertBoundaryUsageStats(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) UpsertChatDebugLoggingAllowUsers(ctx context.Context, allowUsers bool) error {
|
||||
if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceDeploymentConfig); err != nil {
|
||||
return err
|
||||
}
|
||||
return q.db.UpsertChatDebugLoggingAllowUsers(ctx, allowUsers)
|
||||
}
|
||||
|
||||
func (q *querier) UpsertChatDesktopEnabled(ctx context.Context, enableDesktop bool) error {
|
||||
if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceDeploymentConfig); err != nil {
|
||||
return err
|
||||
@@ -7459,17 +7296,6 @@ func (q *querier) UpsertTemplateUsageStats(ctx context.Context) error {
|
||||
return q.db.UpsertTemplateUsageStats(ctx)
|
||||
}
|
||||
|
||||
func (q *querier) UpsertUserChatDebugLoggingEnabled(ctx context.Context, arg database.UpsertUserChatDebugLoggingEnabledParams) error {
|
||||
u, err := q.db.GetUserByID(ctx, arg.UserID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := q.authorizeContext(ctx, policy.ActionUpdatePersonal, u); err != nil {
|
||||
return err
|
||||
}
|
||||
return q.db.UpsertUserChatDebugLoggingEnabled(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) UpsertUserChatProviderKey(ctx context.Context, arg database.UpsertUserChatProviderKeyParams) (database.UserChatProviderKey, error) {
|
||||
u, err := q.db.GetUserByID(ctx, arg.UserID)
|
||||
if err != nil {
|
||||
|
||||
@@ -461,89 +461,6 @@ func (s *MethodTestSuite) TestChats() {
|
||||
dbm.EXPECT().DeleteChatQueuedMessage(gomock.Any(), args).Return(nil).AnyTimes()
|
||||
check.Args(args).Asserts(chat, policy.ActionUpdate).Returns()
|
||||
}))
|
||||
s.Run("DeleteChatDebugDataAfterMessageID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
chat := testutil.Fake(s.T(), faker, database.Chat{})
|
||||
arg := database.DeleteChatDebugDataAfterMessageIDParams{ChatID: chat.ID, MessageID: 123}
|
||||
dbm.EXPECT().GetChatByID(gomock.Any(), chat.ID).Return(chat, nil).AnyTimes()
|
||||
dbm.EXPECT().DeleteChatDebugDataAfterMessageID(gomock.Any(), arg).Return(int64(1), nil).AnyTimes()
|
||||
check.Args(arg).Asserts(chat, policy.ActionUpdate).Returns(int64(1))
|
||||
}))
|
||||
s.Run("DeleteChatDebugDataByChatID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
chat := testutil.Fake(s.T(), faker, database.Chat{})
|
||||
dbm.EXPECT().GetChatByID(gomock.Any(), chat.ID).Return(chat, nil).AnyTimes()
|
||||
dbm.EXPECT().DeleteChatDebugDataByChatID(gomock.Any(), chat.ID).Return(int64(1), nil).AnyTimes()
|
||||
check.Args(chat.ID).Asserts(chat, policy.ActionUpdate).Returns(int64(1))
|
||||
}))
|
||||
s.Run("FinalizeStaleChatDebugRows", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) {
|
||||
updatedBefore := dbtime.Now()
|
||||
row := database.FinalizeStaleChatDebugRowsRow{RunsFinalized: 1, StepsFinalized: 2}
|
||||
dbm.EXPECT().FinalizeStaleChatDebugRows(gomock.Any(), updatedBefore).Return(row, nil).AnyTimes()
|
||||
check.Args(updatedBefore).Asserts(rbac.ResourceChat, policy.ActionUpdate).Returns(row)
|
||||
}))
|
||||
s.Run("GetChatDebugLoggingAllowUsers", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) {
|
||||
dbm.EXPECT().GetChatDebugLoggingAllowUsers(gomock.Any()).Return(true, nil).AnyTimes()
|
||||
check.Args().Asserts().Returns(true)
|
||||
}))
|
||||
s.Run("GetChatDebugRunByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
chat := testutil.Fake(s.T(), faker, database.Chat{})
|
||||
run := database.ChatDebugRun{ID: uuid.New(), ChatID: chat.ID}
|
||||
dbm.EXPECT().GetChatDebugRunByID(gomock.Any(), run.ID).Return(run, nil).AnyTimes()
|
||||
dbm.EXPECT().GetChatByID(gomock.Any(), chat.ID).Return(chat, nil).AnyTimes()
|
||||
check.Args(run.ID).Asserts(chat, policy.ActionRead).Returns(run)
|
||||
}))
|
||||
s.Run("GetChatDebugRunsByChatID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
chat := testutil.Fake(s.T(), faker, database.Chat{})
|
||||
runs := []database.ChatDebugRun{{ID: uuid.New(), ChatID: chat.ID}}
|
||||
arg := database.GetChatDebugRunsByChatIDParams{ChatID: chat.ID, LimitVal: 100}
|
||||
dbm.EXPECT().GetChatByID(gomock.Any(), chat.ID).Return(chat, nil).AnyTimes()
|
||||
dbm.EXPECT().GetChatDebugRunsByChatID(gomock.Any(), arg).Return(runs, nil).AnyTimes()
|
||||
check.Args(arg).Asserts(chat, policy.ActionRead).Returns(runs)
|
||||
}))
|
||||
s.Run("GetChatDebugStepsByRunID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
chat := testutil.Fake(s.T(), faker, database.Chat{})
|
||||
run := database.ChatDebugRun{ID: uuid.New(), ChatID: chat.ID}
|
||||
steps := []database.ChatDebugStep{{ID: uuid.New(), RunID: run.ID, ChatID: chat.ID}}
|
||||
dbm.EXPECT().GetChatDebugRunByID(gomock.Any(), run.ID).Return(run, nil).AnyTimes()
|
||||
dbm.EXPECT().GetChatByID(gomock.Any(), chat.ID).Return(chat, nil).AnyTimes()
|
||||
dbm.EXPECT().GetChatDebugStepsByRunID(gomock.Any(), run.ID).Return(steps, nil).AnyTimes()
|
||||
check.Args(run.ID).Asserts(chat, policy.ActionRead).Returns(steps)
|
||||
}))
|
||||
s.Run("InsertChatDebugRun", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
chat := testutil.Fake(s.T(), faker, database.Chat{})
|
||||
arg := database.InsertChatDebugRunParams{ChatID: chat.ID, Kind: "chat_turn", Status: "in_progress"}
|
||||
run := database.ChatDebugRun{ID: uuid.New(), ChatID: chat.ID}
|
||||
dbm.EXPECT().GetChatByID(gomock.Any(), chat.ID).Return(chat, nil).AnyTimes()
|
||||
dbm.EXPECT().InsertChatDebugRun(gomock.Any(), arg).Return(run, nil).AnyTimes()
|
||||
check.Args(arg).Asserts(chat, policy.ActionUpdate).Returns(run)
|
||||
}))
|
||||
s.Run("InsertChatDebugStep", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
chat := testutil.Fake(s.T(), faker, database.Chat{})
|
||||
arg := database.InsertChatDebugStepParams{RunID: uuid.New(), ChatID: chat.ID, StepNumber: 1, Operation: "stream", Status: "in_progress"}
|
||||
step := database.ChatDebugStep{ID: uuid.New(), RunID: arg.RunID, ChatID: chat.ID}
|
||||
dbm.EXPECT().GetChatByID(gomock.Any(), chat.ID).Return(chat, nil).AnyTimes()
|
||||
dbm.EXPECT().InsertChatDebugStep(gomock.Any(), arg).Return(step, nil).AnyTimes()
|
||||
check.Args(arg).Asserts(chat, policy.ActionUpdate).Returns(step)
|
||||
}))
|
||||
s.Run("UpdateChatDebugRun", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
chat := testutil.Fake(s.T(), faker, database.Chat{})
|
||||
arg := database.UpdateChatDebugRunParams{ID: uuid.New(), ChatID: chat.ID}
|
||||
run := database.ChatDebugRun{ID: arg.ID, ChatID: chat.ID}
|
||||
dbm.EXPECT().GetChatByID(gomock.Any(), chat.ID).Return(chat, nil).AnyTimes()
|
||||
dbm.EXPECT().UpdateChatDebugRun(gomock.Any(), arg).Return(run, nil).AnyTimes()
|
||||
check.Args(arg).Asserts(chat, policy.ActionUpdate).Returns(run)
|
||||
}))
|
||||
s.Run("UpdateChatDebugStep", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
chat := testutil.Fake(s.T(), faker, database.Chat{})
|
||||
arg := database.UpdateChatDebugStepParams{ID: uuid.New(), ChatID: chat.ID}
|
||||
step := database.ChatDebugStep{ID: arg.ID, ChatID: chat.ID}
|
||||
dbm.EXPECT().GetChatByID(gomock.Any(), chat.ID).Return(chat, nil).AnyTimes()
|
||||
dbm.EXPECT().UpdateChatDebugStep(gomock.Any(), arg).Return(step, nil).AnyTimes()
|
||||
check.Args(arg).Asserts(chat, policy.ActionUpdate).Returns(step)
|
||||
}))
|
||||
s.Run("UpsertChatDebugLoggingAllowUsers", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) {
|
||||
dbm.EXPECT().UpsertChatDebugLoggingAllowUsers(gomock.Any(), true).Return(nil).AnyTimes()
|
||||
check.Args(true).Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate)
|
||||
}))
|
||||
s.Run("GetChatByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
chat := testutil.Fake(s.T(), faker, database.Chat{})
|
||||
dbm.EXPECT().GetChatByID(gomock.Any(), chat.ID).Return(chat, nil).AnyTimes()
|
||||
@@ -2344,9 +2261,9 @@ func (s *MethodTestSuite) TestTemplate() {
|
||||
dbm.EXPECT().GetPRInsightsPerModel(gomock.Any(), arg).Return([]database.GetPRInsightsPerModelRow{}, nil).AnyTimes()
|
||||
check.Args(arg).Asserts(rbac.ResourceDeploymentConfig, policy.ActionRead)
|
||||
}))
|
||||
s.Run("GetPRInsightsPullRequests", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) {
|
||||
arg := database.GetPRInsightsPullRequestsParams{}
|
||||
dbm.EXPECT().GetPRInsightsPullRequests(gomock.Any(), arg).Return([]database.GetPRInsightsPullRequestsRow{}, nil).AnyTimes()
|
||||
s.Run("GetPRInsightsRecentPRs", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) {
|
||||
arg := database.GetPRInsightsRecentPRsParams{}
|
||||
dbm.EXPECT().GetPRInsightsRecentPRs(gomock.Any(), arg).Return([]database.GetPRInsightsRecentPRsRow{}, nil).AnyTimes()
|
||||
check.Args(arg).Asserts(rbac.ResourceDeploymentConfig, policy.ActionRead)
|
||||
}))
|
||||
s.Run("GetTelemetryTaskEvents", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) {
|
||||
@@ -2577,19 +2494,6 @@ func (s *MethodTestSuite) TestUser() {
|
||||
dbm.EXPECT().UpsertUserChatProviderKey(gomock.Any(), arg).Return(key, nil).AnyTimes()
|
||||
check.Args(arg).Asserts(u, policy.ActionUpdatePersonal).Returns(key)
|
||||
}))
|
||||
s.Run("GetUserChatDebugLoggingEnabled", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
u := testutil.Fake(s.T(), faker, database.User{})
|
||||
dbm.EXPECT().GetUserByID(gomock.Any(), u.ID).Return(u, nil).AnyTimes()
|
||||
dbm.EXPECT().GetUserChatDebugLoggingEnabled(gomock.Any(), u.ID).Return(true, nil).AnyTimes()
|
||||
check.Args(u.ID).Asserts(u, policy.ActionReadPersonal).Returns(true)
|
||||
}))
|
||||
s.Run("UpsertUserChatDebugLoggingEnabled", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
u := testutil.Fake(s.T(), faker, database.User{})
|
||||
arg := database.UpsertUserChatDebugLoggingEnabledParams{UserID: u.ID, DebugLoggingEnabled: true}
|
||||
dbm.EXPECT().GetUserByID(gomock.Any(), u.ID).Return(u, nil).AnyTimes()
|
||||
dbm.EXPECT().UpsertUserChatDebugLoggingEnabled(gomock.Any(), arg).Return(nil).AnyTimes()
|
||||
check.Args(arg).Asserts(u, policy.ActionUpdatePersonal)
|
||||
}))
|
||||
s.Run("UpdateUserChatCustomPrompt", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
u := testutil.Fake(s.T(), faker, database.User{})
|
||||
uc := database.UserConfig{UserID: u.ID, Key: "chat_custom_prompt", Value: "my custom prompt"}
|
||||
@@ -3031,17 +2935,6 @@ func (s *MethodTestSuite) TestWorkspace() {
|
||||
dbm.EXPECT().UpdateWorkspaceAgentStartupByID(gomock.Any(), arg).Return(nil).AnyTimes()
|
||||
check.Args(arg).Asserts(w, policy.ActionUpdate).Returns()
|
||||
}))
|
||||
s.Run("UpdateWorkspaceAgentDirectoryByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
w := testutil.Fake(s.T(), faker, database.Workspace{})
|
||||
agt := testutil.Fake(s.T(), faker, database.WorkspaceAgent{})
|
||||
arg := database.UpdateWorkspaceAgentDirectoryByIDParams{
|
||||
ID: agt.ID,
|
||||
Directory: "/workspaces/project",
|
||||
}
|
||||
dbm.EXPECT().GetWorkspaceByAgentID(gomock.Any(), agt.ID).Return(w, nil).AnyTimes()
|
||||
dbm.EXPECT().UpdateWorkspaceAgentDirectoryByID(gomock.Any(), arg).Return(nil).AnyTimes()
|
||||
check.Args(arg).Asserts(w, policy.ActionUpdateAgent).Returns()
|
||||
}))
|
||||
s.Run("UpdateWorkspaceAgentDisplayAppsByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
w := testutil.Fake(s.T(), faker, database.Workspace{})
|
||||
agt := testutil.Fake(s.T(), faker, database.WorkspaceAgent{})
|
||||
@@ -5538,10 +5431,10 @@ func (s *MethodTestSuite) TestUserSecrets() {
|
||||
s.Run("DeleteUserSecretByUserIDAndName", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
|
||||
user := testutil.Fake(s.T(), faker, database.User{})
|
||||
arg := database.DeleteUserSecretByUserIDAndNameParams{UserID: user.ID, Name: "test"}
|
||||
dbm.EXPECT().DeleteUserSecretByUserIDAndName(gomock.Any(), arg).Return(int64(1), nil).AnyTimes()
|
||||
dbm.EXPECT().DeleteUserSecretByUserIDAndName(gomock.Any(), arg).Return(nil).AnyTimes()
|
||||
check.Args(arg).
|
||||
Asserts(rbac.ResourceUserSecret.WithOwner(user.ID.String()), policy.ActionDelete).
|
||||
Returns(int64(1))
|
||||
Returns()
|
||||
}))
|
||||
}
|
||||
|
||||
|
||||
@@ -416,22 +416,6 @@ func (m queryMetricsStore) DeleteApplicationConnectAPIKeysByUserID(ctx context.C
|
||||
return r0
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) DeleteChatDebugDataAfterMessageID(ctx context.Context, arg database.DeleteChatDebugDataAfterMessageIDParams) (int64, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.DeleteChatDebugDataAfterMessageID(ctx, arg)
|
||||
m.queryLatencies.WithLabelValues("DeleteChatDebugDataAfterMessageID").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "DeleteChatDebugDataAfterMessageID").Inc()
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) DeleteChatDebugDataByChatID(ctx context.Context, chatID uuid.UUID) (int64, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.DeleteChatDebugDataByChatID(ctx, chatID)
|
||||
m.queryLatencies.WithLabelValues("DeleteChatDebugDataByChatID").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "DeleteChatDebugDataByChatID").Inc()
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) DeleteChatModelConfigByID(ctx context.Context, id uuid.UUID) error {
|
||||
start := time.Now()
|
||||
r0 := m.s.DeleteChatModelConfigByID(ctx, id)
|
||||
@@ -752,12 +736,12 @@ func (m queryMetricsStore) DeleteUserChatProviderKey(ctx context.Context, arg da
|
||||
return r0
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) DeleteUserSecretByUserIDAndName(ctx context.Context, arg database.DeleteUserSecretByUserIDAndNameParams) (int64, error) {
|
||||
func (m queryMetricsStore) DeleteUserSecretByUserIDAndName(ctx context.Context, arg database.DeleteUserSecretByUserIDAndNameParams) error {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.DeleteUserSecretByUserIDAndName(ctx, arg)
|
||||
r0 := m.s.DeleteUserSecretByUserIDAndName(ctx, arg)
|
||||
m.queryLatencies.WithLabelValues("DeleteUserSecretByUserIDAndName").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "DeleteUserSecretByUserIDAndName").Inc()
|
||||
return r0, r1
|
||||
return r0
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) DeleteWebpushSubscriptionByUserIDAndEndpoint(ctx context.Context, arg database.DeleteWebpushSubscriptionByUserIDAndEndpointParams) error {
|
||||
@@ -888,14 +872,6 @@ func (m queryMetricsStore) FetchVolumesResourceMonitorsUpdatedAfter(ctx context.
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) FinalizeStaleChatDebugRows(ctx context.Context, updatedBefore time.Time) (database.FinalizeStaleChatDebugRowsRow, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.FinalizeStaleChatDebugRows(ctx, updatedBefore)
|
||||
m.queryLatencies.WithLabelValues("FinalizeStaleChatDebugRows").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "FinalizeStaleChatDebugRows").Inc()
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) FindMatchingPresetID(ctx context.Context, arg database.FindMatchingPresetIDParams) (uuid.UUID, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.FindMatchingPresetID(ctx, arg)
|
||||
@@ -1152,38 +1128,6 @@ func (m queryMetricsStore) GetChatCostSummary(ctx context.Context, arg database.
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) GetChatDebugLoggingAllowUsers(ctx context.Context) (bool, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.GetChatDebugLoggingAllowUsers(ctx)
|
||||
m.queryLatencies.WithLabelValues("GetChatDebugLoggingAllowUsers").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "GetChatDebugLoggingAllowUsers").Inc()
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) GetChatDebugRunByID(ctx context.Context, id uuid.UUID) (database.ChatDebugRun, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.GetChatDebugRunByID(ctx, id)
|
||||
m.queryLatencies.WithLabelValues("GetChatDebugRunByID").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "GetChatDebugRunByID").Inc()
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) GetChatDebugRunsByChatID(ctx context.Context, chatID database.GetChatDebugRunsByChatIDParams) ([]database.ChatDebugRun, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.GetChatDebugRunsByChatID(ctx, chatID)
|
||||
m.queryLatencies.WithLabelValues("GetChatDebugRunsByChatID").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "GetChatDebugRunsByChatID").Inc()
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) GetChatDebugStepsByRunID(ctx context.Context, runID uuid.UUID) ([]database.ChatDebugStep, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.GetChatDebugStepsByRunID(ctx, runID)
|
||||
m.queryLatencies.WithLabelValues("GetChatDebugStepsByRunID").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "GetChatDebugStepsByRunID").Inc()
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) GetChatDesktopEnabled(ctx context.Context) (bool, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.GetChatDesktopEnabled(ctx)
|
||||
@@ -2048,11 +1992,11 @@ func (m queryMetricsStore) GetPRInsightsPerModel(ctx context.Context, arg databa
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) GetPRInsightsPullRequests(ctx context.Context, arg database.GetPRInsightsPullRequestsParams) ([]database.GetPRInsightsPullRequestsRow, error) {
|
||||
func (m queryMetricsStore) GetPRInsightsRecentPRs(ctx context.Context, arg database.GetPRInsightsRecentPRsParams) ([]database.GetPRInsightsRecentPRsRow, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.GetPRInsightsPullRequests(ctx, arg)
|
||||
m.queryLatencies.WithLabelValues("GetPRInsightsPullRequests").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "GetPRInsightsPullRequests").Inc()
|
||||
r0, r1 := m.s.GetPRInsightsRecentPRs(ctx, arg)
|
||||
m.queryLatencies.WithLabelValues("GetPRInsightsRecentPRs").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "GetPRInsightsRecentPRs").Inc()
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
@@ -2672,14 +2616,6 @@ func (m queryMetricsStore) GetUserChatCustomPrompt(ctx context.Context, userID u
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) GetUserChatDebugLoggingEnabled(ctx context.Context, userID uuid.UUID) (bool, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.GetUserChatDebugLoggingEnabled(ctx, userID)
|
||||
m.queryLatencies.WithLabelValues("GetUserChatDebugLoggingEnabled").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "GetUserChatDebugLoggingEnabled").Inc()
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) GetUserChatProviderKeys(ctx context.Context, userID uuid.UUID) ([]database.UserChatProviderKey, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.GetUserChatProviderKeys(ctx, userID)
|
||||
@@ -3376,22 +3312,6 @@ func (m queryMetricsStore) InsertChat(ctx context.Context, arg database.InsertCh
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) InsertChatDebugRun(ctx context.Context, arg database.InsertChatDebugRunParams) (database.ChatDebugRun, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.InsertChatDebugRun(ctx, arg)
|
||||
m.queryLatencies.WithLabelValues("InsertChatDebugRun").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "InsertChatDebugRun").Inc()
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) InsertChatDebugStep(ctx context.Context, arg database.InsertChatDebugStepParams) (database.ChatDebugStep, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.InsertChatDebugStep(ctx, arg)
|
||||
m.queryLatencies.WithLabelValues("InsertChatDebugStep").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "InsertChatDebugStep").Inc()
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) InsertChatFile(ctx context.Context, arg database.InsertChatFileParams) (database.InsertChatFileRow, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.InsertChatFile(ctx, arg)
|
||||
@@ -4288,22 +4208,6 @@ func (m queryMetricsStore) UpdateChatByID(ctx context.Context, arg database.Upda
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) UpdateChatDebugRun(ctx context.Context, arg database.UpdateChatDebugRunParams) (database.ChatDebugRun, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.UpdateChatDebugRun(ctx, arg)
|
||||
m.queryLatencies.WithLabelValues("UpdateChatDebugRun").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "UpdateChatDebugRun").Inc()
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) UpdateChatDebugStep(ctx context.Context, arg database.UpdateChatDebugStepParams) (database.ChatDebugStep, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.UpdateChatDebugStep(ctx, arg)
|
||||
m.queryLatencies.WithLabelValues("UpdateChatDebugStep").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "UpdateChatDebugStep").Inc()
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) UpdateChatHeartbeats(ctx context.Context, arg database.UpdateChatHeartbeatsParams) ([]uuid.UUID, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.UpdateChatHeartbeats(ctx, arg)
|
||||
@@ -4936,14 +4840,6 @@ func (m queryMetricsStore) UpdateWorkspaceAgentConnectionByID(ctx context.Contex
|
||||
return r0
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) UpdateWorkspaceAgentDirectoryByID(ctx context.Context, arg database.UpdateWorkspaceAgentDirectoryByIDParams) error {
|
||||
start := time.Now()
|
||||
r0 := m.s.UpdateWorkspaceAgentDirectoryByID(ctx, arg)
|
||||
m.queryLatencies.WithLabelValues("UpdateWorkspaceAgentDirectoryByID").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "UpdateWorkspaceAgentDirectoryByID").Inc()
|
||||
return r0
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) UpdateWorkspaceAgentDisplayAppsByID(ctx context.Context, arg database.UpdateWorkspaceAgentDisplayAppsByIDParams) error {
|
||||
start := time.Now()
|
||||
r0 := m.s.UpdateWorkspaceAgentDisplayAppsByID(ctx, arg)
|
||||
@@ -5144,14 +5040,6 @@ func (m queryMetricsStore) UpsertBoundaryUsageStats(ctx context.Context, arg dat
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) UpsertChatDebugLoggingAllowUsers(ctx context.Context, allowUsers bool) error {
|
||||
start := time.Now()
|
||||
r0 := m.s.UpsertChatDebugLoggingAllowUsers(ctx, allowUsers)
|
||||
m.queryLatencies.WithLabelValues("UpsertChatDebugLoggingAllowUsers").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "UpsertChatDebugLoggingAllowUsers").Inc()
|
||||
return r0
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) UpsertChatDesktopEnabled(ctx context.Context, enableDesktop bool) error {
|
||||
start := time.Now()
|
||||
r0 := m.s.UpsertChatDesktopEnabled(ctx, enableDesktop)
|
||||
@@ -5384,14 +5272,6 @@ func (m queryMetricsStore) UpsertTemplateUsageStats(ctx context.Context) error {
|
||||
return r0
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) UpsertUserChatDebugLoggingEnabled(ctx context.Context, arg database.UpsertUserChatDebugLoggingEnabledParams) error {
|
||||
start := time.Now()
|
||||
r0 := m.s.UpsertUserChatDebugLoggingEnabled(ctx, arg)
|
||||
m.queryLatencies.WithLabelValues("UpsertUserChatDebugLoggingEnabled").Observe(time.Since(start).Seconds())
|
||||
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "UpsertUserChatDebugLoggingEnabled").Inc()
|
||||
return r0
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) UpsertUserChatProviderKey(ctx context.Context, arg database.UpsertUserChatProviderKeyParams) (database.UserChatProviderKey, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.UpsertUserChatProviderKey(ctx, arg)
|
||||
|
||||
@@ -671,36 +671,6 @@ func (mr *MockStoreMockRecorder) DeleteApplicationConnectAPIKeysByUserID(ctx, us
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteApplicationConnectAPIKeysByUserID", reflect.TypeOf((*MockStore)(nil).DeleteApplicationConnectAPIKeysByUserID), ctx, userID)
|
||||
}
|
||||
|
||||
// DeleteChatDebugDataAfterMessageID mocks base method.
|
||||
func (m *MockStore) DeleteChatDebugDataAfterMessageID(ctx context.Context, arg database.DeleteChatDebugDataAfterMessageIDParams) (int64, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "DeleteChatDebugDataAfterMessageID", ctx, arg)
|
||||
ret0, _ := ret[0].(int64)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// DeleteChatDebugDataAfterMessageID indicates an expected call of DeleteChatDebugDataAfterMessageID.
|
||||
func (mr *MockStoreMockRecorder) DeleteChatDebugDataAfterMessageID(ctx, arg any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteChatDebugDataAfterMessageID", reflect.TypeOf((*MockStore)(nil).DeleteChatDebugDataAfterMessageID), ctx, arg)
|
||||
}
|
||||
|
||||
// DeleteChatDebugDataByChatID mocks base method.
|
||||
func (m *MockStore) DeleteChatDebugDataByChatID(ctx context.Context, chatID uuid.UUID) (int64, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "DeleteChatDebugDataByChatID", ctx, chatID)
|
||||
ret0, _ := ret[0].(int64)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// DeleteChatDebugDataByChatID indicates an expected call of DeleteChatDebugDataByChatID.
|
||||
func (mr *MockStoreMockRecorder) DeleteChatDebugDataByChatID(ctx, chatID any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteChatDebugDataByChatID", reflect.TypeOf((*MockStore)(nil).DeleteChatDebugDataByChatID), ctx, chatID)
|
||||
}
|
||||
|
||||
// DeleteChatModelConfigByID mocks base method.
|
||||
func (m *MockStore) DeleteChatModelConfigByID(ctx context.Context, id uuid.UUID) error {
|
||||
m.ctrl.T.Helper()
|
||||
@@ -1274,12 +1244,11 @@ func (mr *MockStoreMockRecorder) DeleteUserChatProviderKey(ctx, arg any) *gomock
|
||||
}
|
||||
|
||||
// DeleteUserSecretByUserIDAndName mocks base method.
|
||||
func (m *MockStore) DeleteUserSecretByUserIDAndName(ctx context.Context, arg database.DeleteUserSecretByUserIDAndNameParams) (int64, error) {
|
||||
func (m *MockStore) DeleteUserSecretByUserIDAndName(ctx context.Context, arg database.DeleteUserSecretByUserIDAndNameParams) error {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "DeleteUserSecretByUserIDAndName", ctx, arg)
|
||||
ret0, _ := ret[0].(int64)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
ret0, _ := ret[0].(error)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// DeleteUserSecretByUserIDAndName indicates an expected call of DeleteUserSecretByUserIDAndName.
|
||||
@@ -1517,21 +1486,6 @@ func (mr *MockStoreMockRecorder) FetchVolumesResourceMonitorsUpdatedAfter(ctx, u
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FetchVolumesResourceMonitorsUpdatedAfter", reflect.TypeOf((*MockStore)(nil).FetchVolumesResourceMonitorsUpdatedAfter), ctx, updatedAt)
|
||||
}
|
||||
|
||||
// FinalizeStaleChatDebugRows mocks base method.
|
||||
func (m *MockStore) FinalizeStaleChatDebugRows(ctx context.Context, updatedBefore time.Time) (database.FinalizeStaleChatDebugRowsRow, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "FinalizeStaleChatDebugRows", ctx, updatedBefore)
|
||||
ret0, _ := ret[0].(database.FinalizeStaleChatDebugRowsRow)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// FinalizeStaleChatDebugRows indicates an expected call of FinalizeStaleChatDebugRows.
|
||||
func (mr *MockStoreMockRecorder) FinalizeStaleChatDebugRows(ctx, updatedBefore any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FinalizeStaleChatDebugRows", reflect.TypeOf((*MockStore)(nil).FinalizeStaleChatDebugRows), ctx, updatedBefore)
|
||||
}
|
||||
|
||||
// FindMatchingPresetID mocks base method.
|
||||
func (m *MockStore) FindMatchingPresetID(ctx context.Context, arg database.FindMatchingPresetIDParams) (uuid.UUID, error) {
|
||||
m.ctrl.T.Helper()
|
||||
@@ -2117,66 +2071,6 @@ func (mr *MockStoreMockRecorder) GetChatCostSummary(ctx, arg any) *gomock.Call {
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetChatCostSummary", reflect.TypeOf((*MockStore)(nil).GetChatCostSummary), ctx, arg)
|
||||
}
|
||||
|
||||
// GetChatDebugLoggingAllowUsers mocks base method.
|
||||
func (m *MockStore) GetChatDebugLoggingAllowUsers(ctx context.Context) (bool, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "GetChatDebugLoggingAllowUsers", ctx)
|
||||
ret0, _ := ret[0].(bool)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// GetChatDebugLoggingAllowUsers indicates an expected call of GetChatDebugLoggingAllowUsers.
|
||||
func (mr *MockStoreMockRecorder) GetChatDebugLoggingAllowUsers(ctx any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetChatDebugLoggingAllowUsers", reflect.TypeOf((*MockStore)(nil).GetChatDebugLoggingAllowUsers), ctx)
|
||||
}
|
||||
|
||||
// GetChatDebugRunByID mocks base method.
|
||||
func (m *MockStore) GetChatDebugRunByID(ctx context.Context, id uuid.UUID) (database.ChatDebugRun, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "GetChatDebugRunByID", ctx, id)
|
||||
ret0, _ := ret[0].(database.ChatDebugRun)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// GetChatDebugRunByID indicates an expected call of GetChatDebugRunByID.
|
||||
func (mr *MockStoreMockRecorder) GetChatDebugRunByID(ctx, id any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetChatDebugRunByID", reflect.TypeOf((*MockStore)(nil).GetChatDebugRunByID), ctx, id)
|
||||
}
|
||||
|
||||
// GetChatDebugRunsByChatID mocks base method.
|
||||
func (m *MockStore) GetChatDebugRunsByChatID(ctx context.Context, arg database.GetChatDebugRunsByChatIDParams) ([]database.ChatDebugRun, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "GetChatDebugRunsByChatID", ctx, arg)
|
||||
ret0, _ := ret[0].([]database.ChatDebugRun)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// GetChatDebugRunsByChatID indicates an expected call of GetChatDebugRunsByChatID.
|
||||
func (mr *MockStoreMockRecorder) GetChatDebugRunsByChatID(ctx, arg any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetChatDebugRunsByChatID", reflect.TypeOf((*MockStore)(nil).GetChatDebugRunsByChatID), ctx, arg)
|
||||
}
|
||||
|
||||
// GetChatDebugStepsByRunID mocks base method.
|
||||
func (m *MockStore) GetChatDebugStepsByRunID(ctx context.Context, runID uuid.UUID) ([]database.ChatDebugStep, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "GetChatDebugStepsByRunID", ctx, runID)
|
||||
ret0, _ := ret[0].([]database.ChatDebugStep)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// GetChatDebugStepsByRunID indicates an expected call of GetChatDebugStepsByRunID.
|
||||
func (mr *MockStoreMockRecorder) GetChatDebugStepsByRunID(ctx, runID any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetChatDebugStepsByRunID", reflect.TypeOf((*MockStore)(nil).GetChatDebugStepsByRunID), ctx, runID)
|
||||
}
|
||||
|
||||
// GetChatDesktopEnabled mocks base method.
|
||||
func (m *MockStore) GetChatDesktopEnabled(ctx context.Context) (bool, error) {
|
||||
m.ctrl.T.Helper()
|
||||
@@ -3797,19 +3691,19 @@ func (mr *MockStoreMockRecorder) GetPRInsightsPerModel(ctx, arg any) *gomock.Cal
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetPRInsightsPerModel", reflect.TypeOf((*MockStore)(nil).GetPRInsightsPerModel), ctx, arg)
|
||||
}
|
||||
|
||||
// GetPRInsightsPullRequests mocks base method.
|
||||
func (m *MockStore) GetPRInsightsPullRequests(ctx context.Context, arg database.GetPRInsightsPullRequestsParams) ([]database.GetPRInsightsPullRequestsRow, error) {
|
||||
// GetPRInsightsRecentPRs mocks base method.
|
||||
func (m *MockStore) GetPRInsightsRecentPRs(ctx context.Context, arg database.GetPRInsightsRecentPRsParams) ([]database.GetPRInsightsRecentPRsRow, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "GetPRInsightsPullRequests", ctx, arg)
|
||||
ret0, _ := ret[0].([]database.GetPRInsightsPullRequestsRow)
|
||||
ret := m.ctrl.Call(m, "GetPRInsightsRecentPRs", ctx, arg)
|
||||
ret0, _ := ret[0].([]database.GetPRInsightsRecentPRsRow)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// GetPRInsightsPullRequests indicates an expected call of GetPRInsightsPullRequests.
|
||||
func (mr *MockStoreMockRecorder) GetPRInsightsPullRequests(ctx, arg any) *gomock.Call {
|
||||
// GetPRInsightsRecentPRs indicates an expected call of GetPRInsightsRecentPRs.
|
||||
func (mr *MockStoreMockRecorder) GetPRInsightsRecentPRs(ctx, arg any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetPRInsightsPullRequests", reflect.TypeOf((*MockStore)(nil).GetPRInsightsPullRequests), ctx, arg)
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetPRInsightsRecentPRs", reflect.TypeOf((*MockStore)(nil).GetPRInsightsRecentPRs), ctx, arg)
|
||||
}
|
||||
|
||||
// GetPRInsightsSummary mocks base method.
|
||||
@@ -4997,21 +4891,6 @@ func (mr *MockStoreMockRecorder) GetUserChatCustomPrompt(ctx, userID any) *gomoc
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetUserChatCustomPrompt", reflect.TypeOf((*MockStore)(nil).GetUserChatCustomPrompt), ctx, userID)
|
||||
}
|
||||
|
||||
// GetUserChatDebugLoggingEnabled mocks base method.
|
||||
func (m *MockStore) GetUserChatDebugLoggingEnabled(ctx context.Context, userID uuid.UUID) (bool, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "GetUserChatDebugLoggingEnabled", ctx, userID)
|
||||
ret0, _ := ret[0].(bool)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// GetUserChatDebugLoggingEnabled indicates an expected call of GetUserChatDebugLoggingEnabled.
|
||||
func (mr *MockStoreMockRecorder) GetUserChatDebugLoggingEnabled(ctx, userID any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetUserChatDebugLoggingEnabled", reflect.TypeOf((*MockStore)(nil).GetUserChatDebugLoggingEnabled), ctx, userID)
|
||||
}
|
||||
|
||||
// GetUserChatProviderKeys mocks base method.
|
||||
func (m *MockStore) GetUserChatProviderKeys(ctx context.Context, userID uuid.UUID) ([]database.UserChatProviderKey, error) {
|
||||
m.ctrl.T.Helper()
|
||||
@@ -6331,36 +6210,6 @@ func (mr *MockStoreMockRecorder) InsertChat(ctx, arg any) *gomock.Call {
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertChat", reflect.TypeOf((*MockStore)(nil).InsertChat), ctx, arg)
|
||||
}
|
||||
|
||||
// InsertChatDebugRun mocks base method.
|
||||
func (m *MockStore) InsertChatDebugRun(ctx context.Context, arg database.InsertChatDebugRunParams) (database.ChatDebugRun, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "InsertChatDebugRun", ctx, arg)
|
||||
ret0, _ := ret[0].(database.ChatDebugRun)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// InsertChatDebugRun indicates an expected call of InsertChatDebugRun.
|
||||
func (mr *MockStoreMockRecorder) InsertChatDebugRun(ctx, arg any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertChatDebugRun", reflect.TypeOf((*MockStore)(nil).InsertChatDebugRun), ctx, arg)
|
||||
}
|
||||
|
||||
// InsertChatDebugStep mocks base method.
|
||||
func (m *MockStore) InsertChatDebugStep(ctx context.Context, arg database.InsertChatDebugStepParams) (database.ChatDebugStep, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "InsertChatDebugStep", ctx, arg)
|
||||
ret0, _ := ret[0].(database.ChatDebugStep)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// InsertChatDebugStep indicates an expected call of InsertChatDebugStep.
|
||||
func (mr *MockStoreMockRecorder) InsertChatDebugStep(ctx, arg any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertChatDebugStep", reflect.TypeOf((*MockStore)(nil).InsertChatDebugStep), ctx, arg)
|
||||
}
|
||||
|
||||
// InsertChatFile mocks base method.
|
||||
func (m *MockStore) InsertChatFile(ctx context.Context, arg database.InsertChatFileParams) (database.InsertChatFileRow, error) {
|
||||
m.ctrl.T.Helper()
|
||||
@@ -8119,36 +7968,6 @@ func (mr *MockStoreMockRecorder) UpdateChatByID(ctx, arg any) *gomock.Call {
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateChatByID", reflect.TypeOf((*MockStore)(nil).UpdateChatByID), ctx, arg)
|
||||
}
|
||||
|
||||
// UpdateChatDebugRun mocks base method.
|
||||
func (m *MockStore) UpdateChatDebugRun(ctx context.Context, arg database.UpdateChatDebugRunParams) (database.ChatDebugRun, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "UpdateChatDebugRun", ctx, arg)
|
||||
ret0, _ := ret[0].(database.ChatDebugRun)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// UpdateChatDebugRun indicates an expected call of UpdateChatDebugRun.
|
||||
func (mr *MockStoreMockRecorder) UpdateChatDebugRun(ctx, arg any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateChatDebugRun", reflect.TypeOf((*MockStore)(nil).UpdateChatDebugRun), ctx, arg)
|
||||
}
|
||||
|
||||
// UpdateChatDebugStep mocks base method.
|
||||
func (m *MockStore) UpdateChatDebugStep(ctx context.Context, arg database.UpdateChatDebugStepParams) (database.ChatDebugStep, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "UpdateChatDebugStep", ctx, arg)
|
||||
ret0, _ := ret[0].(database.ChatDebugStep)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// UpdateChatDebugStep indicates an expected call of UpdateChatDebugStep.
|
||||
func (mr *MockStoreMockRecorder) UpdateChatDebugStep(ctx, arg any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateChatDebugStep", reflect.TypeOf((*MockStore)(nil).UpdateChatDebugStep), ctx, arg)
|
||||
}
|
||||
|
||||
// UpdateChatHeartbeats mocks base method.
|
||||
func (m *MockStore) UpdateChatHeartbeats(ctx context.Context, arg database.UpdateChatHeartbeatsParams) ([]uuid.UUID, error) {
|
||||
m.ctrl.T.Helper()
|
||||
@@ -9300,20 +9119,6 @@ func (mr *MockStoreMockRecorder) UpdateWorkspaceAgentConnectionByID(ctx, arg any
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateWorkspaceAgentConnectionByID", reflect.TypeOf((*MockStore)(nil).UpdateWorkspaceAgentConnectionByID), ctx, arg)
|
||||
}
|
||||
|
||||
// UpdateWorkspaceAgentDirectoryByID mocks base method.
|
||||
func (m *MockStore) UpdateWorkspaceAgentDirectoryByID(ctx context.Context, arg database.UpdateWorkspaceAgentDirectoryByIDParams) error {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "UpdateWorkspaceAgentDirectoryByID", ctx, arg)
|
||||
ret0, _ := ret[0].(error)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// UpdateWorkspaceAgentDirectoryByID indicates an expected call of UpdateWorkspaceAgentDirectoryByID.
|
||||
func (mr *MockStoreMockRecorder) UpdateWorkspaceAgentDirectoryByID(ctx, arg any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateWorkspaceAgentDirectoryByID", reflect.TypeOf((*MockStore)(nil).UpdateWorkspaceAgentDirectoryByID), ctx, arg)
|
||||
}
|
||||
|
||||
// UpdateWorkspaceAgentDisplayAppsByID mocks base method.
|
||||
func (m *MockStore) UpdateWorkspaceAgentDisplayAppsByID(ctx context.Context, arg database.UpdateWorkspaceAgentDisplayAppsByIDParams) error {
|
||||
m.ctrl.T.Helper()
|
||||
@@ -9669,20 +9474,6 @@ func (mr *MockStoreMockRecorder) UpsertBoundaryUsageStats(ctx, arg any) *gomock.
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertBoundaryUsageStats", reflect.TypeOf((*MockStore)(nil).UpsertBoundaryUsageStats), ctx, arg)
|
||||
}
|
||||
|
||||
// UpsertChatDebugLoggingAllowUsers mocks base method.
|
||||
func (m *MockStore) UpsertChatDebugLoggingAllowUsers(ctx context.Context, allowUsers bool) error {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "UpsertChatDebugLoggingAllowUsers", ctx, allowUsers)
|
||||
ret0, _ := ret[0].(error)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// UpsertChatDebugLoggingAllowUsers indicates an expected call of UpsertChatDebugLoggingAllowUsers.
|
||||
func (mr *MockStoreMockRecorder) UpsertChatDebugLoggingAllowUsers(ctx, allowUsers any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertChatDebugLoggingAllowUsers", reflect.TypeOf((*MockStore)(nil).UpsertChatDebugLoggingAllowUsers), ctx, allowUsers)
|
||||
}
|
||||
|
||||
// UpsertChatDesktopEnabled mocks base method.
|
||||
func (m *MockStore) UpsertChatDesktopEnabled(ctx context.Context, enableDesktop bool) error {
|
||||
m.ctrl.T.Helper()
|
||||
@@ -10100,20 +9891,6 @@ func (mr *MockStoreMockRecorder) UpsertTemplateUsageStats(ctx any) *gomock.Call
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertTemplateUsageStats", reflect.TypeOf((*MockStore)(nil).UpsertTemplateUsageStats), ctx)
|
||||
}
|
||||
|
||||
// UpsertUserChatDebugLoggingEnabled mocks base method.
|
||||
func (m *MockStore) UpsertUserChatDebugLoggingEnabled(ctx context.Context, arg database.UpsertUserChatDebugLoggingEnabledParams) error {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "UpsertUserChatDebugLoggingEnabled", ctx, arg)
|
||||
ret0, _ := ret[0].(error)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// UpsertUserChatDebugLoggingEnabled indicates an expected call of UpsertUserChatDebugLoggingEnabled.
|
||||
func (mr *MockStoreMockRecorder) UpsertUserChatDebugLoggingEnabled(ctx, arg any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertUserChatDebugLoggingEnabled", reflect.TypeOf((*MockStore)(nil).UpsertUserChatDebugLoggingEnabled), ctx, arg)
|
||||
}
|
||||
|
||||
// UpsertUserChatProviderKey mocks base method.
|
||||
func (m *MockStore) UpsertUserChatProviderKey(ctx context.Context, arg database.UpsertUserChatProviderKeyParams) (database.UserChatProviderKey, error) {
|
||||
m.ctrl.T.Helper()
|
||||
|
||||
Generated
+2
-67
@@ -1255,44 +1255,6 @@ COMMENT ON COLUMN boundary_usage_stats.window_start IS 'Start of the time window
|
||||
|
||||
COMMENT ON COLUMN boundary_usage_stats.updated_at IS 'Timestamp of the last update to this row.';
|
||||
|
||||
CREATE TABLE chat_debug_runs (
|
||||
id uuid DEFAULT gen_random_uuid() NOT NULL,
|
||||
chat_id uuid NOT NULL,
|
||||
root_chat_id uuid,
|
||||
parent_chat_id uuid,
|
||||
model_config_id uuid,
|
||||
trigger_message_id bigint,
|
||||
history_tip_message_id bigint,
|
||||
kind text NOT NULL,
|
||||
status text NOT NULL,
|
||||
provider text,
|
||||
model text,
|
||||
summary jsonb DEFAULT '{}'::jsonb NOT NULL,
|
||||
started_at timestamp with time zone DEFAULT now() NOT NULL,
|
||||
updated_at timestamp with time zone DEFAULT now() NOT NULL,
|
||||
finished_at timestamp with time zone
|
||||
);
|
||||
|
||||
CREATE TABLE chat_debug_steps (
|
||||
id uuid DEFAULT gen_random_uuid() NOT NULL,
|
||||
run_id uuid NOT NULL,
|
||||
chat_id uuid NOT NULL,
|
||||
step_number integer NOT NULL,
|
||||
operation text NOT NULL,
|
||||
status text NOT NULL,
|
||||
history_tip_message_id bigint,
|
||||
assistant_message_id bigint,
|
||||
normalized_request jsonb NOT NULL,
|
||||
normalized_response jsonb,
|
||||
usage jsonb,
|
||||
attempts jsonb DEFAULT '[]'::jsonb NOT NULL,
|
||||
error jsonb,
|
||||
metadata jsonb DEFAULT '{}'::jsonb NOT NULL,
|
||||
started_at timestamp with time zone DEFAULT now() NOT NULL,
|
||||
updated_at timestamp with time zone DEFAULT now() NOT NULL,
|
||||
finished_at timestamp with time zone
|
||||
);
|
||||
|
||||
CREATE TABLE chat_diff_statuses (
|
||||
chat_id uuid NOT NULL,
|
||||
url text,
|
||||
@@ -3397,12 +3359,6 @@ ALTER TABLE ONLY audit_logs
|
||||
ALTER TABLE ONLY boundary_usage_stats
|
||||
ADD CONSTRAINT boundary_usage_stats_pkey PRIMARY KEY (replica_id);
|
||||
|
||||
ALTER TABLE ONLY chat_debug_runs
|
||||
ADD CONSTRAINT chat_debug_runs_pkey PRIMARY KEY (id);
|
||||
|
||||
ALTER TABLE ONLY chat_debug_steps
|
||||
ADD CONSTRAINT chat_debug_steps_pkey PRIMARY KEY (id);
|
||||
|
||||
ALTER TABLE ONLY chat_diff_statuses
|
||||
ADD CONSTRAINT chat_diff_statuses_pkey PRIMARY KEY (chat_id);
|
||||
|
||||
@@ -3797,20 +3753,6 @@ CREATE INDEX idx_audit_log_user_id ON audit_logs USING btree (user_id);
|
||||
|
||||
CREATE INDEX idx_audit_logs_time_desc ON audit_logs USING btree ("time" DESC);
|
||||
|
||||
CREATE INDEX idx_chat_debug_runs_chat_started ON chat_debug_runs USING btree (chat_id, started_at DESC);
|
||||
|
||||
CREATE UNIQUE INDEX idx_chat_debug_runs_id_chat ON chat_debug_runs USING btree (id, chat_id);
|
||||
|
||||
CREATE INDEX idx_chat_debug_runs_stale ON chat_debug_runs USING btree (updated_at) WHERE (finished_at IS NULL);
|
||||
|
||||
CREATE INDEX idx_chat_debug_steps_chat_assistant_msg ON chat_debug_steps USING btree (chat_id, assistant_message_id) WHERE (assistant_message_id IS NOT NULL);
|
||||
|
||||
CREATE INDEX idx_chat_debug_steps_chat_tip ON chat_debug_steps USING btree (chat_id, history_tip_message_id);
|
||||
|
||||
CREATE UNIQUE INDEX idx_chat_debug_steps_run_step ON chat_debug_steps USING btree (run_id, step_number);
|
||||
|
||||
CREATE INDEX idx_chat_debug_steps_stale ON chat_debug_steps USING btree (updated_at) WHERE (finished_at IS NULL);
|
||||
|
||||
CREATE INDEX idx_chat_diff_statuses_stale_at ON chat_diff_statuses USING btree (stale_at);
|
||||
|
||||
CREATE INDEX idx_chat_file_links_chat_id ON chat_file_links USING btree (chat_id);
|
||||
@@ -3849,6 +3791,8 @@ CREATE INDEX idx_chats_last_model_config_id ON chats USING btree (last_model_con
|
||||
|
||||
CREATE INDEX idx_chats_owner ON chats USING btree (owner_id);
|
||||
|
||||
CREATE INDEX idx_chats_owner_updated_id ON chats USING btree (owner_id, updated_at DESC, id DESC);
|
||||
|
||||
CREATE INDEX idx_chats_parent_chat_id ON chats USING btree (parent_chat_id);
|
||||
|
||||
CREATE INDEX idx_chats_pending ON chats USING btree (status) WHERE (status = 'pending'::chat_status);
|
||||
@@ -4114,12 +4058,6 @@ ALTER TABLE ONLY aibridge_interceptions
|
||||
ALTER TABLE ONLY api_keys
|
||||
ADD CONSTRAINT api_keys_user_id_uuid_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY chat_debug_runs
|
||||
ADD CONSTRAINT chat_debug_runs_chat_id_fkey FOREIGN KEY (chat_id) REFERENCES chats(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY chat_debug_steps
|
||||
ADD CONSTRAINT chat_debug_steps_chat_id_fkey FOREIGN KEY (chat_id) REFERENCES chats(id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY chat_diff_statuses
|
||||
ADD CONSTRAINT chat_diff_statuses_chat_id_fkey FOREIGN KEY (chat_id) REFERENCES chats(id) ON DELETE CASCADE;
|
||||
|
||||
@@ -4192,9 +4130,6 @@ ALTER TABLE ONLY connection_logs
|
||||
ALTER TABLE ONLY crypto_keys
|
||||
ADD CONSTRAINT crypto_keys_secret_key_id_fkey FOREIGN KEY (secret_key_id) REFERENCES dbcrypt_keys(active_key_digest);
|
||||
|
||||
ALTER TABLE ONLY chat_debug_steps
|
||||
ADD CONSTRAINT fk_chat_debug_steps_run_chat FOREIGN KEY (run_id, chat_id) REFERENCES chat_debug_runs(id, chat_id) ON DELETE CASCADE;
|
||||
|
||||
ALTER TABLE ONLY oauth2_provider_app_tokens
|
||||
ADD CONSTRAINT fk_oauth2_provider_app_tokens_user_id FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
|
||||
|
||||
|
||||
@@ -9,8 +9,6 @@ const (
|
||||
ForeignKeyAiSeatStateUserID ForeignKeyConstraint = "ai_seat_state_user_id_fkey" // ALTER TABLE ONLY ai_seat_state ADD CONSTRAINT ai_seat_state_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
|
||||
ForeignKeyAibridgeInterceptionsInitiatorID ForeignKeyConstraint = "aibridge_interceptions_initiator_id_fkey" // ALTER TABLE ONLY aibridge_interceptions ADD CONSTRAINT aibridge_interceptions_initiator_id_fkey FOREIGN KEY (initiator_id) REFERENCES users(id);
|
||||
ForeignKeyAPIKeysUserIDUUID ForeignKeyConstraint = "api_keys_user_id_uuid_fkey" // ALTER TABLE ONLY api_keys ADD CONSTRAINT api_keys_user_id_uuid_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
|
||||
ForeignKeyChatDebugRunsChatID ForeignKeyConstraint = "chat_debug_runs_chat_id_fkey" // ALTER TABLE ONLY chat_debug_runs ADD CONSTRAINT chat_debug_runs_chat_id_fkey FOREIGN KEY (chat_id) REFERENCES chats(id) ON DELETE CASCADE;
|
||||
ForeignKeyChatDebugStepsChatID ForeignKeyConstraint = "chat_debug_steps_chat_id_fkey" // ALTER TABLE ONLY chat_debug_steps ADD CONSTRAINT chat_debug_steps_chat_id_fkey FOREIGN KEY (chat_id) REFERENCES chats(id) ON DELETE CASCADE;
|
||||
ForeignKeyChatDiffStatusesChatID ForeignKeyConstraint = "chat_diff_statuses_chat_id_fkey" // ALTER TABLE ONLY chat_diff_statuses ADD CONSTRAINT chat_diff_statuses_chat_id_fkey FOREIGN KEY (chat_id) REFERENCES chats(id) ON DELETE CASCADE;
|
||||
ForeignKeyChatFileLinksChatID ForeignKeyConstraint = "chat_file_links_chat_id_fkey" // ALTER TABLE ONLY chat_file_links ADD CONSTRAINT chat_file_links_chat_id_fkey FOREIGN KEY (chat_id) REFERENCES chats(id) ON DELETE CASCADE;
|
||||
ForeignKeyChatFileLinksFileID ForeignKeyConstraint = "chat_file_links_file_id_fkey" // ALTER TABLE ONLY chat_file_links ADD CONSTRAINT chat_file_links_file_id_fkey FOREIGN KEY (file_id) REFERENCES chat_files(id) ON DELETE CASCADE;
|
||||
@@ -35,7 +33,6 @@ const (
|
||||
ForeignKeyConnectionLogsWorkspaceID ForeignKeyConstraint = "connection_logs_workspace_id_fkey" // ALTER TABLE ONLY connection_logs ADD CONSTRAINT connection_logs_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspaces(id) ON DELETE CASCADE;
|
||||
ForeignKeyConnectionLogsWorkspaceOwnerID ForeignKeyConstraint = "connection_logs_workspace_owner_id_fkey" // ALTER TABLE ONLY connection_logs ADD CONSTRAINT connection_logs_workspace_owner_id_fkey FOREIGN KEY (workspace_owner_id) REFERENCES users(id) ON DELETE CASCADE;
|
||||
ForeignKeyCryptoKeysSecretKeyID ForeignKeyConstraint = "crypto_keys_secret_key_id_fkey" // ALTER TABLE ONLY crypto_keys ADD CONSTRAINT crypto_keys_secret_key_id_fkey FOREIGN KEY (secret_key_id) REFERENCES dbcrypt_keys(active_key_digest);
|
||||
ForeignKeyFkChatDebugStepsRunChat ForeignKeyConstraint = "fk_chat_debug_steps_run_chat" // ALTER TABLE ONLY chat_debug_steps ADD CONSTRAINT fk_chat_debug_steps_run_chat FOREIGN KEY (run_id, chat_id) REFERENCES chat_debug_runs(id, chat_id) ON DELETE CASCADE;
|
||||
ForeignKeyFkOauth2ProviderAppTokensUserID ForeignKeyConstraint = "fk_oauth2_provider_app_tokens_user_id" // ALTER TABLE ONLY oauth2_provider_app_tokens ADD CONSTRAINT fk_oauth2_provider_app_tokens_user_id FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
|
||||
ForeignKeyGitAuthLinksOauthAccessTokenKeyID ForeignKeyConstraint = "git_auth_links_oauth_access_token_key_id_fkey" // ALTER TABLE ONLY external_auth_links ADD CONSTRAINT git_auth_links_oauth_access_token_key_id_fkey FOREIGN KEY (oauth_access_token_key_id) REFERENCES dbcrypt_keys(active_key_digest);
|
||||
ForeignKeyGitAuthLinksOauthRefreshTokenKeyID ForeignKeyConstraint = "git_auth_links_oauth_refresh_token_key_id_fkey" // ALTER TABLE ONLY external_auth_links ADD CONSTRAINT git_auth_links_oauth_refresh_token_key_id_fkey FOREIGN KEY (oauth_refresh_token_key_id) REFERENCES dbcrypt_keys(active_key_digest);
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
CREATE INDEX idx_chats_owner_updated_id ON chats (owner_id, updated_at DESC, id DESC);
|
||||
@@ -1,5 +0,0 @@
|
||||
-- The GetChats ORDER BY changed from (updated_at, id) DESC to a 4-column
|
||||
-- expression sort (pinned-first flag, negated pin_order, updated_at, id).
|
||||
-- This index was purpose-built for the old sort and no longer provides
|
||||
-- read benefit. The simpler idx_chats_owner covers the owner_id filter.
|
||||
DROP INDEX IF EXISTS idx_chats_owner_updated_id;
|
||||
@@ -1,2 +0,0 @@
|
||||
DROP TABLE IF EXISTS chat_debug_steps;
|
||||
DROP TABLE IF EXISTS chat_debug_runs;
|
||||
@@ -1,59 +0,0 @@
|
||||
CREATE TABLE chat_debug_runs (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
chat_id UUID NOT NULL REFERENCES chats(id) ON DELETE CASCADE,
|
||||
-- root_chat_id and parent_chat_id are intentionally NOT
|
||||
-- foreign-keyed to chats(id). They are snapshot values that
|
||||
-- record the subchat hierarchy at run time. The referenced
|
||||
-- chat may be archived or deleted independently, and we want
|
||||
-- to preserve the historical lineage in debug rows rather
|
||||
-- than cascade-delete them.
|
||||
root_chat_id UUID,
|
||||
parent_chat_id UUID,
|
||||
model_config_id UUID,
|
||||
trigger_message_id BIGINT,
|
||||
history_tip_message_id BIGINT,
|
||||
kind TEXT NOT NULL,
|
||||
status TEXT NOT NULL,
|
||||
provider TEXT,
|
||||
model TEXT,
|
||||
summary JSONB NOT NULL DEFAULT '{}'::jsonb,
|
||||
started_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
finished_at TIMESTAMPTZ
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX idx_chat_debug_runs_id_chat ON chat_debug_runs(id, chat_id);
|
||||
CREATE INDEX idx_chat_debug_runs_chat_started ON chat_debug_runs(chat_id, started_at DESC);
|
||||
|
||||
CREATE TABLE chat_debug_steps (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
run_id UUID NOT NULL,
|
||||
chat_id UUID NOT NULL REFERENCES chats(id) ON DELETE CASCADE,
|
||||
step_number INT NOT NULL,
|
||||
operation TEXT NOT NULL,
|
||||
status TEXT NOT NULL,
|
||||
history_tip_message_id BIGINT,
|
||||
assistant_message_id BIGINT,
|
||||
normalized_request JSONB NOT NULL,
|
||||
normalized_response JSONB,
|
||||
usage JSONB,
|
||||
attempts JSONB NOT NULL DEFAULT '[]'::jsonb,
|
||||
error JSONB,
|
||||
metadata JSONB NOT NULL DEFAULT '{}'::jsonb,
|
||||
started_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
finished_at TIMESTAMPTZ,
|
||||
CONSTRAINT fk_chat_debug_steps_run_chat
|
||||
FOREIGN KEY (run_id, chat_id)
|
||||
REFERENCES chat_debug_runs(id, chat_id)
|
||||
ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX idx_chat_debug_steps_run_step ON chat_debug_steps(run_id, step_number);
|
||||
CREATE INDEX idx_chat_debug_steps_chat_tip ON chat_debug_steps(chat_id, history_tip_message_id);
|
||||
-- Supports DeleteChatDebugDataAfterMessageID assistant_message_id branch.
|
||||
CREATE INDEX idx_chat_debug_steps_chat_assistant_msg ON chat_debug_steps(chat_id, assistant_message_id) WHERE assistant_message_id IS NOT NULL;
|
||||
|
||||
-- Supports FinalizeStaleChatDebugRows worker query.
|
||||
CREATE INDEX idx_chat_debug_runs_stale ON chat_debug_runs(updated_at) WHERE finished_at IS NULL;
|
||||
CREATE INDEX idx_chat_debug_steps_stale ON chat_debug_steps(updated_at) WHERE finished_at IS NULL;
|
||||
-65
@@ -1,65 +0,0 @@
|
||||
INSERT INTO chat_debug_runs (
|
||||
id,
|
||||
chat_id,
|
||||
model_config_id,
|
||||
history_tip_message_id,
|
||||
kind,
|
||||
status,
|
||||
provider,
|
||||
model,
|
||||
summary,
|
||||
started_at,
|
||||
updated_at,
|
||||
finished_at
|
||||
) VALUES (
|
||||
'c98518f8-9fb3-458b-a642-57552af1db63',
|
||||
'72c0438a-18eb-4688-ab80-e4c6a126ef96',
|
||||
'9af5f8d5-6a57-4505-8a69-3d6c787b95fd',
|
||||
(SELECT MAX(id) FROM chat_messages WHERE chat_id = '72c0438a-18eb-4688-ab80-e4c6a126ef96'),
|
||||
'chat_turn',
|
||||
'completed',
|
||||
'openai',
|
||||
'gpt-5.2',
|
||||
'{"step_count":1,"has_error":false}'::jsonb,
|
||||
'2024-01-01 00:00:00+00',
|
||||
'2024-01-01 00:00:01+00',
|
||||
'2024-01-01 00:00:01+00'
|
||||
);
|
||||
|
||||
INSERT INTO chat_debug_steps (
|
||||
id,
|
||||
run_id,
|
||||
chat_id,
|
||||
step_number,
|
||||
operation,
|
||||
status,
|
||||
history_tip_message_id,
|
||||
assistant_message_id,
|
||||
normalized_request,
|
||||
normalized_response,
|
||||
usage,
|
||||
attempts,
|
||||
error,
|
||||
metadata,
|
||||
started_at,
|
||||
updated_at,
|
||||
finished_at
|
||||
) VALUES (
|
||||
'59471c60-7851-4fa6-bf05-e21dd939721f',
|
||||
'c98518f8-9fb3-458b-a642-57552af1db63',
|
||||
'72c0438a-18eb-4688-ab80-e4c6a126ef96',
|
||||
1,
|
||||
'stream',
|
||||
'completed',
|
||||
(SELECT MAX(id) FROM chat_messages WHERE chat_id = '72c0438a-18eb-4688-ab80-e4c6a126ef96'),
|
||||
(SELECT MAX(id) FROM chat_messages WHERE chat_id = '72c0438a-18eb-4688-ab80-e4c6a126ef96'),
|
||||
'{"messages":[]}'::jsonb,
|
||||
'{"finish_reason":"stop"}'::jsonb,
|
||||
'{"input_tokens":1,"output_tokens":1}'::jsonb,
|
||||
'[]'::jsonb,
|
||||
NULL,
|
||||
'{"provider":"openai"}'::jsonb,
|
||||
'2024-01-01 00:00:00+00',
|
||||
'2024-01-01 00:00:01+00',
|
||||
'2024-01-01 00:00:01+00'
|
||||
);
|
||||
@@ -4248,44 +4248,6 @@ type Chat struct {
|
||||
DynamicTools pqtype.NullRawMessage `db:"dynamic_tools" json:"dynamic_tools"`
|
||||
}
|
||||
|
||||
type ChatDebugRun struct {
|
||||
ID uuid.UUID `db:"id" json:"id"`
|
||||
ChatID uuid.UUID `db:"chat_id" json:"chat_id"`
|
||||
RootChatID uuid.NullUUID `db:"root_chat_id" json:"root_chat_id"`
|
||||
ParentChatID uuid.NullUUID `db:"parent_chat_id" json:"parent_chat_id"`
|
||||
ModelConfigID uuid.NullUUID `db:"model_config_id" json:"model_config_id"`
|
||||
TriggerMessageID sql.NullInt64 `db:"trigger_message_id" json:"trigger_message_id"`
|
||||
HistoryTipMessageID sql.NullInt64 `db:"history_tip_message_id" json:"history_tip_message_id"`
|
||||
Kind string `db:"kind" json:"kind"`
|
||||
Status string `db:"status" json:"status"`
|
||||
Provider sql.NullString `db:"provider" json:"provider"`
|
||||
Model sql.NullString `db:"model" json:"model"`
|
||||
Summary json.RawMessage `db:"summary" json:"summary"`
|
||||
StartedAt time.Time `db:"started_at" json:"started_at"`
|
||||
UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
|
||||
FinishedAt sql.NullTime `db:"finished_at" json:"finished_at"`
|
||||
}
|
||||
|
||||
type ChatDebugStep struct {
|
||||
ID uuid.UUID `db:"id" json:"id"`
|
||||
RunID uuid.UUID `db:"run_id" json:"run_id"`
|
||||
ChatID uuid.UUID `db:"chat_id" json:"chat_id"`
|
||||
StepNumber int32 `db:"step_number" json:"step_number"`
|
||||
Operation string `db:"operation" json:"operation"`
|
||||
Status string `db:"status" json:"status"`
|
||||
HistoryTipMessageID sql.NullInt64 `db:"history_tip_message_id" json:"history_tip_message_id"`
|
||||
AssistantMessageID sql.NullInt64 `db:"assistant_message_id" json:"assistant_message_id"`
|
||||
NormalizedRequest json.RawMessage `db:"normalized_request" json:"normalized_request"`
|
||||
NormalizedResponse pqtype.NullRawMessage `db:"normalized_response" json:"normalized_response"`
|
||||
Usage pqtype.NullRawMessage `db:"usage" json:"usage"`
|
||||
Attempts json.RawMessage `db:"attempts" json:"attempts"`
|
||||
Error pqtype.NullRawMessage `db:"error" json:"error"`
|
||||
Metadata json.RawMessage `db:"metadata" json:"metadata"`
|
||||
StartedAt time.Time `db:"started_at" json:"started_at"`
|
||||
UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
|
||||
FinishedAt sql.NullTime `db:"finished_at" json:"finished_at"`
|
||||
}
|
||||
|
||||
type ChatDiffStatus struct {
|
||||
ChatID uuid.UUID `db:"chat_id" json:"chat_id"`
|
||||
Url sql.NullString `db:"url" json:"url"`
|
||||
|
||||
@@ -102,8 +102,6 @@ type sqlcQuerier interface {
|
||||
// be recreated.
|
||||
DeleteAllWebpushSubscriptions(ctx context.Context) error
|
||||
DeleteApplicationConnectAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error
|
||||
DeleteChatDebugDataAfterMessageID(ctx context.Context, arg DeleteChatDebugDataAfterMessageIDParams) (int64, error)
|
||||
DeleteChatDebugDataByChatID(ctx context.Context, chatID uuid.UUID) (int64, error)
|
||||
DeleteChatModelConfigByID(ctx context.Context, id uuid.UUID) error
|
||||
DeleteChatProviderByID(ctx context.Context, id uuid.UUID) error
|
||||
DeleteChatQueuedMessage(ctx context.Context, arg DeleteChatQueuedMessageParams) error
|
||||
@@ -171,7 +169,7 @@ type sqlcQuerier interface {
|
||||
DeleteTask(ctx context.Context, arg DeleteTaskParams) (uuid.UUID, error)
|
||||
DeleteUserChatCompactionThreshold(ctx context.Context, arg DeleteUserChatCompactionThresholdParams) error
|
||||
DeleteUserChatProviderKey(ctx context.Context, arg DeleteUserChatProviderKeyParams) error
|
||||
DeleteUserSecretByUserIDAndName(ctx context.Context, arg DeleteUserSecretByUserIDAndNameParams) (int64, error)
|
||||
DeleteUserSecretByUserIDAndName(ctx context.Context, arg DeleteUserSecretByUserIDAndNameParams) error
|
||||
DeleteWebpushSubscriptionByUserIDAndEndpoint(ctx context.Context, arg DeleteWebpushSubscriptionByUserIDAndEndpointParams) error
|
||||
DeleteWebpushSubscriptions(ctx context.Context, ids []uuid.UUID) error
|
||||
DeleteWorkspaceACLByID(ctx context.Context, id uuid.UUID) error
|
||||
@@ -196,16 +194,6 @@ type sqlcQuerier interface {
|
||||
FetchNewMessageMetadata(ctx context.Context, arg FetchNewMessageMetadataParams) (FetchNewMessageMetadataRow, error)
|
||||
FetchVolumesResourceMonitorsByAgentID(ctx context.Context, agentID uuid.UUID) ([]WorkspaceAgentVolumeResourceMonitor, error)
|
||||
FetchVolumesResourceMonitorsUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]WorkspaceAgentVolumeResourceMonitor, error)
|
||||
// Marks orphaned in-progress rows as interrupted so they do not stay
|
||||
// in a non-terminal state forever. The NOT IN list must match the
|
||||
// terminal statuses defined by ChatDebugStatus in codersdk/chats.go.
|
||||
//
|
||||
// The steps CTE also catches steps whose parent run was just finalized
|
||||
// (via run_id IN), because PostgreSQL data-modifying CTEs share the
|
||||
// same snapshot and cannot see each other's row updates. Without this,
|
||||
// a step with a recent updated_at would survive its run's finalization
|
||||
// and remain in 'in_progress' state permanently.
|
||||
FinalizeStaleChatDebugRows(ctx context.Context, updatedBefore time.Time) (FinalizeStaleChatDebugRowsRow, error)
|
||||
// FindMatchingPresetID finds a preset ID that is the largest exact subset of the provided parameters.
|
||||
// It returns the preset ID if a match is found, or NULL if no match is found.
|
||||
// The query finds presets where all preset parameters are present in the provided parameters,
|
||||
@@ -270,15 +258,6 @@ type sqlcQuerier interface {
|
||||
// Aggregate cost summary for a single user within a date range.
|
||||
// Only counts assistant-role messages.
|
||||
GetChatCostSummary(ctx context.Context, arg GetChatCostSummaryParams) (GetChatCostSummaryRow, error)
|
||||
// GetChatDebugLoggingAllowUsers returns the runtime admin setting that
|
||||
// allows users to opt into chat debug logging when the deployment does
|
||||
// not already force debug logging on globally.
|
||||
GetChatDebugLoggingAllowUsers(ctx context.Context) (bool, error)
|
||||
GetChatDebugRunByID(ctx context.Context, id uuid.UUID) (ChatDebugRun, error)
|
||||
// Returns the most recent debug runs for a chat, ordered newest-first.
|
||||
// Callers must supply an explicit limit to avoid unbounded result sets.
|
||||
GetChatDebugRunsByChatID(ctx context.Context, arg GetChatDebugRunsByChatIDParams) ([]ChatDebugRun, error)
|
||||
GetChatDebugStepsByRunID(ctx context.Context, runID uuid.UUID) ([]ChatDebugStep, error)
|
||||
GetChatDesktopEnabled(ctx context.Context) (bool, error)
|
||||
GetChatDiffStatusByChatID(ctx context.Context, chatID uuid.UUID) (ChatDiffStatus, error)
|
||||
GetChatDiffStatusesByChatIDs(ctx context.Context, chatIds []uuid.UUID) ([]ChatDiffStatus, error)
|
||||
@@ -439,12 +418,11 @@ type sqlcQuerier interface {
|
||||
// per PR for state/additions/deletions/model (model comes from the
|
||||
// most recent chat).
|
||||
GetPRInsightsPerModel(ctx context.Context, arg GetPRInsightsPerModelParams) ([]GetPRInsightsPerModelRow, error)
|
||||
// Returns all individual PR rows with cost for the selected time range.
|
||||
// Returns individual PR rows with cost for the recent PRs table.
|
||||
// Uses two CTEs: pr_costs sums cost for the PR-linked chat and its
|
||||
// direct children (that lack their own PR), and deduped picks one row
|
||||
// per PR for metadata. A safety-cap LIMIT guards against unexpectedly
|
||||
// large result sets from direct API callers.
|
||||
GetPRInsightsPullRequests(ctx context.Context, arg GetPRInsightsPullRequestsParams) ([]GetPRInsightsPullRequestsRow, error)
|
||||
// per PR for metadata.
|
||||
GetPRInsightsRecentPRs(ctx context.Context, arg GetPRInsightsRecentPRsParams) ([]GetPRInsightsRecentPRsRow, error)
|
||||
// PR Insights queries for the /agents analytics dashboard.
|
||||
// These aggregate data from chat_diff_statuses (PR metadata) joined
|
||||
// with chats and chat_messages (cost) to power the PR Insights view.
|
||||
@@ -640,7 +618,6 @@ type sqlcQuerier interface {
|
||||
GetUserByID(ctx context.Context, id uuid.UUID) (User, error)
|
||||
GetUserChatCompactionThreshold(ctx context.Context, arg GetUserChatCompactionThresholdParams) (string, error)
|
||||
GetUserChatCustomPrompt(ctx context.Context, userID uuid.UUID) (string, error)
|
||||
GetUserChatDebugLoggingEnabled(ctx context.Context, userID uuid.UUID) (bool, error)
|
||||
GetUserChatProviderKeys(ctx context.Context, userID uuid.UUID) ([]UserChatProviderKey, error)
|
||||
GetUserChatSpendInPeriod(ctx context.Context, arg GetUserChatSpendInPeriodParams) (int64, error)
|
||||
GetUserCount(ctx context.Context, includeSystem bool) (int64, error)
|
||||
@@ -760,8 +737,6 @@ type sqlcQuerier interface {
|
||||
InsertAllUsersGroup(ctx context.Context, organizationID uuid.UUID) (Group, error)
|
||||
InsertAuditLog(ctx context.Context, arg InsertAuditLogParams) (AuditLog, error)
|
||||
InsertChat(ctx context.Context, arg InsertChatParams) (Chat, error)
|
||||
InsertChatDebugRun(ctx context.Context, arg InsertChatDebugRunParams) (ChatDebugRun, error)
|
||||
InsertChatDebugStep(ctx context.Context, arg InsertChatDebugStepParams) (ChatDebugStep, error)
|
||||
InsertChatFile(ctx context.Context, arg InsertChatFileParams) (InsertChatFileRow, error)
|
||||
InsertChatMessages(ctx context.Context, arg InsertChatMessagesParams) ([]ChatMessage, error)
|
||||
InsertChatModelConfig(ctx context.Context, arg InsertChatModelConfigParams) (ChatModelConfig, error)
|
||||
@@ -940,16 +915,6 @@ type sqlcQuerier interface {
|
||||
UpdateAPIKeyByID(ctx context.Context, arg UpdateAPIKeyByIDParams) error
|
||||
UpdateChatBuildAgentBinding(ctx context.Context, arg UpdateChatBuildAgentBindingParams) (Chat, error)
|
||||
UpdateChatByID(ctx context.Context, arg UpdateChatByIDParams) (Chat, error)
|
||||
// Uses COALESCE so that passing NULL from Go means "keep the
|
||||
// existing value." This is intentional: debug rows follow a
|
||||
// write-once-finalize pattern where fields are set at creation
|
||||
// or finalization and never cleared back to NULL.
|
||||
UpdateChatDebugRun(ctx context.Context, arg UpdateChatDebugRunParams) (ChatDebugRun, error)
|
||||
// Uses COALESCE so that passing NULL from Go means "keep the
|
||||
// existing value." This is intentional: debug rows follow a
|
||||
// write-once-finalize pattern where fields are set at creation
|
||||
// or finalization and never cleared back to NULL.
|
||||
UpdateChatDebugStep(ctx context.Context, arg UpdateChatDebugStepParams) (ChatDebugStep, error)
|
||||
// Bumps the heartbeat timestamp for the given set of chat IDs,
|
||||
// provided they are still running and owned by the specified
|
||||
// worker. Returns the IDs that were actually updated so the
|
||||
@@ -1046,7 +1011,6 @@ type sqlcQuerier interface {
|
||||
UpdateWorkspace(ctx context.Context, arg UpdateWorkspaceParams) (WorkspaceTable, error)
|
||||
UpdateWorkspaceACLByID(ctx context.Context, arg UpdateWorkspaceACLByIDParams) error
|
||||
UpdateWorkspaceAgentConnectionByID(ctx context.Context, arg UpdateWorkspaceAgentConnectionByIDParams) error
|
||||
UpdateWorkspaceAgentDirectoryByID(ctx context.Context, arg UpdateWorkspaceAgentDirectoryByIDParams) error
|
||||
UpdateWorkspaceAgentDisplayAppsByID(ctx context.Context, arg UpdateWorkspaceAgentDisplayAppsByIDParams) error
|
||||
UpdateWorkspaceAgentLifecycleStateByID(ctx context.Context, arg UpdateWorkspaceAgentLifecycleStateByIDParams) error
|
||||
UpdateWorkspaceAgentLogOverflowByID(ctx context.Context, arg UpdateWorkspaceAgentLogOverflowByIDParams) error
|
||||
@@ -1078,9 +1042,6 @@ type sqlcQuerier interface {
|
||||
// cumulative values for unique counts (accurate period totals). Request counts
|
||||
// are always deltas, accumulated in DB. Returns true if insert, false if update.
|
||||
UpsertBoundaryUsageStats(ctx context.Context, arg UpsertBoundaryUsageStatsParams) (bool, error)
|
||||
// UpsertChatDebugLoggingAllowUsers updates the runtime admin setting that
|
||||
// allows users to opt into chat debug logging.
|
||||
UpsertChatDebugLoggingAllowUsers(ctx context.Context, allowUsers bool) error
|
||||
UpsertChatDesktopEnabled(ctx context.Context, enableDesktop bool) error
|
||||
UpsertChatDiffStatus(ctx context.Context, arg UpsertChatDiffStatusParams) (ChatDiffStatus, error)
|
||||
UpsertChatDiffStatusReference(ctx context.Context, arg UpsertChatDiffStatusReferenceParams) (ChatDiffStatus, error)
|
||||
@@ -1118,7 +1079,6 @@ type sqlcQuerier interface {
|
||||
// used to store the data, and the minutes are summed for each user and template
|
||||
// combination. The result is stored in the template_usage_stats table.
|
||||
UpsertTemplateUsageStats(ctx context.Context) error
|
||||
UpsertUserChatDebugLoggingEnabled(ctx context.Context, arg UpsertUserChatDebugLoggingEnabledParams) error
|
||||
UpsertUserChatProviderKey(ctx context.Context, arg UpsertUserChatProviderKeyParams) (UserChatProviderKey, error)
|
||||
UpsertWebpushVAPIDKeys(ctx context.Context, arg UpsertWebpushVAPIDKeysParams) error
|
||||
UpsertWorkspaceAgentPortShare(ctx context.Context, arg UpsertWorkspaceAgentPortShareParams) (WorkspaceAgentPortShare, error)
|
||||
|
||||
+21
-980
File diff suppressed because it is too large
Load Diff
+53
-741
@@ -2900,583 +2900,6 @@ func (q *sqlQuerier) UpsertBoundaryUsageStats(ctx context.Context, arg UpsertBou
|
||||
return new_period, err
|
||||
}
|
||||
|
||||
const deleteChatDebugDataAfterMessageID = `-- name: DeleteChatDebugDataAfterMessageID :execrows
|
||||
WITH affected_runs AS (
|
||||
SELECT DISTINCT run.id
|
||||
FROM chat_debug_runs run
|
||||
WHERE run.chat_id = $1::uuid
|
||||
AND (
|
||||
run.history_tip_message_id > $2::bigint
|
||||
OR run.trigger_message_id > $2::bigint
|
||||
)
|
||||
|
||||
UNION
|
||||
|
||||
SELECT DISTINCT step.run_id AS id
|
||||
FROM chat_debug_steps step
|
||||
WHERE step.chat_id = $1::uuid
|
||||
AND (
|
||||
step.assistant_message_id > $2::bigint
|
||||
OR step.history_tip_message_id > $2::bigint
|
||||
)
|
||||
)
|
||||
DELETE FROM chat_debug_runs
|
||||
WHERE chat_id = $1::uuid
|
||||
AND id IN (SELECT id FROM affected_runs)
|
||||
`
|
||||
|
||||
type DeleteChatDebugDataAfterMessageIDParams struct {
|
||||
ChatID uuid.UUID `db:"chat_id" json:"chat_id"`
|
||||
MessageID int64 `db:"message_id" json:"message_id"`
|
||||
}
|
||||
|
||||
func (q *sqlQuerier) DeleteChatDebugDataAfterMessageID(ctx context.Context, arg DeleteChatDebugDataAfterMessageIDParams) (int64, error) {
|
||||
result, err := q.db.ExecContext(ctx, deleteChatDebugDataAfterMessageID, arg.ChatID, arg.MessageID)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return result.RowsAffected()
|
||||
}
|
||||
|
||||
const deleteChatDebugDataByChatID = `-- name: DeleteChatDebugDataByChatID :execrows
|
||||
DELETE FROM chat_debug_runs
|
||||
WHERE chat_id = $1::uuid
|
||||
`
|
||||
|
||||
func (q *sqlQuerier) DeleteChatDebugDataByChatID(ctx context.Context, chatID uuid.UUID) (int64, error) {
|
||||
result, err := q.db.ExecContext(ctx, deleteChatDebugDataByChatID, chatID)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return result.RowsAffected()
|
||||
}
|
||||
|
||||
const finalizeStaleChatDebugRows = `-- name: FinalizeStaleChatDebugRows :one
|
||||
WITH finalized_runs AS (
|
||||
UPDATE chat_debug_runs
|
||||
SET
|
||||
status = 'interrupted',
|
||||
updated_at = NOW(),
|
||||
finished_at = NOW()
|
||||
WHERE updated_at < $1::timestamptz
|
||||
AND finished_at IS NULL
|
||||
AND status NOT IN ('completed', 'error', 'interrupted')
|
||||
RETURNING id
|
||||
), finalized_steps AS (
|
||||
UPDATE chat_debug_steps
|
||||
SET
|
||||
status = 'interrupted',
|
||||
updated_at = NOW(),
|
||||
finished_at = NOW()
|
||||
WHERE (
|
||||
updated_at < $1::timestamptz
|
||||
OR run_id IN (SELECT id FROM finalized_runs)
|
||||
)
|
||||
AND finished_at IS NULL
|
||||
AND status NOT IN ('completed', 'error', 'interrupted')
|
||||
RETURNING 1
|
||||
)
|
||||
SELECT
|
||||
(SELECT COUNT(*) FROM finalized_runs)::bigint AS runs_finalized,
|
||||
(SELECT COUNT(*) FROM finalized_steps)::bigint AS steps_finalized
|
||||
`
|
||||
|
||||
type FinalizeStaleChatDebugRowsRow struct {
|
||||
RunsFinalized int64 `db:"runs_finalized" json:"runs_finalized"`
|
||||
StepsFinalized int64 `db:"steps_finalized" json:"steps_finalized"`
|
||||
}
|
||||
|
||||
// Marks orphaned in-progress rows as interrupted so they do not stay
|
||||
// in a non-terminal state forever. The NOT IN list must match the
|
||||
// terminal statuses defined by ChatDebugStatus in codersdk/chats.go.
|
||||
//
|
||||
// The steps CTE also catches steps whose parent run was just finalized
|
||||
// (via run_id IN), because PostgreSQL data-modifying CTEs share the
|
||||
// same snapshot and cannot see each other's row updates. Without this,
|
||||
// a step with a recent updated_at would survive its run's finalization
|
||||
// and remain in 'in_progress' state permanently.
|
||||
func (q *sqlQuerier) FinalizeStaleChatDebugRows(ctx context.Context, updatedBefore time.Time) (FinalizeStaleChatDebugRowsRow, error) {
|
||||
row := q.db.QueryRowContext(ctx, finalizeStaleChatDebugRows, updatedBefore)
|
||||
var i FinalizeStaleChatDebugRowsRow
|
||||
err := row.Scan(&i.RunsFinalized, &i.StepsFinalized)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getChatDebugRunByID = `-- name: GetChatDebugRunByID :one
|
||||
SELECT id, chat_id, root_chat_id, parent_chat_id, model_config_id, trigger_message_id, history_tip_message_id, kind, status, provider, model, summary, started_at, updated_at, finished_at
|
||||
FROM chat_debug_runs
|
||||
WHERE id = $1::uuid
|
||||
`
|
||||
|
||||
func (q *sqlQuerier) GetChatDebugRunByID(ctx context.Context, id uuid.UUID) (ChatDebugRun, error) {
|
||||
row := q.db.QueryRowContext(ctx, getChatDebugRunByID, id)
|
||||
var i ChatDebugRun
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.ChatID,
|
||||
&i.RootChatID,
|
||||
&i.ParentChatID,
|
||||
&i.ModelConfigID,
|
||||
&i.TriggerMessageID,
|
||||
&i.HistoryTipMessageID,
|
||||
&i.Kind,
|
||||
&i.Status,
|
||||
&i.Provider,
|
||||
&i.Model,
|
||||
&i.Summary,
|
||||
&i.StartedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.FinishedAt,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const getChatDebugRunsByChatID = `-- name: GetChatDebugRunsByChatID :many
|
||||
SELECT id, chat_id, root_chat_id, parent_chat_id, model_config_id, trigger_message_id, history_tip_message_id, kind, status, provider, model, summary, started_at, updated_at, finished_at
|
||||
FROM chat_debug_runs
|
||||
WHERE chat_id = $1::uuid
|
||||
ORDER BY started_at DESC, id DESC
|
||||
LIMIT $2::int
|
||||
`
|
||||
|
||||
type GetChatDebugRunsByChatIDParams struct {
|
||||
ChatID uuid.UUID `db:"chat_id" json:"chat_id"`
|
||||
LimitVal int32 `db:"limit_val" json:"limit_val"`
|
||||
}
|
||||
|
||||
// Returns the most recent debug runs for a chat, ordered newest-first.
|
||||
// Callers must supply an explicit limit to avoid unbounded result sets.
|
||||
func (q *sqlQuerier) GetChatDebugRunsByChatID(ctx context.Context, arg GetChatDebugRunsByChatIDParams) ([]ChatDebugRun, error) {
|
||||
rows, err := q.db.QueryContext(ctx, getChatDebugRunsByChatID, arg.ChatID, arg.LimitVal)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []ChatDebugRun
|
||||
for rows.Next() {
|
||||
var i ChatDebugRun
|
||||
if err := rows.Scan(
|
||||
&i.ID,
|
||||
&i.ChatID,
|
||||
&i.RootChatID,
|
||||
&i.ParentChatID,
|
||||
&i.ModelConfigID,
|
||||
&i.TriggerMessageID,
|
||||
&i.HistoryTipMessageID,
|
||||
&i.Kind,
|
||||
&i.Status,
|
||||
&i.Provider,
|
||||
&i.Model,
|
||||
&i.Summary,
|
||||
&i.StartedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.FinishedAt,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const getChatDebugStepsByRunID = `-- name: GetChatDebugStepsByRunID :many
|
||||
SELECT id, run_id, chat_id, step_number, operation, status, history_tip_message_id, assistant_message_id, normalized_request, normalized_response, usage, attempts, error, metadata, started_at, updated_at, finished_at
|
||||
FROM chat_debug_steps
|
||||
WHERE run_id = $1::uuid
|
||||
ORDER BY step_number ASC, started_at ASC
|
||||
`
|
||||
|
||||
func (q *sqlQuerier) GetChatDebugStepsByRunID(ctx context.Context, runID uuid.UUID) ([]ChatDebugStep, error) {
|
||||
rows, err := q.db.QueryContext(ctx, getChatDebugStepsByRunID, runID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []ChatDebugStep
|
||||
for rows.Next() {
|
||||
var i ChatDebugStep
|
||||
if err := rows.Scan(
|
||||
&i.ID,
|
||||
&i.RunID,
|
||||
&i.ChatID,
|
||||
&i.StepNumber,
|
||||
&i.Operation,
|
||||
&i.Status,
|
||||
&i.HistoryTipMessageID,
|
||||
&i.AssistantMessageID,
|
||||
&i.NormalizedRequest,
|
||||
&i.NormalizedResponse,
|
||||
&i.Usage,
|
||||
&i.Attempts,
|
||||
&i.Error,
|
||||
&i.Metadata,
|
||||
&i.StartedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.FinishedAt,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const insertChatDebugRun = `-- name: InsertChatDebugRun :one
|
||||
INSERT INTO chat_debug_runs (
|
||||
chat_id,
|
||||
root_chat_id,
|
||||
parent_chat_id,
|
||||
model_config_id,
|
||||
trigger_message_id,
|
||||
history_tip_message_id,
|
||||
kind,
|
||||
status,
|
||||
provider,
|
||||
model,
|
||||
summary,
|
||||
started_at,
|
||||
updated_at,
|
||||
finished_at
|
||||
)
|
||||
VALUES (
|
||||
$1::uuid,
|
||||
$2::uuid,
|
||||
$3::uuid,
|
||||
$4::uuid,
|
||||
$5::bigint,
|
||||
$6::bigint,
|
||||
$7::text,
|
||||
$8::text,
|
||||
$9::text,
|
||||
$10::text,
|
||||
COALESCE($11::jsonb, '{}'::jsonb),
|
||||
COALESCE($12::timestamptz, NOW()),
|
||||
COALESCE($13::timestamptz, NOW()),
|
||||
$14::timestamptz
|
||||
)
|
||||
RETURNING id, chat_id, root_chat_id, parent_chat_id, model_config_id, trigger_message_id, history_tip_message_id, kind, status, provider, model, summary, started_at, updated_at, finished_at
|
||||
`
|
||||
|
||||
type InsertChatDebugRunParams struct {
|
||||
ChatID uuid.UUID `db:"chat_id" json:"chat_id"`
|
||||
RootChatID uuid.NullUUID `db:"root_chat_id" json:"root_chat_id"`
|
||||
ParentChatID uuid.NullUUID `db:"parent_chat_id" json:"parent_chat_id"`
|
||||
ModelConfigID uuid.NullUUID `db:"model_config_id" json:"model_config_id"`
|
||||
TriggerMessageID sql.NullInt64 `db:"trigger_message_id" json:"trigger_message_id"`
|
||||
HistoryTipMessageID sql.NullInt64 `db:"history_tip_message_id" json:"history_tip_message_id"`
|
||||
Kind string `db:"kind" json:"kind"`
|
||||
Status string `db:"status" json:"status"`
|
||||
Provider sql.NullString `db:"provider" json:"provider"`
|
||||
Model sql.NullString `db:"model" json:"model"`
|
||||
Summary pqtype.NullRawMessage `db:"summary" json:"summary"`
|
||||
StartedAt sql.NullTime `db:"started_at" json:"started_at"`
|
||||
UpdatedAt sql.NullTime `db:"updated_at" json:"updated_at"`
|
||||
FinishedAt sql.NullTime `db:"finished_at" json:"finished_at"`
|
||||
}
|
||||
|
||||
func (q *sqlQuerier) InsertChatDebugRun(ctx context.Context, arg InsertChatDebugRunParams) (ChatDebugRun, error) {
|
||||
row := q.db.QueryRowContext(ctx, insertChatDebugRun,
|
||||
arg.ChatID,
|
||||
arg.RootChatID,
|
||||
arg.ParentChatID,
|
||||
arg.ModelConfigID,
|
||||
arg.TriggerMessageID,
|
||||
arg.HistoryTipMessageID,
|
||||
arg.Kind,
|
||||
arg.Status,
|
||||
arg.Provider,
|
||||
arg.Model,
|
||||
arg.Summary,
|
||||
arg.StartedAt,
|
||||
arg.UpdatedAt,
|
||||
arg.FinishedAt,
|
||||
)
|
||||
var i ChatDebugRun
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.ChatID,
|
||||
&i.RootChatID,
|
||||
&i.ParentChatID,
|
||||
&i.ModelConfigID,
|
||||
&i.TriggerMessageID,
|
||||
&i.HistoryTipMessageID,
|
||||
&i.Kind,
|
||||
&i.Status,
|
||||
&i.Provider,
|
||||
&i.Model,
|
||||
&i.Summary,
|
||||
&i.StartedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.FinishedAt,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const insertChatDebugStep = `-- name: InsertChatDebugStep :one
|
||||
INSERT INTO chat_debug_steps (
|
||||
run_id,
|
||||
chat_id,
|
||||
step_number,
|
||||
operation,
|
||||
status,
|
||||
history_tip_message_id,
|
||||
assistant_message_id,
|
||||
normalized_request,
|
||||
normalized_response,
|
||||
usage,
|
||||
attempts,
|
||||
error,
|
||||
metadata,
|
||||
started_at,
|
||||
updated_at,
|
||||
finished_at
|
||||
)
|
||||
SELECT
|
||||
$1::uuid,
|
||||
run.chat_id,
|
||||
$2::int,
|
||||
$3::text,
|
||||
$4::text,
|
||||
$5::bigint,
|
||||
$6::bigint,
|
||||
COALESCE($7::jsonb, '{}'::jsonb),
|
||||
$8::jsonb,
|
||||
$9::jsonb,
|
||||
COALESCE($10::jsonb, '[]'::jsonb),
|
||||
$11::jsonb,
|
||||
COALESCE($12::jsonb, '{}'::jsonb),
|
||||
COALESCE($13::timestamptz, NOW()),
|
||||
COALESCE($14::timestamptz, NOW()),
|
||||
$15::timestamptz
|
||||
FROM chat_debug_runs run
|
||||
WHERE run.id = $1::uuid
|
||||
AND run.chat_id = $16::uuid
|
||||
RETURNING id, run_id, chat_id, step_number, operation, status, history_tip_message_id, assistant_message_id, normalized_request, normalized_response, usage, attempts, error, metadata, started_at, updated_at, finished_at
|
||||
`
|
||||
|
||||
type InsertChatDebugStepParams struct {
|
||||
RunID uuid.UUID `db:"run_id" json:"run_id"`
|
||||
StepNumber int32 `db:"step_number" json:"step_number"`
|
||||
Operation string `db:"operation" json:"operation"`
|
||||
Status string `db:"status" json:"status"`
|
||||
HistoryTipMessageID sql.NullInt64 `db:"history_tip_message_id" json:"history_tip_message_id"`
|
||||
AssistantMessageID sql.NullInt64 `db:"assistant_message_id" json:"assistant_message_id"`
|
||||
NormalizedRequest pqtype.NullRawMessage `db:"normalized_request" json:"normalized_request"`
|
||||
NormalizedResponse pqtype.NullRawMessage `db:"normalized_response" json:"normalized_response"`
|
||||
Usage pqtype.NullRawMessage `db:"usage" json:"usage"`
|
||||
Attempts pqtype.NullRawMessage `db:"attempts" json:"attempts"`
|
||||
Error pqtype.NullRawMessage `db:"error" json:"error"`
|
||||
Metadata pqtype.NullRawMessage `db:"metadata" json:"metadata"`
|
||||
StartedAt sql.NullTime `db:"started_at" json:"started_at"`
|
||||
UpdatedAt sql.NullTime `db:"updated_at" json:"updated_at"`
|
||||
FinishedAt sql.NullTime `db:"finished_at" json:"finished_at"`
|
||||
ChatID uuid.UUID `db:"chat_id" json:"chat_id"`
|
||||
}
|
||||
|
||||
func (q *sqlQuerier) InsertChatDebugStep(ctx context.Context, arg InsertChatDebugStepParams) (ChatDebugStep, error) {
|
||||
row := q.db.QueryRowContext(ctx, insertChatDebugStep,
|
||||
arg.RunID,
|
||||
arg.StepNumber,
|
||||
arg.Operation,
|
||||
arg.Status,
|
||||
arg.HistoryTipMessageID,
|
||||
arg.AssistantMessageID,
|
||||
arg.NormalizedRequest,
|
||||
arg.NormalizedResponse,
|
||||
arg.Usage,
|
||||
arg.Attempts,
|
||||
arg.Error,
|
||||
arg.Metadata,
|
||||
arg.StartedAt,
|
||||
arg.UpdatedAt,
|
||||
arg.FinishedAt,
|
||||
arg.ChatID,
|
||||
)
|
||||
var i ChatDebugStep
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.RunID,
|
||||
&i.ChatID,
|
||||
&i.StepNumber,
|
||||
&i.Operation,
|
||||
&i.Status,
|
||||
&i.HistoryTipMessageID,
|
||||
&i.AssistantMessageID,
|
||||
&i.NormalizedRequest,
|
||||
&i.NormalizedResponse,
|
||||
&i.Usage,
|
||||
&i.Attempts,
|
||||
&i.Error,
|
||||
&i.Metadata,
|
||||
&i.StartedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.FinishedAt,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const updateChatDebugRun = `-- name: UpdateChatDebugRun :one
|
||||
UPDATE chat_debug_runs
|
||||
SET
|
||||
root_chat_id = COALESCE($1::uuid, root_chat_id),
|
||||
parent_chat_id = COALESCE($2::uuid, parent_chat_id),
|
||||
model_config_id = COALESCE($3::uuid, model_config_id),
|
||||
trigger_message_id = COALESCE($4::bigint, trigger_message_id),
|
||||
history_tip_message_id = COALESCE($5::bigint, history_tip_message_id),
|
||||
status = COALESCE($6::text, status),
|
||||
provider = COALESCE($7::text, provider),
|
||||
model = COALESCE($8::text, model),
|
||||
summary = COALESCE($9::jsonb, summary),
|
||||
finished_at = COALESCE($10::timestamptz, finished_at),
|
||||
updated_at = NOW()
|
||||
WHERE id = $11::uuid
|
||||
AND chat_id = $12::uuid
|
||||
RETURNING id, chat_id, root_chat_id, parent_chat_id, model_config_id, trigger_message_id, history_tip_message_id, kind, status, provider, model, summary, started_at, updated_at, finished_at
|
||||
`
|
||||
|
||||
type UpdateChatDebugRunParams struct {
|
||||
RootChatID uuid.NullUUID `db:"root_chat_id" json:"root_chat_id"`
|
||||
ParentChatID uuid.NullUUID `db:"parent_chat_id" json:"parent_chat_id"`
|
||||
ModelConfigID uuid.NullUUID `db:"model_config_id" json:"model_config_id"`
|
||||
TriggerMessageID sql.NullInt64 `db:"trigger_message_id" json:"trigger_message_id"`
|
||||
HistoryTipMessageID sql.NullInt64 `db:"history_tip_message_id" json:"history_tip_message_id"`
|
||||
Status sql.NullString `db:"status" json:"status"`
|
||||
Provider sql.NullString `db:"provider" json:"provider"`
|
||||
Model sql.NullString `db:"model" json:"model"`
|
||||
Summary pqtype.NullRawMessage `db:"summary" json:"summary"`
|
||||
FinishedAt sql.NullTime `db:"finished_at" json:"finished_at"`
|
||||
ID uuid.UUID `db:"id" json:"id"`
|
||||
ChatID uuid.UUID `db:"chat_id" json:"chat_id"`
|
||||
}
|
||||
|
||||
// Uses COALESCE so that passing NULL from Go means "keep the
|
||||
// existing value." This is intentional: debug rows follow a
|
||||
// write-once-finalize pattern where fields are set at creation
|
||||
// or finalization and never cleared back to NULL.
|
||||
func (q *sqlQuerier) UpdateChatDebugRun(ctx context.Context, arg UpdateChatDebugRunParams) (ChatDebugRun, error) {
|
||||
row := q.db.QueryRowContext(ctx, updateChatDebugRun,
|
||||
arg.RootChatID,
|
||||
arg.ParentChatID,
|
||||
arg.ModelConfigID,
|
||||
arg.TriggerMessageID,
|
||||
arg.HistoryTipMessageID,
|
||||
arg.Status,
|
||||
arg.Provider,
|
||||
arg.Model,
|
||||
arg.Summary,
|
||||
arg.FinishedAt,
|
||||
arg.ID,
|
||||
arg.ChatID,
|
||||
)
|
||||
var i ChatDebugRun
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.ChatID,
|
||||
&i.RootChatID,
|
||||
&i.ParentChatID,
|
||||
&i.ModelConfigID,
|
||||
&i.TriggerMessageID,
|
||||
&i.HistoryTipMessageID,
|
||||
&i.Kind,
|
||||
&i.Status,
|
||||
&i.Provider,
|
||||
&i.Model,
|
||||
&i.Summary,
|
||||
&i.StartedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.FinishedAt,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const updateChatDebugStep = `-- name: UpdateChatDebugStep :one
|
||||
UPDATE chat_debug_steps
|
||||
SET
|
||||
status = COALESCE($1::text, status),
|
||||
history_tip_message_id = COALESCE($2::bigint, history_tip_message_id),
|
||||
assistant_message_id = COALESCE($3::bigint, assistant_message_id),
|
||||
normalized_request = COALESCE($4::jsonb, normalized_request),
|
||||
normalized_response = COALESCE($5::jsonb, normalized_response),
|
||||
usage = COALESCE($6::jsonb, usage),
|
||||
attempts = COALESCE($7::jsonb, attempts),
|
||||
error = COALESCE($8::jsonb, error),
|
||||
metadata = COALESCE($9::jsonb, metadata),
|
||||
finished_at = COALESCE($10::timestamptz, finished_at),
|
||||
updated_at = NOW()
|
||||
WHERE id = $11::uuid
|
||||
AND chat_id = $12::uuid
|
||||
RETURNING id, run_id, chat_id, step_number, operation, status, history_tip_message_id, assistant_message_id, normalized_request, normalized_response, usage, attempts, error, metadata, started_at, updated_at, finished_at
|
||||
`
|
||||
|
||||
type UpdateChatDebugStepParams struct {
|
||||
Status sql.NullString `db:"status" json:"status"`
|
||||
HistoryTipMessageID sql.NullInt64 `db:"history_tip_message_id" json:"history_tip_message_id"`
|
||||
AssistantMessageID sql.NullInt64 `db:"assistant_message_id" json:"assistant_message_id"`
|
||||
NormalizedRequest pqtype.NullRawMessage `db:"normalized_request" json:"normalized_request"`
|
||||
NormalizedResponse pqtype.NullRawMessage `db:"normalized_response" json:"normalized_response"`
|
||||
Usage pqtype.NullRawMessage `db:"usage" json:"usage"`
|
||||
Attempts pqtype.NullRawMessage `db:"attempts" json:"attempts"`
|
||||
Error pqtype.NullRawMessage `db:"error" json:"error"`
|
||||
Metadata pqtype.NullRawMessage `db:"metadata" json:"metadata"`
|
||||
FinishedAt sql.NullTime `db:"finished_at" json:"finished_at"`
|
||||
ID uuid.UUID `db:"id" json:"id"`
|
||||
ChatID uuid.UUID `db:"chat_id" json:"chat_id"`
|
||||
}
|
||||
|
||||
// Uses COALESCE so that passing NULL from Go means "keep the
|
||||
// existing value." This is intentional: debug rows follow a
|
||||
// write-once-finalize pattern where fields are set at creation
|
||||
// or finalization and never cleared back to NULL.
|
||||
func (q *sqlQuerier) UpdateChatDebugStep(ctx context.Context, arg UpdateChatDebugStepParams) (ChatDebugStep, error) {
|
||||
row := q.db.QueryRowContext(ctx, updateChatDebugStep,
|
||||
arg.Status,
|
||||
arg.HistoryTipMessageID,
|
||||
arg.AssistantMessageID,
|
||||
arg.NormalizedRequest,
|
||||
arg.NormalizedResponse,
|
||||
arg.Usage,
|
||||
arg.Attempts,
|
||||
arg.Error,
|
||||
arg.Metadata,
|
||||
arg.FinishedAt,
|
||||
arg.ID,
|
||||
arg.ChatID,
|
||||
)
|
||||
var i ChatDebugStep
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.RunID,
|
||||
&i.ChatID,
|
||||
&i.StepNumber,
|
||||
&i.Operation,
|
||||
&i.Status,
|
||||
&i.HistoryTipMessageID,
|
||||
&i.AssistantMessageID,
|
||||
&i.NormalizedRequest,
|
||||
&i.NormalizedResponse,
|
||||
&i.Usage,
|
||||
&i.Attempts,
|
||||
&i.Error,
|
||||
&i.Metadata,
|
||||
&i.StartedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.FinishedAt,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const deleteOldChatFiles = `-- name: DeleteOldChatFiles :execrows
|
||||
WITH kept_file_ids AS (
|
||||
-- NOTE: This uses updated_at as a proxy for archive time
|
||||
@@ -3795,7 +3218,7 @@ func (q *sqlQuerier) GetPRInsightsPerModel(ctx context.Context, arg GetPRInsight
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const getPRInsightsPullRequests = `-- name: GetPRInsightsPullRequests :many
|
||||
const getPRInsightsRecentPRs = `-- name: GetPRInsightsRecentPRs :many
|
||||
WITH pr_costs AS (
|
||||
SELECT
|
||||
prc.pr_key,
|
||||
@@ -3815,9 +3238,9 @@ WITH pr_costs AS (
|
||||
AND cds2.pull_request_state IS NOT NULL
|
||||
))
|
||||
WHERE cds.pull_request_state IS NOT NULL
|
||||
AND c.created_at >= $1::timestamptz
|
||||
AND c.created_at < $2::timestamptz
|
||||
AND ($3::uuid IS NULL OR c.owner_id = $3::uuid)
|
||||
AND c.created_at >= $2::timestamptz
|
||||
AND c.created_at < $3::timestamptz
|
||||
AND ($4::uuid IS NULL OR c.owner_id = $4::uuid)
|
||||
) prc
|
||||
LEFT JOIN LATERAL (
|
||||
SELECT COALESCE(SUM(cm.total_cost_micros), 0) AS cost_micros
|
||||
@@ -3852,9 +3275,9 @@ deduped AS (
|
||||
JOIN chats c ON c.id = cds.chat_id
|
||||
LEFT JOIN chat_model_configs cmc ON cmc.id = c.last_model_config_id
|
||||
WHERE cds.pull_request_state IS NOT NULL
|
||||
AND c.created_at >= $1::timestamptz
|
||||
AND c.created_at < $2::timestamptz
|
||||
AND ($3::uuid IS NULL OR c.owner_id = $3::uuid)
|
||||
AND c.created_at >= $2::timestamptz
|
||||
AND c.created_at < $3::timestamptz
|
||||
AND ($4::uuid IS NULL OR c.owner_id = $4::uuid)
|
||||
ORDER BY COALESCE(NULLIF(cds.url, ''), c.id::text), c.created_at DESC, c.id DESC
|
||||
)
|
||||
SELECT chat_id, pr_title, pr_url, pr_number, state, draft, additions, deletions, changed_files, commits, approved, changes_requested, reviewer_count, author_login, author_avatar_url, base_branch, model_display_name, cost_micros, created_at FROM (
|
||||
@@ -3882,16 +3305,17 @@ SELECT chat_id, pr_title, pr_url, pr_number, state, draft, additions, deletions,
|
||||
JOIN pr_costs pc ON pc.pr_key = d.pr_key
|
||||
) sub
|
||||
ORDER BY sub.created_at DESC
|
||||
LIMIT 500
|
||||
LIMIT $1::int
|
||||
`
|
||||
|
||||
type GetPRInsightsPullRequestsParams struct {
|
||||
type GetPRInsightsRecentPRsParams struct {
|
||||
LimitVal int32 `db:"limit_val" json:"limit_val"`
|
||||
StartDate time.Time `db:"start_date" json:"start_date"`
|
||||
EndDate time.Time `db:"end_date" json:"end_date"`
|
||||
OwnerID uuid.NullUUID `db:"owner_id" json:"owner_id"`
|
||||
}
|
||||
|
||||
type GetPRInsightsPullRequestsRow struct {
|
||||
type GetPRInsightsRecentPRsRow struct {
|
||||
ChatID uuid.UUID `db:"chat_id" json:"chat_id"`
|
||||
PrTitle string `db:"pr_title" json:"pr_title"`
|
||||
PrUrl sql.NullString `db:"pr_url" json:"pr_url"`
|
||||
@@ -3913,20 +3337,24 @@ type GetPRInsightsPullRequestsRow struct {
|
||||
CreatedAt time.Time `db:"created_at" json:"created_at"`
|
||||
}
|
||||
|
||||
// Returns all individual PR rows with cost for the selected time range.
|
||||
// Returns individual PR rows with cost for the recent PRs table.
|
||||
// Uses two CTEs: pr_costs sums cost for the PR-linked chat and its
|
||||
// direct children (that lack their own PR), and deduped picks one row
|
||||
// per PR for metadata. A safety-cap LIMIT guards against unexpectedly
|
||||
// large result sets from direct API callers.
|
||||
func (q *sqlQuerier) GetPRInsightsPullRequests(ctx context.Context, arg GetPRInsightsPullRequestsParams) ([]GetPRInsightsPullRequestsRow, error) {
|
||||
rows, err := q.db.QueryContext(ctx, getPRInsightsPullRequests, arg.StartDate, arg.EndDate, arg.OwnerID)
|
||||
// per PR for metadata.
|
||||
func (q *sqlQuerier) GetPRInsightsRecentPRs(ctx context.Context, arg GetPRInsightsRecentPRsParams) ([]GetPRInsightsRecentPRsRow, error) {
|
||||
rows, err := q.db.QueryContext(ctx, getPRInsightsRecentPRs,
|
||||
arg.LimitVal,
|
||||
arg.StartDate,
|
||||
arg.EndDate,
|
||||
arg.OwnerID,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []GetPRInsightsPullRequestsRow
|
||||
var items []GetPRInsightsRecentPRsRow
|
||||
for rows.Next() {
|
||||
var i GetPRInsightsPullRequestsRow
|
||||
var i GetPRInsightsRecentPRsRow
|
||||
if err := rows.Scan(
|
||||
&i.ChatID,
|
||||
&i.PrTitle,
|
||||
@@ -6395,18 +5823,20 @@ WHERE
|
||||
ELSE chats.archived = $2 :: boolean
|
||||
END
|
||||
AND CASE
|
||||
-- Cursor pagination: the last element on a page acts as the cursor.
|
||||
-- The 4-tuple matches the ORDER BY below. All columns sort DESC
|
||||
-- (pin_order is negated so lower values sort first in DESC order),
|
||||
-- which lets us use a single tuple < comparison.
|
||||
-- This allows using the last element on a page as effectively a cursor.
|
||||
-- This is an important option for scripts that need to paginate without
|
||||
-- duplicating or missing data.
|
||||
WHEN $3 :: uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN (
|
||||
(CASE WHEN pin_order > 0 THEN 1 ELSE 0 END, -pin_order, updated_at, id) < (
|
||||
-- The pagination cursor is the last ID of the previous page.
|
||||
-- The query is ordered by the updated_at field, so select all
|
||||
-- rows before the cursor.
|
||||
(updated_at, id) < (
|
||||
SELECT
|
||||
CASE WHEN c2.pin_order > 0 THEN 1 ELSE 0 END, -c2.pin_order, c2.updated_at, c2.id
|
||||
updated_at, id
|
||||
FROM
|
||||
chats c2
|
||||
chats
|
||||
WHERE
|
||||
c2.id = $3
|
||||
id = $3
|
||||
)
|
||||
)
|
||||
ELSE true
|
||||
@@ -6418,15 +5848,9 @@ WHERE
|
||||
-- Authorize Filter clause will be injected below in GetAuthorizedChats
|
||||
-- @authorize_filter
|
||||
ORDER BY
|
||||
-- Pinned chats (pin_order > 0) sort before unpinned ones. Within
|
||||
-- pinned chats, lower pin_order values come first. The negation
|
||||
-- trick (-pin_order) keeps all sort columns DESC so the cursor
|
||||
-- tuple < comparison works with uniform direction.
|
||||
CASE WHEN pin_order > 0 THEN 1 ELSE 0 END DESC,
|
||||
-pin_order DESC,
|
||||
updated_at DESC,
|
||||
id DESC
|
||||
OFFSET $5
|
||||
-- Deterministic and consistent ordering of all rows, even if they share
|
||||
-- a timestamp. This is to ensure consistent pagination.
|
||||
(updated_at, id) DESC OFFSET $5
|
||||
LIMIT
|
||||
-- The chat list is unbounded and expected to grow large.
|
||||
-- Default to 50 to prevent accidental excessively large queries.
|
||||
@@ -18095,8 +17519,7 @@ SELECT
|
||||
w.id AS workspace_id,
|
||||
COALESCE(w.name, '') AS workspace_name,
|
||||
-- Include the name of the provisioner_daemon associated to the job
|
||||
COALESCE(pd.name, '') AS worker_name,
|
||||
wb.transition as workspace_build_transition
|
||||
COALESCE(pd.name, '') AS worker_name
|
||||
FROM
|
||||
provisioner_jobs pj
|
||||
LEFT JOIN
|
||||
@@ -18141,8 +17564,7 @@ GROUP BY
|
||||
t.icon,
|
||||
w.id,
|
||||
w.name,
|
||||
pd.name,
|
||||
wb.transition
|
||||
pd.name
|
||||
ORDER BY
|
||||
pj.created_at DESC
|
||||
LIMIT
|
||||
@@ -18159,19 +17581,18 @@ type GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerPar
|
||||
}
|
||||
|
||||
type GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerRow struct {
|
||||
ProvisionerJob ProvisionerJob `db:"provisioner_job" json:"provisioner_job"`
|
||||
QueuePosition int64 `db:"queue_position" json:"queue_position"`
|
||||
QueueSize int64 `db:"queue_size" json:"queue_size"`
|
||||
AvailableWorkers []uuid.UUID `db:"available_workers" json:"available_workers"`
|
||||
TemplateVersionName string `db:"template_version_name" json:"template_version_name"`
|
||||
TemplateID uuid.NullUUID `db:"template_id" json:"template_id"`
|
||||
TemplateName string `db:"template_name" json:"template_name"`
|
||||
TemplateDisplayName string `db:"template_display_name" json:"template_display_name"`
|
||||
TemplateIcon string `db:"template_icon" json:"template_icon"`
|
||||
WorkspaceID uuid.NullUUID `db:"workspace_id" json:"workspace_id"`
|
||||
WorkspaceName string `db:"workspace_name" json:"workspace_name"`
|
||||
WorkerName string `db:"worker_name" json:"worker_name"`
|
||||
WorkspaceBuildTransition NullWorkspaceTransition `db:"workspace_build_transition" json:"workspace_build_transition"`
|
||||
ProvisionerJob ProvisionerJob `db:"provisioner_job" json:"provisioner_job"`
|
||||
QueuePosition int64 `db:"queue_position" json:"queue_position"`
|
||||
QueueSize int64 `db:"queue_size" json:"queue_size"`
|
||||
AvailableWorkers []uuid.UUID `db:"available_workers" json:"available_workers"`
|
||||
TemplateVersionName string `db:"template_version_name" json:"template_version_name"`
|
||||
TemplateID uuid.NullUUID `db:"template_id" json:"template_id"`
|
||||
TemplateName string `db:"template_name" json:"template_name"`
|
||||
TemplateDisplayName string `db:"template_display_name" json:"template_display_name"`
|
||||
TemplateIcon string `db:"template_icon" json:"template_icon"`
|
||||
WorkspaceID uuid.NullUUID `db:"workspace_id" json:"workspace_id"`
|
||||
WorkspaceName string `db:"workspace_name" json:"workspace_name"`
|
||||
WorkerName string `db:"worker_name" json:"worker_name"`
|
||||
}
|
||||
|
||||
func (q *sqlQuerier) GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisioner(ctx context.Context, arg GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerParams) ([]GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerRow, error) {
|
||||
@@ -18223,7 +17644,6 @@ func (q *sqlQuerier) GetProvisionerJobsByOrganizationAndStatusWithQueuePositionA
|
||||
&i.WorkspaceID,
|
||||
&i.WorkspaceName,
|
||||
&i.WorkerName,
|
||||
&i.WorkspaceBuildTransition,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -19722,21 +19142,6 @@ func (q *sqlQuerier) GetApplicationName(ctx context.Context) (string, error) {
|
||||
return value, err
|
||||
}
|
||||
|
||||
const getChatDebugLoggingAllowUsers = `-- name: GetChatDebugLoggingAllowUsers :one
|
||||
SELECT
|
||||
COALESCE((SELECT value = 'true' FROM site_configs WHERE key = 'agents_chat_debug_logging_allow_users'), false) :: boolean AS allow_users
|
||||
`
|
||||
|
||||
// GetChatDebugLoggingAllowUsers returns the runtime admin setting that
|
||||
// allows users to opt into chat debug logging when the deployment does
|
||||
// not already force debug logging on globally.
|
||||
func (q *sqlQuerier) GetChatDebugLoggingAllowUsers(ctx context.Context) (bool, error) {
|
||||
row := q.db.QueryRowContext(ctx, getChatDebugLoggingAllowUsers)
|
||||
var allow_users bool
|
||||
err := row.Scan(&allow_users)
|
||||
return allow_users, err
|
||||
}
|
||||
|
||||
const getChatDesktopEnabled = `-- name: GetChatDesktopEnabled :one
|
||||
SELECT
|
||||
COALESCE((SELECT value = 'true' FROM site_configs WHERE key = 'agents_desktop_enabled'), false) :: boolean AS enable_desktop
|
||||
@@ -20048,30 +19453,6 @@ func (q *sqlQuerier) UpsertApplicationName(ctx context.Context, value string) er
|
||||
return err
|
||||
}
|
||||
|
||||
const upsertChatDebugLoggingAllowUsers = `-- name: UpsertChatDebugLoggingAllowUsers :exec
|
||||
INSERT INTO site_configs (key, value)
|
||||
VALUES (
|
||||
'agents_chat_debug_logging_allow_users',
|
||||
CASE
|
||||
WHEN $1::bool THEN 'true'
|
||||
ELSE 'false'
|
||||
END
|
||||
)
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = CASE
|
||||
WHEN $1::bool THEN 'true'
|
||||
ELSE 'false'
|
||||
END
|
||||
WHERE site_configs.key = 'agents_chat_debug_logging_allow_users'
|
||||
`
|
||||
|
||||
// UpsertChatDebugLoggingAllowUsers updates the runtime admin setting that
|
||||
// allows users to opt into chat debug logging.
|
||||
func (q *sqlQuerier) UpsertChatDebugLoggingAllowUsers(ctx context.Context, allowUsers bool) error {
|
||||
_, err := q.db.ExecContext(ctx, upsertChatDebugLoggingAllowUsers, allowUsers)
|
||||
return err
|
||||
}
|
||||
|
||||
const upsertChatDesktopEnabled = `-- name: UpsertChatDesktopEnabled :exec
|
||||
INSERT INTO site_configs (key, value)
|
||||
VALUES (
|
||||
@@ -23746,7 +23127,7 @@ func (q *sqlQuerier) CreateUserSecret(ctx context.Context, arg CreateUserSecretP
|
||||
return i, err
|
||||
}
|
||||
|
||||
const deleteUserSecretByUserIDAndName = `-- name: DeleteUserSecretByUserIDAndName :execrows
|
||||
const deleteUserSecretByUserIDAndName = `-- name: DeleteUserSecretByUserIDAndName :exec
|
||||
DELETE FROM user_secrets
|
||||
WHERE user_id = $1 AND name = $2
|
||||
`
|
||||
@@ -23756,12 +23137,9 @@ type DeleteUserSecretByUserIDAndNameParams struct {
|
||||
Name string `db:"name" json:"name"`
|
||||
}
|
||||
|
||||
func (q *sqlQuerier) DeleteUserSecretByUserIDAndName(ctx context.Context, arg DeleteUserSecretByUserIDAndNameParams) (int64, error) {
|
||||
result, err := q.db.ExecContext(ctx, deleteUserSecretByUserIDAndName, arg.UserID, arg.Name)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return result.RowsAffected()
|
||||
func (q *sqlQuerier) DeleteUserSecretByUserIDAndName(ctx context.Context, arg DeleteUserSecretByUserIDAndNameParams) error {
|
||||
_, err := q.db.ExecContext(ctx, deleteUserSecretByUserIDAndName, arg.UserID, arg.Name)
|
||||
return err
|
||||
}
|
||||
|
||||
const getUserSecretByUserIDAndName = `-- name: GetUserSecretByUserIDAndName :one
|
||||
@@ -24326,23 +23704,6 @@ func (q *sqlQuerier) GetUserChatCustomPrompt(ctx context.Context, userID uuid.UU
|
||||
return chat_custom_prompt, err
|
||||
}
|
||||
|
||||
const getUserChatDebugLoggingEnabled = `-- name: GetUserChatDebugLoggingEnabled :one
|
||||
SELECT
|
||||
COALESCE((
|
||||
SELECT value = 'true'
|
||||
FROM user_configs
|
||||
WHERE user_id = $1
|
||||
AND key = 'chat_debug_logging_enabled'
|
||||
), false) :: boolean AS debug_logging_enabled
|
||||
`
|
||||
|
||||
func (q *sqlQuerier) GetUserChatDebugLoggingEnabled(ctx context.Context, userID uuid.UUID) (bool, error) {
|
||||
row := q.db.QueryRowContext(ctx, getUserChatDebugLoggingEnabled, userID)
|
||||
var debug_logging_enabled bool
|
||||
err := row.Scan(&debug_logging_enabled)
|
||||
return debug_logging_enabled, err
|
||||
}
|
||||
|
||||
const getUserCount = `-- name: GetUserCount :one
|
||||
SELECT
|
||||
COUNT(*)
|
||||
@@ -25337,35 +24698,6 @@ func (q *sqlQuerier) UpdateUserThemePreference(ctx context.Context, arg UpdateUs
|
||||
return i, err
|
||||
}
|
||||
|
||||
const upsertUserChatDebugLoggingEnabled = `-- name: UpsertUserChatDebugLoggingEnabled :exec
|
||||
INSERT INTO user_configs (user_id, key, value)
|
||||
VALUES (
|
||||
$1,
|
||||
'chat_debug_logging_enabled',
|
||||
CASE
|
||||
WHEN $2::bool THEN 'true'
|
||||
ELSE 'false'
|
||||
END
|
||||
)
|
||||
ON CONFLICT ON CONSTRAINT user_configs_pkey
|
||||
DO UPDATE SET value = CASE
|
||||
WHEN $2::bool THEN 'true'
|
||||
ELSE 'false'
|
||||
END
|
||||
WHERE user_configs.user_id = $1
|
||||
AND user_configs.key = 'chat_debug_logging_enabled'
|
||||
`
|
||||
|
||||
type UpsertUserChatDebugLoggingEnabledParams struct {
|
||||
UserID uuid.UUID `db:"user_id" json:"user_id"`
|
||||
DebugLoggingEnabled bool `db:"debug_logging_enabled" json:"debug_logging_enabled"`
|
||||
}
|
||||
|
||||
func (q *sqlQuerier) UpsertUserChatDebugLoggingEnabled(ctx context.Context, arg UpsertUserChatDebugLoggingEnabledParams) error {
|
||||
_, err := q.db.ExecContext(ctx, upsertUserChatDebugLoggingEnabled, arg.UserID, arg.DebugLoggingEnabled)
|
||||
return err
|
||||
}
|
||||
|
||||
const validateUserIDs = `-- name: ValidateUserIDs :one
|
||||
WITH input AS (
|
||||
SELECT
|
||||
@@ -27481,26 +26813,6 @@ func (q *sqlQuerier) UpdateWorkspaceAgentConnectionByID(ctx context.Context, arg
|
||||
return err
|
||||
}
|
||||
|
||||
const updateWorkspaceAgentDirectoryByID = `-- name: UpdateWorkspaceAgentDirectoryByID :exec
|
||||
UPDATE
|
||||
workspace_agents
|
||||
SET
|
||||
directory = $2, updated_at = $3
|
||||
WHERE
|
||||
id = $1
|
||||
`
|
||||
|
||||
type UpdateWorkspaceAgentDirectoryByIDParams struct {
|
||||
ID uuid.UUID `db:"id" json:"id"`
|
||||
Directory string `db:"directory" json:"directory"`
|
||||
UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
|
||||
}
|
||||
|
||||
func (q *sqlQuerier) UpdateWorkspaceAgentDirectoryByID(ctx context.Context, arg UpdateWorkspaceAgentDirectoryByIDParams) error {
|
||||
_, err := q.db.ExecContext(ctx, updateWorkspaceAgentDirectoryByID, arg.ID, arg.Directory, arg.UpdatedAt)
|
||||
return err
|
||||
}
|
||||
|
||||
const updateWorkspaceAgentDisplayAppsByID = `-- name: UpdateWorkspaceAgentDisplayAppsByID :exec
|
||||
UPDATE
|
||||
workspace_agents
|
||||
|
||||
@@ -1,205 +0,0 @@
|
||||
-- name: InsertChatDebugRun :one
|
||||
INSERT INTO chat_debug_runs (
|
||||
chat_id,
|
||||
root_chat_id,
|
||||
parent_chat_id,
|
||||
model_config_id,
|
||||
trigger_message_id,
|
||||
history_tip_message_id,
|
||||
kind,
|
||||
status,
|
||||
provider,
|
||||
model,
|
||||
summary,
|
||||
started_at,
|
||||
updated_at,
|
||||
finished_at
|
||||
)
|
||||
VALUES (
|
||||
@chat_id::uuid,
|
||||
sqlc.narg('root_chat_id')::uuid,
|
||||
sqlc.narg('parent_chat_id')::uuid,
|
||||
sqlc.narg('model_config_id')::uuid,
|
||||
sqlc.narg('trigger_message_id')::bigint,
|
||||
sqlc.narg('history_tip_message_id')::bigint,
|
||||
@kind::text,
|
||||
@status::text,
|
||||
sqlc.narg('provider')::text,
|
||||
sqlc.narg('model')::text,
|
||||
COALESCE(sqlc.narg('summary')::jsonb, '{}'::jsonb),
|
||||
COALESCE(sqlc.narg('started_at')::timestamptz, NOW()),
|
||||
COALESCE(sqlc.narg('updated_at')::timestamptz, NOW()),
|
||||
sqlc.narg('finished_at')::timestamptz
|
||||
)
|
||||
RETURNING *;
|
||||
|
||||
-- name: UpdateChatDebugRun :one
|
||||
-- Uses COALESCE so that passing NULL from Go means "keep the
|
||||
-- existing value." This is intentional: debug rows follow a
|
||||
-- write-once-finalize pattern where fields are set at creation
|
||||
-- or finalization and never cleared back to NULL.
|
||||
UPDATE chat_debug_runs
|
||||
SET
|
||||
root_chat_id = COALESCE(sqlc.narg('root_chat_id')::uuid, root_chat_id),
|
||||
parent_chat_id = COALESCE(sqlc.narg('parent_chat_id')::uuid, parent_chat_id),
|
||||
model_config_id = COALESCE(sqlc.narg('model_config_id')::uuid, model_config_id),
|
||||
trigger_message_id = COALESCE(sqlc.narg('trigger_message_id')::bigint, trigger_message_id),
|
||||
history_tip_message_id = COALESCE(sqlc.narg('history_tip_message_id')::bigint, history_tip_message_id),
|
||||
status = COALESCE(sqlc.narg('status')::text, status),
|
||||
provider = COALESCE(sqlc.narg('provider')::text, provider),
|
||||
model = COALESCE(sqlc.narg('model')::text, model),
|
||||
summary = COALESCE(sqlc.narg('summary')::jsonb, summary),
|
||||
finished_at = COALESCE(sqlc.narg('finished_at')::timestamptz, finished_at),
|
||||
updated_at = NOW()
|
||||
WHERE id = @id::uuid
|
||||
AND chat_id = @chat_id::uuid
|
||||
RETURNING *;
|
||||
|
||||
-- name: InsertChatDebugStep :one
|
||||
INSERT INTO chat_debug_steps (
|
||||
run_id,
|
||||
chat_id,
|
||||
step_number,
|
||||
operation,
|
||||
status,
|
||||
history_tip_message_id,
|
||||
assistant_message_id,
|
||||
normalized_request,
|
||||
normalized_response,
|
||||
usage,
|
||||
attempts,
|
||||
error,
|
||||
metadata,
|
||||
started_at,
|
||||
updated_at,
|
||||
finished_at
|
||||
)
|
||||
SELECT
|
||||
@run_id::uuid,
|
||||
run.chat_id,
|
||||
@step_number::int,
|
||||
@operation::text,
|
||||
@status::text,
|
||||
sqlc.narg('history_tip_message_id')::bigint,
|
||||
sqlc.narg('assistant_message_id')::bigint,
|
||||
COALESCE(sqlc.narg('normalized_request')::jsonb, '{}'::jsonb),
|
||||
sqlc.narg('normalized_response')::jsonb,
|
||||
sqlc.narg('usage')::jsonb,
|
||||
COALESCE(sqlc.narg('attempts')::jsonb, '[]'::jsonb),
|
||||
sqlc.narg('error')::jsonb,
|
||||
COALESCE(sqlc.narg('metadata')::jsonb, '{}'::jsonb),
|
||||
COALESCE(sqlc.narg('started_at')::timestamptz, NOW()),
|
||||
COALESCE(sqlc.narg('updated_at')::timestamptz, NOW()),
|
||||
sqlc.narg('finished_at')::timestamptz
|
||||
FROM chat_debug_runs run
|
||||
WHERE run.id = @run_id::uuid
|
||||
AND run.chat_id = @chat_id::uuid
|
||||
RETURNING *;
|
||||
|
||||
-- name: UpdateChatDebugStep :one
|
||||
-- Uses COALESCE so that passing NULL from Go means "keep the
|
||||
-- existing value." This is intentional: debug rows follow a
|
||||
-- write-once-finalize pattern where fields are set at creation
|
||||
-- or finalization and never cleared back to NULL.
|
||||
UPDATE chat_debug_steps
|
||||
SET
|
||||
status = COALESCE(sqlc.narg('status')::text, status),
|
||||
history_tip_message_id = COALESCE(sqlc.narg('history_tip_message_id')::bigint, history_tip_message_id),
|
||||
assistant_message_id = COALESCE(sqlc.narg('assistant_message_id')::bigint, assistant_message_id),
|
||||
normalized_request = COALESCE(sqlc.narg('normalized_request')::jsonb, normalized_request),
|
||||
normalized_response = COALESCE(sqlc.narg('normalized_response')::jsonb, normalized_response),
|
||||
usage = COALESCE(sqlc.narg('usage')::jsonb, usage),
|
||||
attempts = COALESCE(sqlc.narg('attempts')::jsonb, attempts),
|
||||
error = COALESCE(sqlc.narg('error')::jsonb, error),
|
||||
metadata = COALESCE(sqlc.narg('metadata')::jsonb, metadata),
|
||||
finished_at = COALESCE(sqlc.narg('finished_at')::timestamptz, finished_at),
|
||||
updated_at = NOW()
|
||||
WHERE id = @id::uuid
|
||||
AND chat_id = @chat_id::uuid
|
||||
RETURNING *;
|
||||
|
||||
-- name: GetChatDebugRunsByChatID :many
|
||||
-- Returns the most recent debug runs for a chat, ordered newest-first.
|
||||
-- Callers must supply an explicit limit to avoid unbounded result sets.
|
||||
SELECT *
|
||||
FROM chat_debug_runs
|
||||
WHERE chat_id = @chat_id::uuid
|
||||
ORDER BY started_at DESC, id DESC
|
||||
LIMIT @limit_val::int;
|
||||
|
||||
-- name: GetChatDebugRunByID :one
|
||||
SELECT *
|
||||
FROM chat_debug_runs
|
||||
WHERE id = @id::uuid;
|
||||
|
||||
-- name: GetChatDebugStepsByRunID :many
|
||||
SELECT *
|
||||
FROM chat_debug_steps
|
||||
WHERE run_id = @run_id::uuid
|
||||
ORDER BY step_number ASC, started_at ASC;
|
||||
|
||||
-- name: DeleteChatDebugDataByChatID :execrows
|
||||
DELETE FROM chat_debug_runs
|
||||
WHERE chat_id = @chat_id::uuid;
|
||||
|
||||
-- name: DeleteChatDebugDataAfterMessageID :execrows
|
||||
WITH affected_runs AS (
|
||||
SELECT DISTINCT run.id
|
||||
FROM chat_debug_runs run
|
||||
WHERE run.chat_id = @chat_id::uuid
|
||||
AND (
|
||||
run.history_tip_message_id > @message_id::bigint
|
||||
OR run.trigger_message_id > @message_id::bigint
|
||||
)
|
||||
|
||||
UNION
|
||||
|
||||
SELECT DISTINCT step.run_id AS id
|
||||
FROM chat_debug_steps step
|
||||
WHERE step.chat_id = @chat_id::uuid
|
||||
AND (
|
||||
step.assistant_message_id > @message_id::bigint
|
||||
OR step.history_tip_message_id > @message_id::bigint
|
||||
)
|
||||
)
|
||||
DELETE FROM chat_debug_runs
|
||||
WHERE chat_id = @chat_id::uuid
|
||||
AND id IN (SELECT id FROM affected_runs);
|
||||
|
||||
-- name: FinalizeStaleChatDebugRows :one
|
||||
-- Marks orphaned in-progress rows as interrupted so they do not stay
|
||||
-- in a non-terminal state forever. The NOT IN list must match the
|
||||
-- terminal statuses defined by ChatDebugStatus in codersdk/chats.go.
|
||||
--
|
||||
-- The steps CTE also catches steps whose parent run was just finalized
|
||||
-- (via run_id IN), because PostgreSQL data-modifying CTEs share the
|
||||
-- same snapshot and cannot see each other's row updates. Without this,
|
||||
-- a step with a recent updated_at would survive its run's finalization
|
||||
-- and remain in 'in_progress' state permanently.
|
||||
WITH finalized_runs AS (
|
||||
UPDATE chat_debug_runs
|
||||
SET
|
||||
status = 'interrupted',
|
||||
updated_at = NOW(),
|
||||
finished_at = NOW()
|
||||
WHERE updated_at < @updated_before::timestamptz
|
||||
AND finished_at IS NULL
|
||||
AND status NOT IN ('completed', 'error', 'interrupted')
|
||||
RETURNING id
|
||||
), finalized_steps AS (
|
||||
UPDATE chat_debug_steps
|
||||
SET
|
||||
status = 'interrupted',
|
||||
updated_at = NOW(),
|
||||
finished_at = NOW()
|
||||
WHERE (
|
||||
updated_at < @updated_before::timestamptz
|
||||
OR run_id IN (SELECT id FROM finalized_runs)
|
||||
)
|
||||
AND finished_at IS NULL
|
||||
AND status NOT IN ('completed', 'error', 'interrupted')
|
||||
RETURNING 1
|
||||
)
|
||||
SELECT
|
||||
(SELECT COUNT(*) FROM finalized_runs)::bigint AS runs_finalized,
|
||||
(SELECT COUNT(*) FROM finalized_steps)::bigint AS steps_finalized;
|
||||
@@ -173,12 +173,11 @@ JOIN pr_costs pc ON pc.pr_key = d.pr_key
|
||||
GROUP BY d.model_config_id, d.display_name, d.model, d.provider
|
||||
ORDER BY total_prs DESC;
|
||||
|
||||
-- name: GetPRInsightsPullRequests :many
|
||||
-- Returns all individual PR rows with cost for the selected time range.
|
||||
-- name: GetPRInsightsRecentPRs :many
|
||||
-- Returns individual PR rows with cost for the recent PRs table.
|
||||
-- Uses two CTEs: pr_costs sums cost for the PR-linked chat and its
|
||||
-- direct children (that lack their own PR), and deduped picks one row
|
||||
-- per PR for metadata. A safety-cap LIMIT guards against unexpectedly
|
||||
-- large result sets from direct API callers.
|
||||
-- per PR for metadata.
|
||||
WITH pr_costs AS (
|
||||
SELECT
|
||||
prc.pr_key,
|
||||
@@ -265,4 +264,4 @@ SELECT * FROM (
|
||||
JOIN pr_costs pc ON pc.pr_key = d.pr_key
|
||||
) sub
|
||||
ORDER BY sub.created_at DESC
|
||||
LIMIT 500;
|
||||
LIMIT @limit_val::int;
|
||||
|
||||
@@ -353,18 +353,20 @@ WHERE
|
||||
ELSE chats.archived = sqlc.narg('archived') :: boolean
|
||||
END
|
||||
AND CASE
|
||||
-- Cursor pagination: the last element on a page acts as the cursor.
|
||||
-- The 4-tuple matches the ORDER BY below. All columns sort DESC
|
||||
-- (pin_order is negated so lower values sort first in DESC order),
|
||||
-- which lets us use a single tuple < comparison.
|
||||
-- This allows using the last element on a page as effectively a cursor.
|
||||
-- This is an important option for scripts that need to paginate without
|
||||
-- duplicating or missing data.
|
||||
WHEN @after_id :: uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN (
|
||||
(CASE WHEN pin_order > 0 THEN 1 ELSE 0 END, -pin_order, updated_at, id) < (
|
||||
-- The pagination cursor is the last ID of the previous page.
|
||||
-- The query is ordered by the updated_at field, so select all
|
||||
-- rows before the cursor.
|
||||
(updated_at, id) < (
|
||||
SELECT
|
||||
CASE WHEN c2.pin_order > 0 THEN 1 ELSE 0 END, -c2.pin_order, c2.updated_at, c2.id
|
||||
updated_at, id
|
||||
FROM
|
||||
chats c2
|
||||
chats
|
||||
WHERE
|
||||
c2.id = @after_id
|
||||
id = @after_id
|
||||
)
|
||||
)
|
||||
ELSE true
|
||||
@@ -376,15 +378,9 @@ WHERE
|
||||
-- Authorize Filter clause will be injected below in GetAuthorizedChats
|
||||
-- @authorize_filter
|
||||
ORDER BY
|
||||
-- Pinned chats (pin_order > 0) sort before unpinned ones. Within
|
||||
-- pinned chats, lower pin_order values come first. The negation
|
||||
-- trick (-pin_order) keeps all sort columns DESC so the cursor
|
||||
-- tuple < comparison works with uniform direction.
|
||||
CASE WHEN pin_order > 0 THEN 1 ELSE 0 END DESC,
|
||||
-pin_order DESC,
|
||||
updated_at DESC,
|
||||
id DESC
|
||||
OFFSET @offset_opt
|
||||
-- Deterministic and consistent ordering of all rows, even if they share
|
||||
-- a timestamp. This is to ensure consistent pagination.
|
||||
(updated_at, id) DESC OFFSET @offset_opt
|
||||
LIMIT
|
||||
-- The chat list is unbounded and expected to grow large.
|
||||
-- Default to 50 to prevent accidental excessively large queries.
|
||||
|
||||
@@ -195,8 +195,7 @@ SELECT
|
||||
w.id AS workspace_id,
|
||||
COALESCE(w.name, '') AS workspace_name,
|
||||
-- Include the name of the provisioner_daemon associated to the job
|
||||
COALESCE(pd.name, '') AS worker_name,
|
||||
wb.transition as workspace_build_transition
|
||||
COALESCE(pd.name, '') AS worker_name
|
||||
FROM
|
||||
provisioner_jobs pj
|
||||
LEFT JOIN
|
||||
@@ -241,8 +240,7 @@ GROUP BY
|
||||
t.icon,
|
||||
w.id,
|
||||
w.name,
|
||||
pd.name,
|
||||
wb.transition
|
||||
pd.name
|
||||
ORDER BY
|
||||
pj.created_at DESC
|
||||
LIMIT
|
||||
|
||||
@@ -179,31 +179,6 @@ SET value = CASE
|
||||
END
|
||||
WHERE site_configs.key = 'agents_desktop_enabled';
|
||||
|
||||
-- GetChatDebugLoggingAllowUsers returns the runtime admin setting that
|
||||
-- allows users to opt into chat debug logging when the deployment does
|
||||
-- not already force debug logging on globally.
|
||||
-- name: GetChatDebugLoggingAllowUsers :one
|
||||
SELECT
|
||||
COALESCE((SELECT value = 'true' FROM site_configs WHERE key = 'agents_chat_debug_logging_allow_users'), false) :: boolean AS allow_users;
|
||||
|
||||
-- UpsertChatDebugLoggingAllowUsers updates the runtime admin setting that
|
||||
-- allows users to opt into chat debug logging.
|
||||
-- name: UpsertChatDebugLoggingAllowUsers :exec
|
||||
INSERT INTO site_configs (key, value)
|
||||
VALUES (
|
||||
'agents_chat_debug_logging_allow_users',
|
||||
CASE
|
||||
WHEN sqlc.arg(allow_users)::bool THEN 'true'
|
||||
ELSE 'false'
|
||||
END
|
||||
)
|
||||
ON CONFLICT (key) DO UPDATE
|
||||
SET value = CASE
|
||||
WHEN sqlc.arg(allow_users)::bool THEN 'true'
|
||||
ELSE 'false'
|
||||
END
|
||||
WHERE site_configs.key = 'agents_chat_debug_logging_allow_users';
|
||||
|
||||
-- GetChatTemplateAllowlist returns the JSON-encoded template allowlist.
|
||||
-- Returns an empty string when no allowlist has been configured (all templates allowed).
|
||||
-- name: GetChatTemplateAllowlist :one
|
||||
|
||||
@@ -56,6 +56,6 @@ SET
|
||||
WHERE user_id = @user_id AND name = @name
|
||||
RETURNING *;
|
||||
|
||||
-- name: DeleteUserSecretByUserIDAndName :execrows
|
||||
-- name: DeleteUserSecretByUserIDAndName :exec
|
||||
DELETE FROM user_secrets
|
||||
WHERE user_id = @user_id AND name = @name;
|
||||
|
||||
@@ -213,33 +213,6 @@ RETURNING *;
|
||||
-- name: DeleteUserChatCompactionThreshold :exec
|
||||
DELETE FROM user_configs WHERE user_id = @user_id AND key = @key;
|
||||
|
||||
-- name: GetUserChatDebugLoggingEnabled :one
|
||||
SELECT
|
||||
COALESCE((
|
||||
SELECT value = 'true'
|
||||
FROM user_configs
|
||||
WHERE user_id = @user_id
|
||||
AND key = 'chat_debug_logging_enabled'
|
||||
), false) :: boolean AS debug_logging_enabled;
|
||||
|
||||
-- name: UpsertUserChatDebugLoggingEnabled :exec
|
||||
INSERT INTO user_configs (user_id, key, value)
|
||||
VALUES (
|
||||
@user_id,
|
||||
'chat_debug_logging_enabled',
|
||||
CASE
|
||||
WHEN sqlc.arg(debug_logging_enabled)::bool THEN 'true'
|
||||
ELSE 'false'
|
||||
END
|
||||
)
|
||||
ON CONFLICT ON CONSTRAINT user_configs_pkey
|
||||
DO UPDATE SET value = CASE
|
||||
WHEN sqlc.arg(debug_logging_enabled)::bool THEN 'true'
|
||||
ELSE 'false'
|
||||
END
|
||||
WHERE user_configs.user_id = @user_id
|
||||
AND user_configs.key = 'chat_debug_logging_enabled';
|
||||
|
||||
-- name: GetUserTaskNotificationAlertDismissed :one
|
||||
SELECT
|
||||
value::boolean as task_notification_alert_dismissed
|
||||
|
||||
@@ -190,14 +190,6 @@ SET
|
||||
WHERE
|
||||
id = $1;
|
||||
|
||||
-- name: UpdateWorkspaceAgentDirectoryByID :exec
|
||||
UPDATE
|
||||
workspace_agents
|
||||
SET
|
||||
directory = $2, updated_at = $3
|
||||
WHERE
|
||||
id = $1;
|
||||
|
||||
-- name: GetWorkspaceAgentLogsAfter :many
|
||||
SELECT
|
||||
*
|
||||
|
||||
@@ -15,8 +15,6 @@ const (
|
||||
UniqueAPIKeysPkey UniqueConstraint = "api_keys_pkey" // ALTER TABLE ONLY api_keys ADD CONSTRAINT api_keys_pkey PRIMARY KEY (id);
|
||||
UniqueAuditLogsPkey UniqueConstraint = "audit_logs_pkey" // ALTER TABLE ONLY audit_logs ADD CONSTRAINT audit_logs_pkey PRIMARY KEY (id);
|
||||
UniqueBoundaryUsageStatsPkey UniqueConstraint = "boundary_usage_stats_pkey" // ALTER TABLE ONLY boundary_usage_stats ADD CONSTRAINT boundary_usage_stats_pkey PRIMARY KEY (replica_id);
|
||||
UniqueChatDebugRunsPkey UniqueConstraint = "chat_debug_runs_pkey" // ALTER TABLE ONLY chat_debug_runs ADD CONSTRAINT chat_debug_runs_pkey PRIMARY KEY (id);
|
||||
UniqueChatDebugStepsPkey UniqueConstraint = "chat_debug_steps_pkey" // ALTER TABLE ONLY chat_debug_steps ADD CONSTRAINT chat_debug_steps_pkey PRIMARY KEY (id);
|
||||
UniqueChatDiffStatusesPkey UniqueConstraint = "chat_diff_statuses_pkey" // ALTER TABLE ONLY chat_diff_statuses ADD CONSTRAINT chat_diff_statuses_pkey PRIMARY KEY (chat_id);
|
||||
UniqueChatFileLinksChatIDFileIDKey UniqueConstraint = "chat_file_links_chat_id_file_id_key" // ALTER TABLE ONLY chat_file_links ADD CONSTRAINT chat_file_links_chat_id_file_id_key UNIQUE (chat_id, file_id);
|
||||
UniqueChatFilesPkey UniqueConstraint = "chat_files_pkey" // ALTER TABLE ONLY chat_files ADD CONSTRAINT chat_files_pkey PRIMARY KEY (id);
|
||||
@@ -130,8 +128,6 @@ const (
|
||||
UniqueWorkspaceResourcesPkey UniqueConstraint = "workspace_resources_pkey" // ALTER TABLE ONLY workspace_resources ADD CONSTRAINT workspace_resources_pkey PRIMARY KEY (id);
|
||||
UniqueWorkspacesPkey UniqueConstraint = "workspaces_pkey" // ALTER TABLE ONLY workspaces ADD CONSTRAINT workspaces_pkey PRIMARY KEY (id);
|
||||
UniqueIndexAPIKeyName UniqueConstraint = "idx_api_key_name" // CREATE UNIQUE INDEX idx_api_key_name ON api_keys USING btree (user_id, token_name) WHERE (login_type = 'token'::login_type);
|
||||
UniqueIndexChatDebugRunsIDChat UniqueConstraint = "idx_chat_debug_runs_id_chat" // CREATE UNIQUE INDEX idx_chat_debug_runs_id_chat ON chat_debug_runs USING btree (id, chat_id);
|
||||
UniqueIndexChatDebugStepsRunStep UniqueConstraint = "idx_chat_debug_steps_run_step" // CREATE UNIQUE INDEX idx_chat_debug_steps_run_step ON chat_debug_steps USING btree (run_id, step_number);
|
||||
UniqueIndexChatModelConfigsSingleDefault UniqueConstraint = "idx_chat_model_configs_single_default" // CREATE UNIQUE INDEX idx_chat_model_configs_single_default ON chat_model_configs USING btree ((1)) WHERE ((is_default = true) AND (deleted = false));
|
||||
UniqueIndexConnectionLogsConnectionIDWorkspaceIDAgentName UniqueConstraint = "idx_connection_logs_connection_id_workspace_id_agent_name" // CREATE UNIQUE INDEX idx_connection_logs_connection_id_workspace_id_agent_name ON connection_logs USING btree (connection_id, workspace_id, agent_name);
|
||||
UniqueIndexCustomRolesNameLowerOrganizationID UniqueConstraint = "idx_custom_roles_name_lower_organization_id" // CREATE UNIQUE INDEX idx_custom_roles_name_lower_organization_id ON custom_roles USING btree (lower(name), COALESCE(organization_id, '00000000-0000-0000-0000-000000000000'::uuid));
|
||||
|
||||
+77
-70
@@ -137,9 +137,8 @@ func publishChatConfigEvent(logger slog.Logger, ps dbpubsub.Pubsub, kind pubsub.
|
||||
func (api *API) watchChats(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
apiKey := httpmw.APIKey(r)
|
||||
logger := api.Logger.Named("chat_watcher")
|
||||
|
||||
conn, err := websocket.Accept(rw, r, nil)
|
||||
sendEvent, senderClosed, err := httpapi.OneWayWebSocketEventSender(api.Logger)(rw, r)
|
||||
if err != nil {
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
Message: "Failed to open chat watch stream.",
|
||||
@@ -147,44 +146,54 @@ func (api *API) watchChats(rw http.ResponseWriter, r *http.Request) {
|
||||
})
|
||||
return
|
||||
}
|
||||
defer func() {
|
||||
<-senderClosed
|
||||
}()
|
||||
|
||||
ctx, cancel := context.WithCancel(ctx)
|
||||
defer cancel()
|
||||
|
||||
_ = conn.CloseRead(context.Background())
|
||||
|
||||
ctx, wsNetConn := codersdk.WebsocketNetConn(ctx, conn, websocket.MessageText)
|
||||
defer wsNetConn.Close()
|
||||
|
||||
go httpapi.HeartbeatClose(ctx, logger, cancel, conn)
|
||||
|
||||
// The encoder is only written from the SubscribeWithErr callback,
|
||||
// which delivers serially per subscription. Do not add a second
|
||||
// write path without introducing synchronization.
|
||||
encoder := json.NewEncoder(wsNetConn)
|
||||
|
||||
cancelSubscribe, err := api.Pubsub.SubscribeWithErr(pubsub.ChatWatchEventChannel(apiKey.UserID),
|
||||
pubsub.HandleChatWatchEvent(
|
||||
func(ctx context.Context, payload codersdk.ChatWatchEvent, err error) {
|
||||
cancelSubscribe, err := api.Pubsub.SubscribeWithErr(pubsub.ChatEventChannel(apiKey.UserID),
|
||||
pubsub.HandleChatEvent(
|
||||
func(ctx context.Context, payload pubsub.ChatEvent, err error) {
|
||||
if err != nil {
|
||||
logger.Error(ctx, "chat watch event subscription error", slog.Error(err))
|
||||
api.Logger.Error(ctx, "chat event subscription error", slog.Error(err))
|
||||
return
|
||||
}
|
||||
if err := encoder.Encode(payload); err != nil {
|
||||
logger.Debug(ctx, "failed to send chat watch event", slog.Error(err))
|
||||
cancel()
|
||||
return
|
||||
if err := sendEvent(codersdk.ServerSentEvent{
|
||||
Type: codersdk.ServerSentEventTypeData,
|
||||
Data: payload,
|
||||
}); err != nil {
|
||||
api.Logger.Debug(ctx, "failed to send chat event", slog.Error(err))
|
||||
}
|
||||
},
|
||||
))
|
||||
if err != nil {
|
||||
logger.Error(ctx, "failed to subscribe to chat watch events", slog.Error(err))
|
||||
_ = conn.Close(websocket.StatusInternalError, "Failed to subscribe to chat events.")
|
||||
if err := sendEvent(codersdk.ServerSentEvent{
|
||||
Type: codersdk.ServerSentEventTypeError,
|
||||
Data: codersdk.Response{
|
||||
Message: "Internal error subscribing to chat events.",
|
||||
Detail: err.Error(),
|
||||
},
|
||||
}); err != nil {
|
||||
api.Logger.Debug(ctx, "failed to send chat subscribe error event", slog.Error(err))
|
||||
}
|
||||
return
|
||||
}
|
||||
defer cancelSubscribe()
|
||||
|
||||
<-ctx.Done()
|
||||
// Send initial ping to signal the connection is ready.
|
||||
if err := sendEvent(codersdk.ServerSentEvent{
|
||||
Type: codersdk.ServerSentEventTypePing,
|
||||
}); err != nil {
|
||||
api.Logger.Debug(ctx, "failed to send chat ping event", slog.Error(err))
|
||||
}
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return
|
||||
case <-senderClosed:
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// EXPERIMENTAL: chatsByWorkspace returns a mapping of workspace ID to
|
||||
@@ -1810,9 +1819,9 @@ func (api *API) patchChat(rw http.ResponseWriter, r *http.Request) {
|
||||
// - pinOrder > 0 && already pinned: reorder (shift
|
||||
// neighbors, clamp to [1, count]).
|
||||
// - pinOrder > 0 && not pinned: append to end. The
|
||||
// requested value is intentionally ignored; the
|
||||
// SQL ORDER BY sorts pinned chats first so they
|
||||
// appear on page 1 of the paginated sidebar.
|
||||
// requested value is intentionally ignored because
|
||||
// PinChatByID also bumps updated_at to keep the
|
||||
// chat visible in the paginated sidebar.
|
||||
var err error
|
||||
errMsg := "Failed to pin chat."
|
||||
switch {
|
||||
@@ -2167,7 +2176,6 @@ func (api *API) streamChat(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
chat := httpmw.ChatParam(r)
|
||||
chatID := chat.ID
|
||||
logger := api.Logger.Named("chat_streamer").With(slog.F("chat_id", chatID))
|
||||
|
||||
if api.chatDaemon == nil {
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
@@ -2190,22 +2198,7 @@ func (api *API) streamChat(rw http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
}
|
||||
|
||||
// Subscribe before accepting the WebSocket so that failures
|
||||
// can still be reported as normal HTTP errors.
|
||||
snapshot, events, cancelSub, ok := api.chatDaemon.Subscribe(ctx, chatID, r.Header, afterMessageID)
|
||||
// Subscribe only fails today when the receiver is nil, which
|
||||
// the chatDaemon == nil guard above already catches. This is
|
||||
// defensive against future Subscribe failure modes.
|
||||
if !ok {
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
Message: "Chat streaming is not available.",
|
||||
Detail: "Chat stream state is not configured.",
|
||||
})
|
||||
return
|
||||
}
|
||||
defer cancelSub()
|
||||
|
||||
conn, err := websocket.Accept(rw, r, nil)
|
||||
sendEvent, senderClosed, err := httpapi.OneWayWebSocketEventSender(api.Logger)(rw, r)
|
||||
if err != nil {
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
Message: "Failed to open chat stream.",
|
||||
@@ -2213,30 +2206,41 @@ func (api *API) streamChat(rw http.ResponseWriter, r *http.Request) {
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
ctx, cancel := context.WithCancel(ctx)
|
||||
snapshot, events, cancel, ok := api.chatDaemon.Subscribe(ctx, chatID, r.Header, afterMessageID)
|
||||
if !ok {
|
||||
if err := sendEvent(codersdk.ServerSentEvent{
|
||||
Type: codersdk.ServerSentEventTypeError,
|
||||
Data: codersdk.Response{
|
||||
Message: "Chat streaming is not available.",
|
||||
Detail: "Chat stream state is not configured.",
|
||||
},
|
||||
}); err != nil {
|
||||
api.Logger.Debug(ctx, "failed to send chat stream unavailable event", slog.Error(err))
|
||||
}
|
||||
// Ensure the WebSocket is closed so senderClosed
|
||||
// completes and the handler can return.
|
||||
<-senderClosed
|
||||
return
|
||||
}
|
||||
defer func() {
|
||||
<-senderClosed
|
||||
}()
|
||||
defer cancel()
|
||||
|
||||
_ = conn.CloseRead(context.Background())
|
||||
|
||||
ctx, wsNetConn := codersdk.WebsocketNetConn(ctx, conn, websocket.MessageText)
|
||||
defer wsNetConn.Close()
|
||||
|
||||
go httpapi.HeartbeatClose(ctx, logger, cancel, conn)
|
||||
|
||||
// Mark the chat as read when the stream connects and again
|
||||
// when it disconnects so we avoid per-message API calls while
|
||||
// messages are actively streaming.
|
||||
api.markChatAsRead(ctx, chatID)
|
||||
defer api.markChatAsRead(context.WithoutCancel(ctx), chatID)
|
||||
|
||||
encoder := json.NewEncoder(wsNetConn)
|
||||
|
||||
sendChatStreamBatch := func(batch []codersdk.ChatStreamEvent) error {
|
||||
if len(batch) == 0 {
|
||||
return nil
|
||||
}
|
||||
return encoder.Encode(batch)
|
||||
return sendEvent(codersdk.ServerSentEvent{
|
||||
Type: codersdk.ServerSentEventTypeData,
|
||||
Data: batch,
|
||||
})
|
||||
}
|
||||
|
||||
drainChatStreamBatch := func(
|
||||
@@ -2269,7 +2273,7 @@ func (api *API) streamChat(rw http.ResponseWriter, r *http.Request) {
|
||||
end = len(snapshot)
|
||||
}
|
||||
if err := sendChatStreamBatch(snapshot[start:end]); err != nil {
|
||||
logger.Debug(ctx, "failed to send chat stream snapshot", slog.Error(err))
|
||||
api.Logger.Debug(ctx, "failed to send chat stream snapshot", slog.Error(err))
|
||||
return
|
||||
}
|
||||
}
|
||||
@@ -2278,6 +2282,8 @@ func (api *API) streamChat(rw http.ResponseWriter, r *http.Request) {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return
|
||||
case <-senderClosed:
|
||||
return
|
||||
case firstEvent, ok := <-events:
|
||||
if !ok {
|
||||
return
|
||||
@@ -2287,7 +2293,7 @@ func (api *API) streamChat(rw http.ResponseWriter, r *http.Request) {
|
||||
chatStreamBatchSize,
|
||||
)
|
||||
if err := sendChatStreamBatch(batch); err != nil {
|
||||
logger.Debug(ctx, "failed to send chat stream event", slog.Error(err))
|
||||
api.Logger.Debug(ctx, "failed to send chat stream event", slog.Error(err))
|
||||
return
|
||||
}
|
||||
if streamClosed {
|
||||
@@ -2302,7 +2308,6 @@ func (api *API) interruptChat(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
chat := httpmw.ChatParam(r)
|
||||
chatID := chat.ID
|
||||
logger := api.Logger.Named("chat_interrupt").With(slog.F("chat_id", chatID))
|
||||
|
||||
if api.chatDaemon != nil {
|
||||
chat = api.chatDaemon.InterruptChat(ctx, chat)
|
||||
@@ -2316,7 +2321,8 @@ func (api *API) interruptChat(rw http.ResponseWriter, r *http.Request) {
|
||||
LastError: sql.NullString{},
|
||||
})
|
||||
if updateErr != nil {
|
||||
logger.Error(ctx, "failed to mark chat as waiting", slog.Error(updateErr))
|
||||
api.Logger.Error(ctx, "failed to mark chat as waiting",
|
||||
slog.F("chat_id", chatID), slog.Error(updateErr))
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
Message: "Failed to interrupt chat.",
|
||||
Detail: updateErr.Error(),
|
||||
@@ -5626,7 +5632,7 @@ func (api *API) prInsights(rw http.ResponseWriter, r *http.Request) {
|
||||
previousSummary database.GetPRInsightsSummaryRow
|
||||
timeSeries []database.GetPRInsightsTimeSeriesRow
|
||||
byModel []database.GetPRInsightsPerModelRow
|
||||
recentPRs []database.GetPRInsightsPullRequestsRow
|
||||
recentPRs []database.GetPRInsightsRecentPRsRow
|
||||
)
|
||||
|
||||
eg, egCtx := errgroup.WithContext(ctx)
|
||||
@@ -5674,10 +5680,11 @@ func (api *API) prInsights(rw http.ResponseWriter, r *http.Request) {
|
||||
|
||||
eg.Go(func() error {
|
||||
var err error
|
||||
recentPRs, err = api.Database.GetPRInsightsPullRequests(egCtx, database.GetPRInsightsPullRequestsParams{
|
||||
recentPRs, err = api.Database.GetPRInsightsRecentPRs(egCtx, database.GetPRInsightsRecentPRsParams{
|
||||
StartDate: startDate,
|
||||
EndDate: endDate,
|
||||
OwnerID: ownerID,
|
||||
LimitVal: 20,
|
||||
})
|
||||
return err
|
||||
})
|
||||
@@ -5787,10 +5794,10 @@ func (api *API) prInsights(rw http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
httpapi.Write(ctx, rw, http.StatusOK, codersdk.PRInsightsResponse{
|
||||
Summary: summary,
|
||||
TimeSeries: tsEntries,
|
||||
ByModel: modelEntries,
|
||||
PullRequests: prEntries,
|
||||
Summary: summary,
|
||||
TimeSeries: tsEntries,
|
||||
ByModel: modelEntries,
|
||||
RecentPRs: prEntries,
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
+106
-207
@@ -876,186 +876,6 @@ func TestListChats(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
require.Len(t, allChats, totalChats)
|
||||
})
|
||||
|
||||
// Test that a pinned chat with an old updated_at appears on page 1.
|
||||
t.Run("PinnedOnFirstPage", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
ctx := testutil.Context(t, testutil.WaitLong)
|
||||
client, _ := newChatClientWithDatabase(t)
|
||||
_ = coderdtest.CreateFirstUser(t, client.Client)
|
||||
_ = createChatModelConfig(t, client)
|
||||
|
||||
// Create the chat that will later be pinned. It gets the
|
||||
// earliest updated_at because it is inserted first.
|
||||
pinnedChat, err := client.CreateChat(ctx, codersdk.CreateChatRequest{
|
||||
Content: []codersdk.ChatInputPart{{
|
||||
Type: codersdk.ChatInputPartTypeText,
|
||||
Text: "pinned-chat",
|
||||
}},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Fill page 1 with newer chats so the pinned chat would
|
||||
// normally be pushed off the first page (default limit 50).
|
||||
const fillerCount = 51
|
||||
fillerChats := make([]codersdk.Chat, 0, fillerCount)
|
||||
for i := range fillerCount {
|
||||
c, createErr := client.CreateChat(ctx, codersdk.CreateChatRequest{
|
||||
Content: []codersdk.ChatInputPart{{
|
||||
Type: codersdk.ChatInputPartTypeText,
|
||||
Text: fmt.Sprintf("filler-%d", i),
|
||||
}},
|
||||
})
|
||||
require.NoError(t, createErr)
|
||||
fillerChats = append(fillerChats, c)
|
||||
}
|
||||
|
||||
// Wait for all chats to reach a terminal status so
|
||||
// updated_at is stable before paginating. A single
|
||||
// polling loop checks every chat per tick to avoid
|
||||
// O(N) separate Eventually loops.
|
||||
allCreated := append([]codersdk.Chat{pinnedChat}, fillerChats...)
|
||||
pending := make(map[uuid.UUID]struct{}, len(allCreated))
|
||||
for _, c := range allCreated {
|
||||
pending[c.ID] = struct{}{}
|
||||
}
|
||||
testutil.Eventually(ctx, t, func(_ context.Context) bool {
|
||||
all, listErr := client.ListChats(ctx, &codersdk.ListChatsOptions{
|
||||
Pagination: codersdk.Pagination{Limit: fillerCount + 10},
|
||||
})
|
||||
if listErr != nil {
|
||||
return false
|
||||
}
|
||||
for _, ch := range all {
|
||||
if _, ok := pending[ch.ID]; ok && ch.Status != codersdk.ChatStatusPending && ch.Status != codersdk.ChatStatusRunning {
|
||||
delete(pending, ch.ID)
|
||||
}
|
||||
}
|
||||
return len(pending) == 0
|
||||
}, testutil.IntervalFast)
|
||||
|
||||
// Pin the earliest chat.
|
||||
err = client.UpdateChat(ctx, pinnedChat.ID, codersdk.UpdateChatRequest{
|
||||
PinOrder: ptr.Ref(int32(1)),
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Fetch page 1 with default limit (50).
|
||||
page1, err := client.ListChats(ctx, &codersdk.ListChatsOptions{
|
||||
Pagination: codersdk.Pagination{Limit: 50},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// The pinned chat must appear on page 1.
|
||||
page1IDs := make(map[uuid.UUID]struct{}, len(page1))
|
||||
for _, c := range page1 {
|
||||
page1IDs[c.ID] = struct{}{}
|
||||
}
|
||||
_, found := page1IDs[pinnedChat.ID]
|
||||
require.True(t, found, "pinned chat should appear on page 1")
|
||||
|
||||
// The pinned chat should be the first item in the list.
|
||||
require.Equal(t, pinnedChat.ID, page1[0].ID, "pinned chat should be first")
|
||||
})
|
||||
|
||||
// Test cursor pagination with a mix of pinned and unpinned chats.
|
||||
t.Run("CursorWithPins", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
ctx := testutil.Context(t, testutil.WaitLong)
|
||||
client, _ := newChatClientWithDatabase(t)
|
||||
_ = coderdtest.CreateFirstUser(t, client.Client)
|
||||
_ = createChatModelConfig(t, client)
|
||||
|
||||
// Create 5 chats: 2 will be pinned, 3 unpinned.
|
||||
const totalChats = 5
|
||||
createdChats := make([]codersdk.Chat, 0, totalChats)
|
||||
for i := range totalChats {
|
||||
c, createErr := client.CreateChat(ctx, codersdk.CreateChatRequest{
|
||||
Content: []codersdk.ChatInputPart{{
|
||||
Type: codersdk.ChatInputPartTypeText,
|
||||
Text: fmt.Sprintf("cursor-pin-chat-%d", i),
|
||||
}},
|
||||
})
|
||||
require.NoError(t, createErr)
|
||||
createdChats = append(createdChats, c)
|
||||
}
|
||||
|
||||
// Wait for all chats to reach terminal status.
|
||||
// Check each chat by ID rather than fetching the full list.
|
||||
testutil.Eventually(ctx, t, func(_ context.Context) bool {
|
||||
for _, c := range createdChats {
|
||||
ch, err := client.GetChat(ctx, c.ID)
|
||||
require.NoError(t, err, "GetChat should succeed for just-created chat %s", c.ID)
|
||||
if ch.Status == codersdk.ChatStatusPending || ch.Status == codersdk.ChatStatusRunning {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}, testutil.IntervalFast)
|
||||
|
||||
// Pin the first two chats (oldest updated_at).
|
||||
err := client.UpdateChat(ctx, createdChats[0].ID, codersdk.UpdateChatRequest{
|
||||
PinOrder: ptr.Ref(int32(1)),
|
||||
})
|
||||
require.NoError(t, err)
|
||||
err = client.UpdateChat(ctx, createdChats[1].ID, codersdk.UpdateChatRequest{
|
||||
PinOrder: ptr.Ref(int32(1)),
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Paginate with limit=2 using cursor (after_id).
|
||||
const pageSize = 2
|
||||
maxPages := totalChats/pageSize + 2
|
||||
var allPaginated []codersdk.Chat
|
||||
var afterID uuid.UUID
|
||||
for range maxPages {
|
||||
opts := &codersdk.ListChatsOptions{
|
||||
Pagination: codersdk.Pagination{Limit: pageSize},
|
||||
}
|
||||
if afterID != uuid.Nil {
|
||||
opts.Pagination.AfterID = afterID
|
||||
}
|
||||
page, listErr := client.ListChats(ctx, opts)
|
||||
require.NoError(t, listErr)
|
||||
if len(page) == 0 {
|
||||
break
|
||||
}
|
||||
allPaginated = append(allPaginated, page...)
|
||||
afterID = page[len(page)-1].ID
|
||||
}
|
||||
|
||||
// All chats should appear exactly once.
|
||||
seenIDs := make(map[uuid.UUID]struct{}, len(allPaginated))
|
||||
for _, c := range allPaginated {
|
||||
_, dup := seenIDs[c.ID]
|
||||
require.False(t, dup, "chat %s appeared more than once", c.ID)
|
||||
seenIDs[c.ID] = struct{}{}
|
||||
}
|
||||
require.Len(t, seenIDs, totalChats, "all chats should appear in paginated results")
|
||||
|
||||
// Pinned chats should come before unpinned ones, and
|
||||
// within the pinned group, lower pin_order sorts first.
|
||||
pinnedSeen := false
|
||||
unpinnedSeen := false
|
||||
for _, c := range allPaginated {
|
||||
if c.PinOrder > 0 {
|
||||
require.False(t, unpinnedSeen, "pinned chat %s appeared after unpinned chat", c.ID)
|
||||
pinnedSeen = true
|
||||
} else {
|
||||
unpinnedSeen = true
|
||||
}
|
||||
}
|
||||
require.True(t, pinnedSeen, "at least one pinned chat should exist")
|
||||
|
||||
// Verify within-pinned ordering: pin_order=1 before
|
||||
// pin_order=2 (the -pin_order DESC column).
|
||||
require.Equal(t, createdChats[0].ID, allPaginated[0].ID,
|
||||
"pin_order=1 chat should be first")
|
||||
require.Equal(t, createdChats[1].ID, allPaginated[1].ID,
|
||||
"pin_order=2 chat should be second")
|
||||
})
|
||||
}
|
||||
|
||||
func TestListChatModels(t *testing.T) {
|
||||
@@ -1242,7 +1062,7 @@ func TestListChatModels(t *testing.T) {
|
||||
|
||||
ctx := testutil.Context(t, testutil.WaitLong)
|
||||
values := chatDeploymentValues(t)
|
||||
values.AI.BridgeConfig.OpenAI.Key = serpent.String("deployment-openai-key")
|
||||
values.AI.BridgeConfig.LegacyOpenAI.Key = serpent.String("deployment-openai-key")
|
||||
client := newChatClientWithDeploymentValues(t, values)
|
||||
_ = coderdtest.CreateFirstUser(t, client.Client)
|
||||
|
||||
@@ -1294,6 +1114,17 @@ func TestWatchChats(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
defer conn.Close(websocket.StatusNormalClosure, "done")
|
||||
|
||||
type watchEvent struct {
|
||||
Type codersdk.ServerSentEventType `json:"type"`
|
||||
Data json.RawMessage `json:"data,omitempty"`
|
||||
}
|
||||
|
||||
var event watchEvent
|
||||
err = wsjson.Read(ctx, conn, &event)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, codersdk.ServerSentEventTypePing, event.Type)
|
||||
require.True(t, len(event.Data) == 0 || string(event.Data) == "null")
|
||||
|
||||
createdChat, err := client.CreateChat(ctx, codersdk.CreateChatRequest{
|
||||
Content: []codersdk.ChatInputPart{
|
||||
{
|
||||
@@ -1305,16 +1136,25 @@ func TestWatchChats(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
|
||||
for {
|
||||
var payload codersdk.ChatWatchEvent
|
||||
err = wsjson.Read(ctx, conn, &payload)
|
||||
var update watchEvent
|
||||
err = wsjson.Read(ctx, conn, &update)
|
||||
require.NoError(t, err)
|
||||
|
||||
if payload.Kind == codersdk.ChatWatchEventKindCreated &&
|
||||
if update.Type == codersdk.ServerSentEventTypePing {
|
||||
continue
|
||||
}
|
||||
require.Equal(t, codersdk.ServerSentEventTypeData, update.Type)
|
||||
|
||||
var payload coderdpubsub.ChatEvent
|
||||
err = json.Unmarshal(update.Data, &payload)
|
||||
require.NoError(t, err)
|
||||
if payload.Kind == coderdpubsub.ChatEventKindCreated &&
|
||||
payload.Chat.ID == createdChat.ID {
|
||||
break
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("CreatedEventIncludesAllChatFields", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
@@ -1334,6 +1174,18 @@ func TestWatchChats(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
defer conn.Close(websocket.StatusNormalClosure, "done")
|
||||
|
||||
type watchEvent struct {
|
||||
Type codersdk.ServerSentEventType `json:"type"`
|
||||
Data json.RawMessage `json:"data,omitempty"`
|
||||
}
|
||||
|
||||
// Skip the initial ping.
|
||||
var event watchEvent
|
||||
err = wsjson.Read(ctx, conn, &event)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, codersdk.ServerSentEventTypePing, event.Type)
|
||||
require.True(t, len(event.Data) == 0 || string(event.Data) == "null")
|
||||
|
||||
createdChat, err := client.CreateChat(ctx, codersdk.CreateChatRequest{
|
||||
Content: []codersdk.ChatInputPart{
|
||||
{
|
||||
@@ -1346,11 +1198,18 @@ func TestWatchChats(t *testing.T) {
|
||||
|
||||
var got codersdk.Chat
|
||||
testutil.Eventually(ctx, t, func(_ context.Context) bool {
|
||||
var payload codersdk.ChatWatchEvent
|
||||
if readErr := wsjson.Read(ctx, conn, &payload); readErr != nil {
|
||||
var update watchEvent
|
||||
if readErr := wsjson.Read(ctx, conn, &update); readErr != nil {
|
||||
return false
|
||||
}
|
||||
if payload.Kind == codersdk.ChatWatchEventKindCreated &&
|
||||
if update.Type != codersdk.ServerSentEventTypeData {
|
||||
return false
|
||||
}
|
||||
var payload coderdpubsub.ChatEvent
|
||||
if unmarshalErr := json.Unmarshal(update.Data, &payload); unmarshalErr != nil {
|
||||
return false
|
||||
}
|
||||
if payload.Kind == coderdpubsub.ChatEventKindCreated &&
|
||||
payload.Chat.ID == createdChat.ID {
|
||||
got = payload.Chat
|
||||
return true
|
||||
@@ -1423,14 +1282,25 @@ func TestWatchChats(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
defer conn.Close(websocket.StatusNormalClosure, "done")
|
||||
|
||||
type watchEvent struct {
|
||||
Type codersdk.ServerSentEventType `json:"type"`
|
||||
Data json.RawMessage `json:"data,omitempty"`
|
||||
}
|
||||
|
||||
// Read the initial ping.
|
||||
var ping watchEvent
|
||||
err = wsjson.Read(ctx, conn, &ping)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, codersdk.ServerSentEventTypePing, ping.Type)
|
||||
|
||||
// Publish a diff_status_change event via pubsub,
|
||||
// mimicking what PublishDiffStatusChange does after
|
||||
// it reads the diff status from the DB.
|
||||
dbStatus, err := db.GetChatDiffStatusByChatID(dbauthz.AsSystemRestricted(ctx), chat.ID)
|
||||
require.NoError(t, err)
|
||||
sdkDiffStatus := db2sdk.ChatDiffStatus(chat.ID, &dbStatus)
|
||||
event := codersdk.ChatWatchEvent{
|
||||
Kind: codersdk.ChatWatchEventKindDiffStatusChange,
|
||||
event := coderdpubsub.ChatEvent{
|
||||
Kind: coderdpubsub.ChatEventKindDiffStatusChange,
|
||||
Chat: codersdk.Chat{
|
||||
ID: chat.ID,
|
||||
OwnerID: chat.OwnerID,
|
||||
@@ -1443,15 +1313,25 @@ func TestWatchChats(t *testing.T) {
|
||||
}
|
||||
payload, err := json.Marshal(event)
|
||||
require.NoError(t, err)
|
||||
err = api.Pubsub.Publish(coderdpubsub.ChatWatchEventChannel(user.UserID), payload)
|
||||
err = api.Pubsub.Publish(coderdpubsub.ChatEventChannel(user.UserID), payload)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Read events until we find the diff_status_change.
|
||||
for {
|
||||
var received codersdk.ChatWatchEvent
|
||||
err = wsjson.Read(ctx, conn, &received)
|
||||
var update watchEvent
|
||||
err = wsjson.Read(ctx, conn, &update)
|
||||
require.NoError(t, err)
|
||||
|
||||
if received.Kind != codersdk.ChatWatchEventKindDiffStatusChange ||
|
||||
if update.Type == codersdk.ServerSentEventTypePing {
|
||||
continue
|
||||
}
|
||||
require.Equal(t, codersdk.ServerSentEventTypeData, update.Type)
|
||||
|
||||
var received coderdpubsub.ChatEvent
|
||||
err = json.Unmarshal(update.Data, &received)
|
||||
require.NoError(t, err)
|
||||
|
||||
if received.Kind != coderdpubsub.ChatEventKindDiffStatusChange ||
|
||||
received.Chat.ID != chat.ID {
|
||||
continue
|
||||
}
|
||||
@@ -1470,6 +1350,7 @@ func TestWatchChats(t *testing.T) {
|
||||
break
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("ArchiveAndUnarchiveEmitEventsForDescendants", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
@@ -1512,13 +1393,31 @@ func TestWatchChats(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
defer conn.Close(websocket.StatusNormalClosure, "done")
|
||||
|
||||
collectLifecycleEvents := func(expectedKind codersdk.ChatWatchEventKind) map[uuid.UUID]codersdk.ChatWatchEvent {
|
||||
type watchEvent struct {
|
||||
Type codersdk.ServerSentEventType `json:"type"`
|
||||
Data json.RawMessage `json:"data,omitempty"`
|
||||
}
|
||||
|
||||
var ping watchEvent
|
||||
err = wsjson.Read(ctx, conn, &ping)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, codersdk.ServerSentEventTypePing, ping.Type)
|
||||
|
||||
collectLifecycleEvents := func(expectedKind coderdpubsub.ChatEventKind) map[uuid.UUID]coderdpubsub.ChatEvent {
|
||||
t.Helper()
|
||||
|
||||
events := make(map[uuid.UUID]codersdk.ChatWatchEvent, 3)
|
||||
events := make(map[uuid.UUID]coderdpubsub.ChatEvent, 3)
|
||||
for len(events) < 3 {
|
||||
var payload codersdk.ChatWatchEvent
|
||||
err = wsjson.Read(ctx, conn, &payload)
|
||||
var update watchEvent
|
||||
err = wsjson.Read(ctx, conn, &update)
|
||||
require.NoError(t, err)
|
||||
if update.Type == codersdk.ServerSentEventTypePing {
|
||||
continue
|
||||
}
|
||||
require.Equal(t, codersdk.ServerSentEventTypeData, update.Type)
|
||||
|
||||
var payload coderdpubsub.ChatEvent
|
||||
err = json.Unmarshal(update.Data, &payload)
|
||||
require.NoError(t, err)
|
||||
if payload.Kind != expectedKind {
|
||||
continue
|
||||
@@ -1528,7 +1427,7 @@ func TestWatchChats(t *testing.T) {
|
||||
return events
|
||||
}
|
||||
|
||||
assertLifecycleEvents := func(events map[uuid.UUID]codersdk.ChatWatchEvent, archived bool) {
|
||||
assertLifecycleEvents := func(events map[uuid.UUID]coderdpubsub.ChatEvent, archived bool) {
|
||||
t.Helper()
|
||||
|
||||
require.Len(t, events, 3)
|
||||
@@ -1541,12 +1440,12 @@ func TestWatchChats(t *testing.T) {
|
||||
|
||||
err = client.UpdateChat(ctx, parentChat.ID, codersdk.UpdateChatRequest{Archived: ptr.Ref(true)})
|
||||
require.NoError(t, err)
|
||||
deletedEvents := collectLifecycleEvents(codersdk.ChatWatchEventKindDeleted)
|
||||
deletedEvents := collectLifecycleEvents(coderdpubsub.ChatEventKindDeleted)
|
||||
assertLifecycleEvents(deletedEvents, true)
|
||||
|
||||
err = client.UpdateChat(ctx, parentChat.ID, codersdk.UpdateChatRequest{Archived: ptr.Ref(false)})
|
||||
require.NoError(t, err)
|
||||
createdEvents := collectLifecycleEvents(codersdk.ChatWatchEventKindCreated)
|
||||
createdEvents := collectLifecycleEvents(coderdpubsub.ChatEventKindCreated)
|
||||
assertLifecycleEvents(createdEvents, false)
|
||||
})
|
||||
|
||||
@@ -1602,7 +1501,7 @@ func TestListChatProviders(t *testing.T) {
|
||||
|
||||
ctx := testutil.Context(t, testutil.WaitLong)
|
||||
values := chatDeploymentValues(t)
|
||||
values.AI.BridgeConfig.OpenAI.Key = serpent.String("deployment-openai-key")
|
||||
values.AI.BridgeConfig.LegacyOpenAI.Key = serpent.String("deployment-openai-key")
|
||||
client := newChatClientWithDeploymentValues(t, values)
|
||||
_ = coderdtest.CreateFirstUser(t, client.Client)
|
||||
|
||||
@@ -1756,7 +1655,7 @@ func TestCreateChatProvider(t *testing.T) {
|
||||
|
||||
ctx := testutil.Context(t, testutil.WaitLong)
|
||||
values := chatDeploymentValues(t)
|
||||
values.AI.BridgeConfig.OpenAI.Key = serpent.String("deployment-openai-key")
|
||||
values.AI.BridgeConfig.LegacyOpenAI.Key = serpent.String("deployment-openai-key")
|
||||
client := newChatClientWithDeploymentValues(t, values)
|
||||
_ = coderdtest.CreateFirstUser(t, client.Client)
|
||||
|
||||
@@ -1958,7 +1857,7 @@ func TestUpdateChatProvider(t *testing.T) {
|
||||
|
||||
ctx := testutil.Context(t, testutil.WaitLong)
|
||||
values := chatDeploymentValues(t)
|
||||
values.AI.BridgeConfig.OpenAI.Key = serpent.String("deployment-openai-key")
|
||||
values.AI.BridgeConfig.LegacyOpenAI.Key = serpent.String("deployment-openai-key")
|
||||
client := newChatClientWithDeploymentValues(t, values)
|
||||
_ = coderdtest.CreateFirstUser(t, client.Client)
|
||||
|
||||
@@ -2194,9 +2093,9 @@ func TestChatProviderAPIKeysFromDeploymentValues(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
values := chatDeploymentValues(t)
|
||||
values.AI.BridgeConfig.OpenAI.Key = serpent.String("deployment-openai-key")
|
||||
values.AI.BridgeConfig.Anthropic.Key = serpent.String("deployment-anthropic-key")
|
||||
values.AI.BridgeConfig.OpenAI.BaseURL = serpent.String("https://custom-openai.example.com")
|
||||
values.AI.BridgeConfig.LegacyOpenAI.Key = serpent.String("deployment-openai-key")
|
||||
values.AI.BridgeConfig.LegacyAnthropic.Key = serpent.String("deployment-anthropic-key")
|
||||
values.AI.BridgeConfig.LegacyOpenAI.BaseURL = serpent.String("https://custom-openai.example.com")
|
||||
|
||||
keys := coderd.ChatProviderAPIKeysFromDeploymentValues(values)
|
||||
require.Equal(t, chatprovider.ProviderAPIKeys{}, keys)
|
||||
@@ -2399,7 +2298,7 @@ func TestUserChatProviderConfigs(t *testing.T) {
|
||||
|
||||
ctx := testutil.Context(t, testutil.WaitLong)
|
||||
values := chatDeploymentValues(t)
|
||||
values.AI.BridgeConfig.OpenAI.Key = serpent.String("deployment-openai-key")
|
||||
values.AI.BridgeConfig.LegacyOpenAI.Key = serpent.String("deployment-openai-key")
|
||||
client := newChatClientWithDeploymentValues(t, values)
|
||||
_ = coderdtest.CreateFirstUser(t, client.Client)
|
||||
|
||||
|
||||
@@ -5,7 +5,6 @@ import (
|
||||
"database/sql"
|
||||
"encoding/hex"
|
||||
"errors"
|
||||
htmltemplate "html/template"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
@@ -147,35 +146,12 @@ func ShowAuthorizePage(accessURL *url.URL) http.HandlerFunc {
|
||||
cancel := params.redirectURL
|
||||
cancelQuery := params.redirectURL.Query()
|
||||
cancelQuery.Add("error", "access_denied")
|
||||
cancelQuery.Add("error_description", "The resource owner or authorization server denied the request")
|
||||
if params.state != "" {
|
||||
cancelQuery.Add("state", params.state)
|
||||
}
|
||||
cancel.RawQuery = cancelQuery.Encode()
|
||||
|
||||
cancelURI := cancel.String()
|
||||
if err := codersdk.ValidateRedirectURIScheme(cancel); err != nil {
|
||||
site.RenderStaticErrorPage(rw, r, site.ErrorPageData{
|
||||
Status: http.StatusBadRequest,
|
||||
HideStatus: false,
|
||||
Title: "Invalid Callback URL",
|
||||
Description: "The application's registered callback URL has an invalid scheme.",
|
||||
Actions: []site.Action{
|
||||
{
|
||||
URL: accessURL.String(),
|
||||
Text: "Back to site",
|
||||
},
|
||||
},
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
site.RenderOAuthAllowPage(rw, r, site.RenderOAuthAllowData{
|
||||
AppIcon: app.Icon,
|
||||
AppName: app.Name,
|
||||
// #nosec G203 -- The scheme is validated by
|
||||
// codersdk.ValidateRedirectURIScheme above.
|
||||
CancelURI: htmltemplate.URL(cancelURI),
|
||||
AppIcon: app.Icon,
|
||||
AppName: app.Name,
|
||||
CancelURI: cancel.String(),
|
||||
RedirectURI: r.URL.String(),
|
||||
CSRFToken: nosurf.Token(r),
|
||||
Username: ua.FriendlyName,
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
package oauth2provider_test
|
||||
|
||||
import (
|
||||
htmltemplate "html/template"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
@@ -21,7 +20,7 @@ func TestOAuthConsentFormIncludesCSRFToken(t *testing.T) {
|
||||
|
||||
site.RenderOAuthAllowPage(rec, req, site.RenderOAuthAllowData{
|
||||
AppName: "Test OAuth App",
|
||||
CancelURI: htmltemplate.URL("https://coder.com/cancel"),
|
||||
CancelURI: "https://coder.com/cancel",
|
||||
RedirectURI: "https://coder.com/oauth2/authorize?client_id=test",
|
||||
CSRFToken: csrfFieldValue,
|
||||
Username: "test-user",
|
||||
|
||||
@@ -435,9 +435,6 @@ func convertProvisionerJobWithQueuePosition(pj database.GetProvisionerJobsByOrga
|
||||
if pj.WorkspaceID.Valid {
|
||||
job.Metadata.WorkspaceID = &pj.WorkspaceID.UUID
|
||||
}
|
||||
if pj.WorkspaceBuildTransition.Valid {
|
||||
job.Metadata.WorkspaceBuildTransition = codersdk.WorkspaceTransition(pj.WorkspaceBuildTransition.WorkspaceTransition)
|
||||
}
|
||||
return job
|
||||
}
|
||||
|
||||
|
||||
@@ -97,14 +97,13 @@ func TestProvisionerJobs(t *testing.T) {
|
||||
|
||||
// Verify that job metadata is correct.
|
||||
assert.Equal(t, job2.Metadata, codersdk.ProvisionerJobMetadata{
|
||||
TemplateVersionName: version.Name,
|
||||
TemplateID: template.ID,
|
||||
TemplateName: template.Name,
|
||||
TemplateDisplayName: template.DisplayName,
|
||||
TemplateIcon: template.Icon,
|
||||
WorkspaceID: &w.ID,
|
||||
WorkspaceName: w.Name,
|
||||
WorkspaceBuildTransition: codersdk.WorkspaceTransitionStart,
|
||||
TemplateVersionName: version.Name,
|
||||
TemplateID: template.ID,
|
||||
TemplateName: template.Name,
|
||||
TemplateDisplayName: template.DisplayName,
|
||||
TemplateIcon: template.Icon,
|
||||
WorkspaceID: &w.ID,
|
||||
WorkspaceName: w.Name,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -14,7 +14,7 @@ import (
|
||||
const ChatConfigEventChannel = "chat:config_change"
|
||||
|
||||
// HandleChatConfigEvent wraps a typed callback for ChatConfigEvent
|
||||
// messages, following the same pattern as HandleChatWatchEvent.
|
||||
// messages, following the same pattern as HandleChatEvent.
|
||||
func HandleChatConfigEvent(cb func(ctx context.Context, payload ChatConfigEvent, err error)) func(ctx context.Context, message []byte, err error) {
|
||||
return func(ctx context.Context, message []byte, err error) {
|
||||
if err != nil {
|
||||
|
||||
@@ -0,0 +1,49 @@
|
||||
package pubsub
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
)
|
||||
|
||||
func ChatEventChannel(ownerID uuid.UUID) string {
|
||||
return fmt.Sprintf("chat:owner:%s", ownerID)
|
||||
}
|
||||
|
||||
func HandleChatEvent(cb func(ctx context.Context, payload ChatEvent, err error)) func(ctx context.Context, message []byte, err error) {
|
||||
return func(ctx context.Context, message []byte, err error) {
|
||||
if err != nil {
|
||||
cb(ctx, ChatEvent{}, xerrors.Errorf("chat event pubsub: %w", err))
|
||||
return
|
||||
}
|
||||
var payload ChatEvent
|
||||
if err := json.Unmarshal(message, &payload); err != nil {
|
||||
cb(ctx, ChatEvent{}, xerrors.Errorf("unmarshal chat event: %w", err))
|
||||
return
|
||||
}
|
||||
|
||||
cb(ctx, payload, err)
|
||||
}
|
||||
}
|
||||
|
||||
type ChatEvent struct {
|
||||
Kind ChatEventKind `json:"kind"`
|
||||
Chat codersdk.Chat `json:"chat"`
|
||||
ToolCalls []codersdk.ChatStreamToolCall `json:"tool_calls,omitempty"`
|
||||
}
|
||||
|
||||
type ChatEventKind string
|
||||
|
||||
const (
|
||||
ChatEventKindStatusChange ChatEventKind = "status_change"
|
||||
ChatEventKindTitleChange ChatEventKind = "title_change"
|
||||
ChatEventKindCreated ChatEventKind = "created"
|
||||
ChatEventKindDeleted ChatEventKind = "deleted"
|
||||
ChatEventKindDiffStatusChange ChatEventKind = "diff_status_change"
|
||||
ChatEventKindActionRequired ChatEventKind = "action_required"
|
||||
)
|
||||
@@ -1,36 +0,0 @@
|
||||
package pubsub
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
)
|
||||
|
||||
// ChatWatchEventChannel returns the pubsub channel for chat
|
||||
// lifecycle events scoped to a single user.
|
||||
func ChatWatchEventChannel(ownerID uuid.UUID) string {
|
||||
return fmt.Sprintf("chat:owner:%s", ownerID)
|
||||
}
|
||||
|
||||
// HandleChatWatchEvent wraps a typed callback for
|
||||
// ChatWatchEvent messages delivered via pubsub.
|
||||
func HandleChatWatchEvent(cb func(ctx context.Context, payload codersdk.ChatWatchEvent, err error)) func(ctx context.Context, message []byte, err error) {
|
||||
return func(ctx context.Context, message []byte, err error) {
|
||||
if err != nil {
|
||||
cb(ctx, codersdk.ChatWatchEvent{}, xerrors.Errorf("chat watch event pubsub: %w", err))
|
||||
return
|
||||
}
|
||||
var payload codersdk.ChatWatchEvent
|
||||
if err := json.Unmarshal(message, &payload); err != nil {
|
||||
cb(ctx, codersdk.ChatWatchEvent{}, xerrors.Errorf("unmarshal chat watch event: %w", err))
|
||||
return
|
||||
}
|
||||
|
||||
cb(ctx, payload, err)
|
||||
}
|
||||
}
|
||||
@@ -1,280 +0,0 @@
|
||||
package coderd
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"errors"
|
||||
"net/http"
|
||||
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/google/uuid"
|
||||
|
||||
"github.com/coder/coder/v2/coderd/database"
|
||||
"github.com/coder/coder/v2/coderd/database/db2sdk"
|
||||
"github.com/coder/coder/v2/coderd/httpapi"
|
||||
"github.com/coder/coder/v2/coderd/httpmw"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
)
|
||||
|
||||
// @Summary Create a new user secret
|
||||
// @ID create-a-new-user-secret
|
||||
// @Security CoderSessionToken
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Tags Secrets
|
||||
// @Param user path string true "User ID, username, or me"
|
||||
// @Param request body codersdk.CreateUserSecretRequest true "Create secret request"
|
||||
// @Success 201 {object} codersdk.UserSecret
|
||||
// @Router /users/{user}/secrets [post]
|
||||
func (api *API) postUserSecret(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
user := httpmw.UserParam(r)
|
||||
|
||||
var req codersdk.CreateUserSecretRequest
|
||||
if !httpapi.Read(ctx, rw, r, &req) {
|
||||
return
|
||||
}
|
||||
|
||||
if req.Name == "" {
|
||||
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
|
||||
Message: "Name is required.",
|
||||
})
|
||||
return
|
||||
}
|
||||
if req.Value == "" {
|
||||
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
|
||||
Message: "Value is required.",
|
||||
})
|
||||
return
|
||||
}
|
||||
envOpts := codersdk.UserSecretEnvValidationOptions{
|
||||
AIGatewayEnabled: api.DeploymentValues.AI.BridgeConfig.Enabled.Value(),
|
||||
}
|
||||
if err := codersdk.UserSecretEnvNameValid(req.EnvName, envOpts); err != nil {
|
||||
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
|
||||
Message: "Invalid environment variable name.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
if err := codersdk.UserSecretFilePathValid(req.FilePath); err != nil {
|
||||
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
|
||||
Message: "Invalid file path.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
secret, err := api.Database.CreateUserSecret(ctx, database.CreateUserSecretParams{
|
||||
ID: uuid.New(),
|
||||
UserID: user.ID,
|
||||
Name: req.Name,
|
||||
Description: req.Description,
|
||||
Value: req.Value,
|
||||
ValueKeyID: sql.NullString{},
|
||||
EnvName: req.EnvName,
|
||||
FilePath: req.FilePath,
|
||||
})
|
||||
if err != nil {
|
||||
if database.IsUniqueViolation(err) {
|
||||
httpapi.Write(ctx, rw, http.StatusConflict, codersdk.Response{
|
||||
Message: "A secret with that name, environment variable, or file path already exists.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
Message: "Internal error creating secret.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
httpapi.Write(ctx, rw, http.StatusCreated, db2sdk.UserSecretFromFull(secret))
|
||||
}
|
||||
|
||||
// @Summary List user secrets
|
||||
// @ID list-user-secrets
|
||||
// @Security CoderSessionToken
|
||||
// @Produce json
|
||||
// @Tags Secrets
|
||||
// @Param user path string true "User ID, username, or me"
|
||||
// @Success 200 {array} codersdk.UserSecret
|
||||
// @Router /users/{user}/secrets [get]
|
||||
func (api *API) getUserSecrets(rw http.ResponseWriter, r *http.Request) { //nolint:revive // Method name matches route.
|
||||
ctx := r.Context()
|
||||
user := httpmw.UserParam(r)
|
||||
|
||||
secrets, err := api.Database.ListUserSecrets(ctx, user.ID)
|
||||
if err != nil {
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
Message: "Internal error listing secrets.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
httpapi.Write(ctx, rw, http.StatusOK, db2sdk.UserSecrets(secrets))
|
||||
}
|
||||
|
||||
// @Summary Get a user secret by name
|
||||
// @ID get-a-user-secret-by-name
|
||||
// @Security CoderSessionToken
|
||||
// @Produce json
|
||||
// @Tags Secrets
|
||||
// @Param user path string true "User ID, username, or me"
|
||||
// @Param name path string true "Secret name"
|
||||
// @Success 200 {object} codersdk.UserSecret
|
||||
// @Router /users/{user}/secrets/{name} [get]
|
||||
func (api *API) getUserSecret(rw http.ResponseWriter, r *http.Request) { //nolint:revive // Method name matches route.
|
||||
ctx := r.Context()
|
||||
user := httpmw.UserParam(r)
|
||||
name := chi.URLParam(r, "name")
|
||||
|
||||
secret, err := api.Database.GetUserSecretByUserIDAndName(ctx, database.GetUserSecretByUserIDAndNameParams{
|
||||
UserID: user.ID,
|
||||
Name: name,
|
||||
})
|
||||
if err != nil {
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
httpapi.ResourceNotFound(rw)
|
||||
return
|
||||
}
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
Message: "Internal error fetching secret.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
httpapi.Write(ctx, rw, http.StatusOK, db2sdk.UserSecretFromFull(secret))
|
||||
}
|
||||
|
||||
// @Summary Update a user secret
|
||||
// @ID update-a-user-secret
|
||||
// @Security CoderSessionToken
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Tags Secrets
|
||||
// @Param user path string true "User ID, username, or me"
|
||||
// @Param name path string true "Secret name"
|
||||
// @Param request body codersdk.UpdateUserSecretRequest true "Update secret request"
|
||||
// @Success 200 {object} codersdk.UserSecret
|
||||
// @Router /users/{user}/secrets/{name} [patch]
|
||||
func (api *API) patchUserSecret(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
user := httpmw.UserParam(r)
|
||||
name := chi.URLParam(r, "name")
|
||||
|
||||
var req codersdk.UpdateUserSecretRequest
|
||||
if !httpapi.Read(ctx, rw, r, &req) {
|
||||
return
|
||||
}
|
||||
|
||||
if req.Value == nil && req.Description == nil && req.EnvName == nil && req.FilePath == nil {
|
||||
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
|
||||
Message: "At least one field must be provided.",
|
||||
})
|
||||
return
|
||||
}
|
||||
if req.EnvName != nil {
|
||||
envOpts := codersdk.UserSecretEnvValidationOptions{
|
||||
AIGatewayEnabled: api.DeploymentValues.AI.BridgeConfig.Enabled.Value(),
|
||||
}
|
||||
if err := codersdk.UserSecretEnvNameValid(*req.EnvName, envOpts); err != nil {
|
||||
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
|
||||
Message: "Invalid environment variable name.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
}
|
||||
if req.FilePath != nil {
|
||||
if err := codersdk.UserSecretFilePathValid(*req.FilePath); err != nil {
|
||||
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
|
||||
Message: "Invalid file path.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
params := database.UpdateUserSecretByUserIDAndNameParams{
|
||||
UserID: user.ID,
|
||||
Name: name,
|
||||
UpdateValue: req.Value != nil,
|
||||
Value: "",
|
||||
ValueKeyID: sql.NullString{},
|
||||
UpdateDescription: req.Description != nil,
|
||||
Description: "",
|
||||
UpdateEnvName: req.EnvName != nil,
|
||||
EnvName: "",
|
||||
UpdateFilePath: req.FilePath != nil,
|
||||
FilePath: "",
|
||||
}
|
||||
if req.Value != nil {
|
||||
params.Value = *req.Value
|
||||
}
|
||||
if req.Description != nil {
|
||||
params.Description = *req.Description
|
||||
}
|
||||
if req.EnvName != nil {
|
||||
params.EnvName = *req.EnvName
|
||||
}
|
||||
if req.FilePath != nil {
|
||||
params.FilePath = *req.FilePath
|
||||
}
|
||||
|
||||
secret, err := api.Database.UpdateUserSecretByUserIDAndName(ctx, params)
|
||||
if err != nil {
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
httpapi.ResourceNotFound(rw)
|
||||
return
|
||||
}
|
||||
if database.IsUniqueViolation(err) {
|
||||
httpapi.Write(ctx, rw, http.StatusConflict, codersdk.Response{
|
||||
Message: "Update would conflict with an existing secret.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
Message: "Internal error updating secret.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
httpapi.Write(ctx, rw, http.StatusOK, db2sdk.UserSecretFromFull(secret))
|
||||
}
|
||||
|
||||
// @Summary Delete a user secret
|
||||
// @ID delete-a-user-secret
|
||||
// @Security CoderSessionToken
|
||||
// @Tags Secrets
|
||||
// @Param user path string true "User ID, username, or me"
|
||||
// @Param name path string true "Secret name"
|
||||
// @Success 204
|
||||
// @Router /users/{user}/secrets/{name} [delete]
|
||||
func (api *API) deleteUserSecret(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
user := httpmw.UserParam(r)
|
||||
name := chi.URLParam(r, "name")
|
||||
|
||||
rowsAffected, err := api.Database.DeleteUserSecretByUserIDAndName(ctx, database.DeleteUserSecretByUserIDAndNameParams{
|
||||
UserID: user.ID,
|
||||
Name: name,
|
||||
})
|
||||
if err != nil {
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
Message: "Internal error deleting secret.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
if rowsAffected == 0 {
|
||||
httpapi.ResourceNotFound(rw)
|
||||
return
|
||||
}
|
||||
|
||||
rw.WriteHeader(http.StatusNoContent)
|
||||
}
|
||||
@@ -1,413 +0,0 @@
|
||||
package coderd_test
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/coder/coder/v2/coderd/coderdtest"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
"github.com/coder/coder/v2/testutil"
|
||||
)
|
||||
|
||||
func TestPostUserSecret(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t, nil)
|
||||
_ = coderdtest.CreateFirstUser(t, client)
|
||||
|
||||
t.Run("Success", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
secret, err := client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "github-token",
|
||||
Value: "ghp_xxxxxxxxxxxx",
|
||||
Description: "Personal GitHub PAT",
|
||||
EnvName: "GITHUB_TOKEN",
|
||||
FilePath: "~/.github-token",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "github-token", secret.Name)
|
||||
assert.Equal(t, "Personal GitHub PAT", secret.Description)
|
||||
assert.Equal(t, "GITHUB_TOKEN", secret.EnvName)
|
||||
assert.Equal(t, "~/.github-token", secret.FilePath)
|
||||
assert.NotZero(t, secret.ID)
|
||||
assert.NotZero(t, secret.CreatedAt)
|
||||
})
|
||||
|
||||
t.Run("MissingName", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
_, err := client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Value: "some-value",
|
||||
})
|
||||
require.Error(t, err)
|
||||
var sdkErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &sdkErr)
|
||||
assert.Equal(t, http.StatusBadRequest, sdkErr.StatusCode())
|
||||
assert.Contains(t, sdkErr.Message, "Name is required")
|
||||
})
|
||||
|
||||
t.Run("MissingValue", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
_, err := client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "missing-value-secret",
|
||||
})
|
||||
require.Error(t, err)
|
||||
var sdkErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &sdkErr)
|
||||
assert.Equal(t, http.StatusBadRequest, sdkErr.StatusCode())
|
||||
assert.Contains(t, sdkErr.Message, "Value is required")
|
||||
})
|
||||
|
||||
t.Run("DuplicateName", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
_, err := client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "dup-secret",
|
||||
Value: "value1",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "dup-secret",
|
||||
Value: "value2",
|
||||
})
|
||||
require.Error(t, err)
|
||||
var sdkErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &sdkErr)
|
||||
assert.Equal(t, http.StatusConflict, sdkErr.StatusCode())
|
||||
})
|
||||
|
||||
t.Run("DuplicateEnvName", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
_, err := client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "env-dup-1",
|
||||
Value: "value1",
|
||||
EnvName: "DUPLICATE_ENV",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "env-dup-2",
|
||||
Value: "value2",
|
||||
EnvName: "DUPLICATE_ENV",
|
||||
})
|
||||
require.Error(t, err)
|
||||
var sdkErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &sdkErr)
|
||||
assert.Equal(t, http.StatusConflict, sdkErr.StatusCode())
|
||||
})
|
||||
|
||||
t.Run("DuplicateFilePath", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
_, err := client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "fp-dup-1",
|
||||
Value: "value1",
|
||||
FilePath: "/tmp/dup-file",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "fp-dup-2",
|
||||
Value: "value2",
|
||||
FilePath: "/tmp/dup-file",
|
||||
})
|
||||
require.Error(t, err)
|
||||
var sdkErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &sdkErr)
|
||||
assert.Equal(t, http.StatusConflict, sdkErr.StatusCode())
|
||||
})
|
||||
|
||||
t.Run("InvalidEnvName", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
_, err := client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "invalid-env-secret",
|
||||
Value: "value",
|
||||
EnvName: "1INVALID",
|
||||
})
|
||||
require.Error(t, err)
|
||||
var sdkErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &sdkErr)
|
||||
assert.Equal(t, http.StatusBadRequest, sdkErr.StatusCode())
|
||||
})
|
||||
|
||||
t.Run("ReservedEnvName", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
_, err := client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "reserved-env-secret",
|
||||
Value: "value",
|
||||
EnvName: "PATH",
|
||||
})
|
||||
require.Error(t, err)
|
||||
var sdkErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &sdkErr)
|
||||
assert.Equal(t, http.StatusBadRequest, sdkErr.StatusCode())
|
||||
})
|
||||
|
||||
t.Run("CoderPrefixEnvName", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
_, err := client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "coder-prefix-secret",
|
||||
Value: "value",
|
||||
EnvName: "CODER_AGENT_TOKEN",
|
||||
})
|
||||
require.Error(t, err)
|
||||
var sdkErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &sdkErr)
|
||||
assert.Equal(t, http.StatusBadRequest, sdkErr.StatusCode())
|
||||
})
|
||||
|
||||
t.Run("InvalidFilePath", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
_, err := client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "bad-path-secret",
|
||||
Value: "value",
|
||||
FilePath: "relative/path",
|
||||
})
|
||||
require.Error(t, err)
|
||||
var sdkErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &sdkErr)
|
||||
assert.Equal(t, http.StatusBadRequest, sdkErr.StatusCode())
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetUserSecrets(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t, nil)
|
||||
_ = coderdtest.CreateFirstUser(t, client)
|
||||
|
||||
// Verify no secrets exist on a fresh user.
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
secrets, err := client.UserSecrets(ctx, codersdk.Me)
|
||||
require.NoError(t, err)
|
||||
assert.Empty(t, secrets)
|
||||
|
||||
t.Run("WithSecrets", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
_, err := client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "list-secret-a",
|
||||
Value: "value-a",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "list-secret-b",
|
||||
Value: "value-b",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
secrets, err := client.UserSecrets(ctx, codersdk.Me)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, secrets, 2)
|
||||
// Sorted by name.
|
||||
assert.Equal(t, "list-secret-a", secrets[0].Name)
|
||||
assert.Equal(t, "list-secret-b", secrets[1].Name)
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetUserSecret(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t, nil)
|
||||
_ = coderdtest.CreateFirstUser(t, client)
|
||||
|
||||
t.Run("Found", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
created, err := client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "get-found-secret",
|
||||
Value: "my-value",
|
||||
EnvName: "GET_FOUND_SECRET",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
got, err := client.UserSecretByName(ctx, codersdk.Me, "get-found-secret")
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, created.ID, got.ID)
|
||||
assert.Equal(t, "get-found-secret", got.Name)
|
||||
assert.Equal(t, "GET_FOUND_SECRET", got.EnvName)
|
||||
})
|
||||
|
||||
t.Run("NotFound", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
_, err := client.UserSecretByName(ctx, codersdk.Me, "nonexistent")
|
||||
require.Error(t, err)
|
||||
var sdkErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &sdkErr)
|
||||
assert.Equal(t, http.StatusNotFound, sdkErr.StatusCode())
|
||||
})
|
||||
}
|
||||
|
||||
func TestPatchUserSecret(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t, nil)
|
||||
_ = coderdtest.CreateFirstUser(t, client)
|
||||
|
||||
t.Run("UpdateDescription", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
_, err := client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "patch-desc-secret",
|
||||
Value: "my-value",
|
||||
Description: "original",
|
||||
EnvName: "PATCH_DESC_ENV",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
newDesc := "updated"
|
||||
updated, err := client.UpdateUserSecret(ctx, codersdk.Me, "patch-desc-secret", codersdk.UpdateUserSecretRequest{
|
||||
Description: &newDesc,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "updated", updated.Description)
|
||||
// Other fields unchanged.
|
||||
assert.Equal(t, "PATCH_DESC_ENV", updated.EnvName)
|
||||
})
|
||||
|
||||
t.Run("NoFields", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
_, err := client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "patch-nofields-secret",
|
||||
Value: "my-value",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = client.UpdateUserSecret(ctx, codersdk.Me, "patch-nofields-secret", codersdk.UpdateUserSecretRequest{})
|
||||
require.Error(t, err)
|
||||
var sdkErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &sdkErr)
|
||||
assert.Equal(t, http.StatusBadRequest, sdkErr.StatusCode())
|
||||
})
|
||||
|
||||
t.Run("NotFound", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
newVal := "new-value"
|
||||
_, err := client.UpdateUserSecret(ctx, codersdk.Me, "nonexistent", codersdk.UpdateUserSecretRequest{
|
||||
Value: &newVal,
|
||||
})
|
||||
require.Error(t, err)
|
||||
var sdkErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &sdkErr)
|
||||
assert.Equal(t, http.StatusNotFound, sdkErr.StatusCode())
|
||||
})
|
||||
|
||||
t.Run("ConflictEnvName", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
_, err := client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "conflict-env-1",
|
||||
Value: "value1",
|
||||
EnvName: "CONFLICT_TAKEN_ENV",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "conflict-env-2",
|
||||
Value: "value2",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
taken := "CONFLICT_TAKEN_ENV"
|
||||
_, err = client.UpdateUserSecret(ctx, codersdk.Me, "conflict-env-2", codersdk.UpdateUserSecretRequest{
|
||||
EnvName: &taken,
|
||||
})
|
||||
require.Error(t, err)
|
||||
var sdkErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &sdkErr)
|
||||
assert.Equal(t, http.StatusConflict, sdkErr.StatusCode())
|
||||
})
|
||||
|
||||
t.Run("ConflictFilePath", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
_, err := client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "conflict-fp-1",
|
||||
Value: "value1",
|
||||
FilePath: "/tmp/conflict-taken",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "conflict-fp-2",
|
||||
Value: "value2",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
taken := "/tmp/conflict-taken"
|
||||
_, err = client.UpdateUserSecret(ctx, codersdk.Me, "conflict-fp-2", codersdk.UpdateUserSecretRequest{
|
||||
FilePath: &taken,
|
||||
})
|
||||
require.Error(t, err)
|
||||
var sdkErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &sdkErr)
|
||||
assert.Equal(t, http.StatusConflict, sdkErr.StatusCode())
|
||||
})
|
||||
}
|
||||
|
||||
func TestDeleteUserSecret(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t, nil)
|
||||
_ = coderdtest.CreateFirstUser(t, client)
|
||||
|
||||
t.Run("Success", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
_, err := client.CreateUserSecret(ctx, codersdk.Me, codersdk.CreateUserSecretRequest{
|
||||
Name: "delete-me-secret",
|
||||
Value: "my-value",
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
err = client.DeleteUserSecret(ctx, codersdk.Me, "delete-me-secret")
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify it's gone.
|
||||
_, err = client.UserSecretByName(ctx, codersdk.Me, "delete-me-secret")
|
||||
require.Error(t, err)
|
||||
var sdkErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &sdkErr)
|
||||
assert.Equal(t, http.StatusNotFound, sdkErr.StatusCode())
|
||||
})
|
||||
|
||||
t.Run("NotFound", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitMedium)
|
||||
|
||||
err := client.DeleteUserSecret(ctx, codersdk.Me, "nonexistent")
|
||||
require.Error(t, err)
|
||||
var sdkErr *codersdk.Error
|
||||
require.ErrorAs(t, err, &sdkErr)
|
||||
assert.Equal(t, http.StatusNotFound, sdkErr.StatusCode())
|
||||
})
|
||||
}
|
||||
+19
-19
@@ -996,7 +996,7 @@ func (p *Server) CreateChat(ctx context.Context, opts CreateOptions) (database.C
|
||||
return database.Chat{}, txErr
|
||||
}
|
||||
|
||||
p.publishChatPubsubEvent(chat, codersdk.ChatWatchEventKindCreated, nil)
|
||||
p.publishChatPubsubEvent(chat, coderdpubsub.ChatEventKindCreated, nil)
|
||||
p.signalWake()
|
||||
return chat, nil
|
||||
}
|
||||
@@ -1158,7 +1158,7 @@ func (p *Server) SendMessage(
|
||||
|
||||
p.publishMessage(opts.ChatID, result.Message)
|
||||
p.publishStatus(opts.ChatID, result.Chat.Status, result.Chat.WorkerID)
|
||||
p.publishChatPubsubEvent(result.Chat, codersdk.ChatWatchEventKindStatusChange, nil)
|
||||
p.publishChatPubsubEvent(result.Chat, coderdpubsub.ChatEventKindStatusChange, nil)
|
||||
p.signalWake()
|
||||
return result, nil
|
||||
}
|
||||
@@ -1301,7 +1301,7 @@ func (p *Server) EditMessage(
|
||||
QueueUpdate: true,
|
||||
})
|
||||
p.publishStatus(opts.ChatID, result.Chat.Status, result.Chat.WorkerID)
|
||||
p.publishChatPubsubEvent(result.Chat, codersdk.ChatWatchEventKindStatusChange, nil)
|
||||
p.publishChatPubsubEvent(result.Chat, coderdpubsub.ChatEventKindStatusChange, nil)
|
||||
p.signalWake()
|
||||
|
||||
return result, nil
|
||||
@@ -1355,10 +1355,10 @@ func (p *Server) ArchiveChat(ctx context.Context, chat database.Chat) error {
|
||||
|
||||
if interrupted {
|
||||
p.publishStatus(chat.ID, statusChat.Status, statusChat.WorkerID)
|
||||
p.publishChatPubsubEvent(statusChat, codersdk.ChatWatchEventKindStatusChange, nil)
|
||||
p.publishChatPubsubEvent(statusChat, coderdpubsub.ChatEventKindStatusChange, nil)
|
||||
}
|
||||
|
||||
p.publishChatPubsubEvents(archivedChats, codersdk.ChatWatchEventKindDeleted)
|
||||
p.publishChatPubsubEvents(archivedChats, coderdpubsub.ChatEventKindDeleted)
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -1373,7 +1373,7 @@ func (p *Server) UnarchiveChat(ctx context.Context, chat database.Chat) error {
|
||||
ctx,
|
||||
chat.ID,
|
||||
"unarchive",
|
||||
codersdk.ChatWatchEventKindCreated,
|
||||
coderdpubsub.ChatEventKindCreated,
|
||||
p.db.UnarchiveChatByID,
|
||||
)
|
||||
}
|
||||
@@ -1382,7 +1382,7 @@ func (p *Server) applyChatLifecycleTransition(
|
||||
ctx context.Context,
|
||||
chatID uuid.UUID,
|
||||
action string,
|
||||
kind codersdk.ChatWatchEventKind,
|
||||
kind coderdpubsub.ChatEventKind,
|
||||
transition func(context.Context, uuid.UUID) ([]database.Chat, error),
|
||||
) error {
|
||||
updatedChats, err := transition(ctx, chatID)
|
||||
@@ -1545,7 +1545,7 @@ func (p *Server) PromoteQueued(
|
||||
})
|
||||
p.publishMessage(opts.ChatID, promoted)
|
||||
p.publishStatus(opts.ChatID, updatedChat.Status, updatedChat.WorkerID)
|
||||
p.publishChatPubsubEvent(updatedChat, codersdk.ChatWatchEventKindStatusChange, nil)
|
||||
p.publishChatPubsubEvent(updatedChat, coderdpubsub.ChatEventKindStatusChange, nil)
|
||||
p.signalWake()
|
||||
|
||||
return result, nil
|
||||
@@ -2092,7 +2092,7 @@ func (p *Server) regenerateChatTitleWithStore(
|
||||
return updatedChat, nil
|
||||
}
|
||||
|
||||
p.publishChatPubsubEvent(updatedChat, codersdk.ChatWatchEventKindTitleChange, nil)
|
||||
p.publishChatPubsubEvent(updatedChat, coderdpubsub.ChatEventKindTitleChange, nil)
|
||||
return updatedChat, nil
|
||||
}
|
||||
|
||||
@@ -2347,7 +2347,7 @@ func (p *Server) setChatWaiting(ctx context.Context, chatID uuid.UUID) (database
|
||||
return database.Chat{}, err
|
||||
}
|
||||
p.publishStatus(chatID, updatedChat.Status, updatedChat.WorkerID)
|
||||
p.publishChatPubsubEvent(updatedChat, codersdk.ChatWatchEventKindStatusChange, nil)
|
||||
p.publishChatPubsubEvent(updatedChat, coderdpubsub.ChatEventKindStatusChange, nil)
|
||||
return updatedChat, nil
|
||||
}
|
||||
|
||||
@@ -3627,7 +3627,7 @@ func (p *Server) publishChatStreamNotify(chatID uuid.UUID, notify coderdpubsub.C
|
||||
}
|
||||
|
||||
// publishChatPubsubEvents broadcasts a lifecycle event for each affected chat.
|
||||
func (p *Server) publishChatPubsubEvents(chats []database.Chat, kind codersdk.ChatWatchEventKind) {
|
||||
func (p *Server) publishChatPubsubEvents(chats []database.Chat, kind coderdpubsub.ChatEventKind) {
|
||||
for _, chat := range chats {
|
||||
p.publishChatPubsubEvent(chat, kind, nil)
|
||||
}
|
||||
@@ -3635,7 +3635,7 @@ func (p *Server) publishChatPubsubEvents(chats []database.Chat, kind codersdk.Ch
|
||||
|
||||
// publishChatPubsubEvent broadcasts a chat lifecycle event via PostgreSQL
|
||||
// pubsub so that all replicas can push updates to watching clients.
|
||||
func (p *Server) publishChatPubsubEvent(chat database.Chat, kind codersdk.ChatWatchEventKind, diffStatus *codersdk.ChatDiffStatus) {
|
||||
func (p *Server) publishChatPubsubEvent(chat database.Chat, kind coderdpubsub.ChatEventKind, diffStatus *codersdk.ChatDiffStatus) {
|
||||
if p.pubsub == nil {
|
||||
return
|
||||
}
|
||||
@@ -3647,7 +3647,7 @@ func (p *Server) publishChatPubsubEvent(chat database.Chat, kind codersdk.ChatWa
|
||||
if diffStatus != nil {
|
||||
sdkChat.DiffStatus = diffStatus
|
||||
}
|
||||
event := codersdk.ChatWatchEvent{
|
||||
event := coderdpubsub.ChatEvent{
|
||||
Kind: kind,
|
||||
Chat: sdkChat,
|
||||
}
|
||||
@@ -3659,7 +3659,7 @@ func (p *Server) publishChatPubsubEvent(chat database.Chat, kind codersdk.ChatWa
|
||||
)
|
||||
return
|
||||
}
|
||||
if err := p.pubsub.Publish(coderdpubsub.ChatWatchEventChannel(chat.OwnerID), payload); err != nil {
|
||||
if err := p.pubsub.Publish(coderdpubsub.ChatEventChannel(chat.OwnerID), payload); err != nil {
|
||||
p.logger.Error(context.Background(), "failed to publish chat pubsub event",
|
||||
slog.F("chat_id", chat.ID),
|
||||
slog.F("kind", kind),
|
||||
@@ -3692,8 +3692,8 @@ func (p *Server) publishChatActionRequired(chat database.Chat, pending []chatloo
|
||||
toolCalls := pendingToStreamToolCalls(pending)
|
||||
sdkChat := db2sdk.Chat(chat, nil, nil)
|
||||
|
||||
event := codersdk.ChatWatchEvent{
|
||||
Kind: codersdk.ChatWatchEventKindActionRequired,
|
||||
event := coderdpubsub.ChatEvent{
|
||||
Kind: coderdpubsub.ChatEventKindActionRequired,
|
||||
Chat: sdkChat,
|
||||
ToolCalls: toolCalls,
|
||||
}
|
||||
@@ -3705,7 +3705,7 @@ func (p *Server) publishChatActionRequired(chat database.Chat, pending []chatloo
|
||||
)
|
||||
return
|
||||
}
|
||||
if err := p.pubsub.Publish(coderdpubsub.ChatWatchEventChannel(chat.OwnerID), payload); err != nil {
|
||||
if err := p.pubsub.Publish(coderdpubsub.ChatEventChannel(chat.OwnerID), payload); err != nil {
|
||||
p.logger.Error(context.Background(), "failed to publish chat action_required pubsub event",
|
||||
slog.F("chat_id", chat.ID),
|
||||
slog.Error(err),
|
||||
@@ -3733,7 +3733,7 @@ func (p *Server) PublishDiffStatusChange(ctx context.Context, chatID uuid.UUID)
|
||||
}
|
||||
|
||||
sdkStatus := db2sdk.ChatDiffStatus(chatID, &dbStatus)
|
||||
p.publishChatPubsubEvent(chat, codersdk.ChatWatchEventKindDiffStatusChange, &sdkStatus)
|
||||
p.publishChatPubsubEvent(chat, coderdpubsub.ChatEventKindDiffStatusChange, &sdkStatus)
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -4215,7 +4215,7 @@ func (p *Server) processChat(ctx context.Context, chat database.Chat) {
|
||||
if title, ok := generatedTitle.Load(); ok {
|
||||
updatedChat.Title = title
|
||||
}
|
||||
p.publishChatPubsubEvent(updatedChat, codersdk.ChatWatchEventKindStatusChange, nil)
|
||||
p.publishChatPubsubEvent(updatedChat, coderdpubsub.ChatEventKindStatusChange, nil)
|
||||
|
||||
// When the chat is parked in requires_action,
|
||||
// publish the stream event and global pubsub event
|
||||
|
||||
@@ -71,14 +71,14 @@ func TestRegenerateChatTitle_PersistsAndBroadcasts(t *testing.T) {
|
||||
updatedChat.Title = wantTitle
|
||||
|
||||
messageEvents := make(chan struct {
|
||||
payload codersdk.ChatWatchEvent
|
||||
payload coderdpubsub.ChatEvent
|
||||
err error
|
||||
}, 1)
|
||||
cancelSub, err := pubsub.SubscribeWithErr(
|
||||
coderdpubsub.ChatWatchEventChannel(ownerID),
|
||||
coderdpubsub.HandleChatWatchEvent(func(_ context.Context, payload codersdk.ChatWatchEvent, err error) {
|
||||
coderdpubsub.ChatEventChannel(ownerID),
|
||||
coderdpubsub.HandleChatEvent(func(_ context.Context, payload coderdpubsub.ChatEvent, err error) {
|
||||
messageEvents <- struct {
|
||||
payload codersdk.ChatWatchEvent
|
||||
payload coderdpubsub.ChatEvent
|
||||
err error
|
||||
}{payload: payload, err: err}
|
||||
}),
|
||||
@@ -184,7 +184,7 @@ func TestRegenerateChatTitle_PersistsAndBroadcasts(t *testing.T) {
|
||||
select {
|
||||
case event := <-messageEvents:
|
||||
require.NoError(t, event.err)
|
||||
require.Equal(t, codersdk.ChatWatchEventKindTitleChange, event.payload.Kind)
|
||||
require.Equal(t, coderdpubsub.ChatEventKindTitleChange, event.payload.Kind)
|
||||
require.Equal(t, chatID, event.payload.Chat.ID)
|
||||
require.Equal(t, wantTitle, event.payload.Chat.Title)
|
||||
case <-time.After(time.Second):
|
||||
@@ -234,14 +234,14 @@ func TestRegenerateChatTitle_PersistsAndBroadcasts_IdleChatReleasesManualLock(t
|
||||
unlockedChat.StartedAt = sql.NullTime{}
|
||||
|
||||
messageEvents := make(chan struct {
|
||||
payload codersdk.ChatWatchEvent
|
||||
payload coderdpubsub.ChatEvent
|
||||
err error
|
||||
}, 1)
|
||||
cancelSub, err := pubsub.SubscribeWithErr(
|
||||
coderdpubsub.ChatWatchEventChannel(ownerID),
|
||||
coderdpubsub.HandleChatWatchEvent(func(_ context.Context, payload codersdk.ChatWatchEvent, err error) {
|
||||
coderdpubsub.ChatEventChannel(ownerID),
|
||||
coderdpubsub.HandleChatEvent(func(_ context.Context, payload coderdpubsub.ChatEvent, err error) {
|
||||
messageEvents <- struct {
|
||||
payload codersdk.ChatWatchEvent
|
||||
payload coderdpubsub.ChatEvent
|
||||
err error
|
||||
}{payload: payload, err: err}
|
||||
}),
|
||||
@@ -373,7 +373,7 @@ func TestRegenerateChatTitle_PersistsAndBroadcasts_IdleChatReleasesManualLock(t
|
||||
select {
|
||||
case event := <-messageEvents:
|
||||
require.NoError(t, event.err)
|
||||
require.Equal(t, codersdk.ChatWatchEventKindTitleChange, event.payload.Kind)
|
||||
require.Equal(t, coderdpubsub.ChatEventKindTitleChange, event.payload.Kind)
|
||||
require.Equal(t, chatID, event.payload.Chat.ID)
|
||||
require.Equal(t, wantTitle, event.payload.Chat.Title)
|
||||
case <-time.After(time.Second):
|
||||
|
||||
@@ -21,6 +21,7 @@ import (
|
||||
|
||||
"cdr.dev/slog/v3"
|
||||
"github.com/coder/coder/v2/coderd/database"
|
||||
coderdpubsub "github.com/coder/coder/v2/coderd/pubsub"
|
||||
"github.com/coder/coder/v2/coderd/x/chatd/chatprompt"
|
||||
"github.com/coder/coder/v2/coderd/x/chatd/chatprovider"
|
||||
"github.com/coder/coder/v2/coderd/x/chatd/chatretry"
|
||||
@@ -159,7 +160,7 @@ func (p *Server) maybeGenerateChatTitle(
|
||||
}
|
||||
chat.Title = title
|
||||
generatedTitle.Store(title)
|
||||
p.publishChatPubsubEvent(chat, codersdk.ChatWatchEventKindTitleChange, nil)
|
||||
p.publishChatPubsubEvent(chat, coderdpubsub.ChatEventKindTitleChange, nil)
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
@@ -574,7 +574,7 @@ func (p *Server) createChildSubagentChatWithOptions(
|
||||
return database.Chat{}, xerrors.Errorf("create child chat: %w", txErr)
|
||||
}
|
||||
|
||||
p.publishChatPubsubEvent(child, codersdk.ChatWatchEventKindCreated, nil)
|
||||
p.publishChatPubsubEvent(child, coderdpubsub.ChatEventKindCreated, nil)
|
||||
p.signalWake()
|
||||
return child, nil
|
||||
}
|
||||
|
||||
@@ -127,8 +127,6 @@ type AIBridgeThread struct {
|
||||
Prompt *string `json:"prompt,omitempty"`
|
||||
Model string `json:"model"`
|
||||
Provider string `json:"provider"`
|
||||
CredentialKind string `json:"credential_kind"`
|
||||
CredentialHint string `json:"credential_hint"`
|
||||
StartedAt time.Time `json:"started_at" format:"date-time"`
|
||||
EndedAt *time.Time `json:"ended_at,omitempty" format:"date-time"`
|
||||
TokenUsage AIBridgeSessionThreadsTokenUsage `json:"token_usage"`
|
||||
|
||||
+97
-130
@@ -547,123 +547,6 @@ type UpdateChatDesktopEnabledRequest struct {
|
||||
EnableDesktop bool `json:"enable_desktop"`
|
||||
}
|
||||
|
||||
// ChatDebugLoggingAdminSettings describes the runtime admin setting
|
||||
// that allows users to opt into chat debug logging.
|
||||
type ChatDebugLoggingAdminSettings struct {
|
||||
AllowUsers bool `json:"allow_users"`
|
||||
ForcedByDeployment bool `json:"forced_by_deployment"`
|
||||
}
|
||||
|
||||
// UserChatDebugLoggingSettings describes whether debug logging is
|
||||
// active for the current user and whether the user may control it.
|
||||
type UserChatDebugLoggingSettings struct {
|
||||
DebugLoggingEnabled bool `json:"debug_logging_enabled"`
|
||||
UserToggleAllowed bool `json:"user_toggle_allowed"`
|
||||
ForcedByDeployment bool `json:"forced_by_deployment"`
|
||||
}
|
||||
|
||||
// UpdateChatDebugLoggingAllowUsersRequest is the admin request to
|
||||
// toggle whether users may opt into chat debug logging.
|
||||
type UpdateChatDebugLoggingAllowUsersRequest struct {
|
||||
AllowUsers bool `json:"allow_users"`
|
||||
}
|
||||
|
||||
// UpdateUserChatDebugLoggingRequest is the per-user request to
|
||||
// opt into or out of chat debug logging.
|
||||
type UpdateUserChatDebugLoggingRequest struct {
|
||||
DebugLoggingEnabled bool `json:"debug_logging_enabled"`
|
||||
}
|
||||
|
||||
// ChatDebugStatus enumerates the lifecycle states shared by debug
|
||||
// runs and steps. These values must match the literals used in
|
||||
// FinalizeStaleChatDebugRows and all insert/update callers.
|
||||
type ChatDebugStatus string
|
||||
|
||||
const (
|
||||
ChatDebugStatusInProgress ChatDebugStatus = "in_progress"
|
||||
ChatDebugStatusCompleted ChatDebugStatus = "completed"
|
||||
ChatDebugStatusError ChatDebugStatus = "error"
|
||||
ChatDebugStatusInterrupted ChatDebugStatus = "interrupted"
|
||||
)
|
||||
|
||||
// ChatDebugRunKind labels the operation that produced the debug
|
||||
// run. Each value corresponds to a distinct call-site in chatd.
|
||||
type ChatDebugRunKind string
|
||||
|
||||
const (
|
||||
ChatDebugRunKindChatTurn ChatDebugRunKind = "chat_turn"
|
||||
ChatDebugRunKindTitleGeneration ChatDebugRunKind = "title_generation"
|
||||
ChatDebugRunKindQuickgen ChatDebugRunKind = "quickgen"
|
||||
ChatDebugRunKindCompaction ChatDebugRunKind = "compaction"
|
||||
)
|
||||
|
||||
// ChatDebugStepOperation labels the model interaction type for a
|
||||
// debug step.
|
||||
type ChatDebugStepOperation string
|
||||
|
||||
const (
|
||||
ChatDebugStepOperationStream ChatDebugStepOperation = "stream"
|
||||
ChatDebugStepOperationGenerate ChatDebugStepOperation = "generate"
|
||||
)
|
||||
|
||||
// ChatDebugRunSummary is a lightweight run entry for list endpoints.
|
||||
type ChatDebugRunSummary struct {
|
||||
ID uuid.UUID `json:"id" format:"uuid"`
|
||||
ChatID uuid.UUID `json:"chat_id" format:"uuid"`
|
||||
Kind ChatDebugRunKind `json:"kind"`
|
||||
Status ChatDebugStatus `json:"status"`
|
||||
Provider *string `json:"provider,omitempty"`
|
||||
Model *string `json:"model,omitempty"`
|
||||
Summary map[string]any `json:"summary"`
|
||||
StartedAt time.Time `json:"started_at" format:"date-time"`
|
||||
UpdatedAt time.Time `json:"updated_at" format:"date-time"`
|
||||
FinishedAt *time.Time `json:"finished_at,omitempty" format:"date-time"`
|
||||
}
|
||||
|
||||
// ChatDebugRun is the detailed run response including steps.
|
||||
// This type is consumed by the run-detail handler added in a later
|
||||
// PR in this stack; it is forward-declared here so that all SDK
|
||||
// types live in the same schema-layer commit.
|
||||
type ChatDebugRun struct {
|
||||
ID uuid.UUID `json:"id" format:"uuid"`
|
||||
ChatID uuid.UUID `json:"chat_id" format:"uuid"`
|
||||
RootChatID *uuid.UUID `json:"root_chat_id,omitempty" format:"uuid"`
|
||||
ParentChatID *uuid.UUID `json:"parent_chat_id,omitempty" format:"uuid"`
|
||||
ModelConfigID *uuid.UUID `json:"model_config_id,omitempty" format:"uuid"`
|
||||
TriggerMessageID *int64 `json:"trigger_message_id,omitempty"`
|
||||
HistoryTipMessageID *int64 `json:"history_tip_message_id,omitempty"`
|
||||
Kind ChatDebugRunKind `json:"kind"`
|
||||
Status ChatDebugStatus `json:"status"`
|
||||
Provider *string `json:"provider,omitempty"`
|
||||
Model *string `json:"model,omitempty"`
|
||||
Summary map[string]any `json:"summary"`
|
||||
StartedAt time.Time `json:"started_at" format:"date-time"`
|
||||
UpdatedAt time.Time `json:"updated_at" format:"date-time"`
|
||||
FinishedAt *time.Time `json:"finished_at,omitempty" format:"date-time"`
|
||||
Steps []ChatDebugStep `json:"steps"`
|
||||
}
|
||||
|
||||
// ChatDebugStep is a single step within a debug run.
|
||||
type ChatDebugStep struct {
|
||||
ID uuid.UUID `json:"id" format:"uuid"`
|
||||
RunID uuid.UUID `json:"run_id" format:"uuid"`
|
||||
ChatID uuid.UUID `json:"chat_id" format:"uuid"`
|
||||
StepNumber int32 `json:"step_number"`
|
||||
Operation ChatDebugStepOperation `json:"operation"`
|
||||
Status ChatDebugStatus `json:"status"`
|
||||
HistoryTipMessageID *int64 `json:"history_tip_message_id,omitempty"`
|
||||
AssistantMessageID *int64 `json:"assistant_message_id,omitempty"`
|
||||
NormalizedRequest map[string]any `json:"normalized_request"`
|
||||
NormalizedResponse map[string]any `json:"normalized_response,omitempty"`
|
||||
Usage map[string]any `json:"usage,omitempty"`
|
||||
Attempts []map[string]any `json:"attempts"`
|
||||
Error map[string]any `json:"error,omitempty"`
|
||||
Metadata map[string]any `json:"metadata"`
|
||||
StartedAt time.Time `json:"started_at" format:"date-time"`
|
||||
UpdatedAt time.Time `json:"updated_at" format:"date-time"`
|
||||
FinishedAt *time.Time `json:"finished_at,omitempty" format:"date-time"`
|
||||
}
|
||||
|
||||
// DefaultChatWorkspaceTTL is the default TTL for chat workspaces.
|
||||
// Zero means disabled — the template's own autostop setting applies.
|
||||
const DefaultChatWorkspaceTTL = 0
|
||||
@@ -1247,6 +1130,11 @@ type ChatStreamEvent struct {
|
||||
ActionRequired *ChatStreamActionRequired `json:"action_required,omitempty"`
|
||||
}
|
||||
|
||||
type chatStreamEnvelope struct {
|
||||
Type ServerSentEventType `json:"type"`
|
||||
Data json.RawMessage `json:"data,omitempty"`
|
||||
}
|
||||
|
||||
// ChatCostSummaryOptions are optional query parameters for GetChatCostSummary.
|
||||
type ChatCostSummaryOptions struct {
|
||||
StartDate time.Time
|
||||
@@ -2099,8 +1987,8 @@ func (c *ExperimentalClient) StreamChat(ctx context.Context, chatID uuid.UUID, o
|
||||
}()
|
||||
|
||||
for {
|
||||
var batch []ChatStreamEvent
|
||||
if err := wsjson.Read(streamCtx, conn, &batch); err != nil {
|
||||
var envelope chatStreamEnvelope
|
||||
if err := wsjson.Read(streamCtx, conn, &envelope); err != nil {
|
||||
if streamCtx.Err() != nil {
|
||||
return
|
||||
}
|
||||
@@ -2117,10 +2005,61 @@ func (c *ExperimentalClient) StreamChat(ctx context.Context, chatID uuid.UUID, o
|
||||
return
|
||||
}
|
||||
|
||||
for _, event := range batch {
|
||||
if !send(event) {
|
||||
switch envelope.Type {
|
||||
case ServerSentEventTypePing:
|
||||
continue
|
||||
case ServerSentEventTypeData:
|
||||
var batch []ChatStreamEvent
|
||||
decodeErr := json.Unmarshal(envelope.Data, &batch)
|
||||
if decodeErr == nil {
|
||||
for _, streamedEvent := range batch {
|
||||
if !send(streamedEvent) {
|
||||
return
|
||||
}
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
{
|
||||
_ = send(ChatStreamEvent{
|
||||
Type: ChatStreamEventTypeError,
|
||||
Error: &ChatStreamError{
|
||||
Message: fmt.Sprintf(
|
||||
"decode chat stream event batch: %v",
|
||||
decodeErr,
|
||||
),
|
||||
},
|
||||
})
|
||||
return
|
||||
}
|
||||
case ServerSentEventTypeError:
|
||||
message := "chat stream returned an error"
|
||||
if len(envelope.Data) > 0 {
|
||||
var response Response
|
||||
if err := json.Unmarshal(envelope.Data, &response); err == nil {
|
||||
message = formatChatStreamResponseError(response)
|
||||
} else {
|
||||
trimmed := strings.TrimSpace(string(envelope.Data))
|
||||
if trimmed != "" {
|
||||
message = trimmed
|
||||
}
|
||||
}
|
||||
}
|
||||
_ = send(ChatStreamEvent{
|
||||
Type: ChatStreamEventTypeError,
|
||||
Error: &ChatStreamError{
|
||||
Message: message,
|
||||
},
|
||||
})
|
||||
return
|
||||
default:
|
||||
_ = send(ChatStreamEvent{
|
||||
Type: ChatStreamEventTypeError,
|
||||
Error: &ChatStreamError{
|
||||
Message: fmt.Sprintf("unknown chat stream event type %q", envelope.Type),
|
||||
},
|
||||
})
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
@@ -2159,8 +2098,8 @@ func (c *ExperimentalClient) WatchChats(ctx context.Context) (<-chan ChatWatchEv
|
||||
}()
|
||||
|
||||
for {
|
||||
var event ChatWatchEvent
|
||||
if err := wsjson.Read(streamCtx, conn, &event); err != nil {
|
||||
var envelope chatStreamEnvelope
|
||||
if err := wsjson.Read(streamCtx, conn, &envelope); err != nil {
|
||||
if streamCtx.Err() != nil {
|
||||
return
|
||||
}
|
||||
@@ -2171,10 +2110,23 @@ func (c *ExperimentalClient) WatchChats(ctx context.Context) (<-chan ChatWatchEv
|
||||
return
|
||||
}
|
||||
|
||||
select {
|
||||
case <-streamCtx.Done():
|
||||
switch envelope.Type {
|
||||
case ServerSentEventTypePing:
|
||||
continue
|
||||
case ServerSentEventTypeData:
|
||||
var event ChatWatchEvent
|
||||
if err := json.Unmarshal(envelope.Data, &event); err != nil {
|
||||
return
|
||||
}
|
||||
select {
|
||||
case <-streamCtx.Done():
|
||||
return
|
||||
case events <- event:
|
||||
}
|
||||
case ServerSentEventTypeError:
|
||||
return
|
||||
default:
|
||||
return
|
||||
case events <- event:
|
||||
}
|
||||
}
|
||||
}()
|
||||
@@ -2526,12 +2478,27 @@ func (c *ExperimentalClient) GetChatsByWorkspace(ctx context.Context, workspaceI
|
||||
return result, json.NewDecoder(res.Body).Decode(&result)
|
||||
}
|
||||
|
||||
func formatChatStreamResponseError(response Response) string {
|
||||
message := strings.TrimSpace(response.Message)
|
||||
detail := strings.TrimSpace(response.Detail)
|
||||
switch {
|
||||
case message == "" && detail == "":
|
||||
return "chat stream returned an error"
|
||||
case message == "":
|
||||
return detail
|
||||
case detail == "":
|
||||
return message
|
||||
default:
|
||||
return fmt.Sprintf("%s: %s", message, detail)
|
||||
}
|
||||
}
|
||||
|
||||
// PRInsightsResponse is the response from the PR insights endpoint.
|
||||
type PRInsightsResponse struct {
|
||||
Summary PRInsightsSummary `json:"summary"`
|
||||
TimeSeries []PRInsightsTimeSeriesEntry `json:"time_series"`
|
||||
ByModel []PRInsightsModelBreakdown `json:"by_model"`
|
||||
PullRequests []PRInsightsPullRequest `json:"recent_prs"`
|
||||
Summary PRInsightsSummary `json:"summary"`
|
||||
TimeSeries []PRInsightsTimeSeriesEntry `json:"time_series"`
|
||||
ByModel []PRInsightsModelBreakdown `json:"by_model"`
|
||||
RecentPRs []PRInsightsPullRequest `json:"recent_prs"`
|
||||
}
|
||||
|
||||
// PRInsightsSummary contains aggregate PR metrics for a time period,
|
||||
|
||||
+53
-37
@@ -3624,16 +3624,6 @@ Write out the current server config as YAML to stdout.`,
|
||||
YAML: "acquireBatchSize",
|
||||
Hidden: true, // Hidden because most operators should not need to modify this.
|
||||
},
|
||||
{
|
||||
Name: "Chat: Debug Logging Enabled",
|
||||
Description: "Force chat debug logging on for every chat, bypassing the runtime admin and user opt-in settings.",
|
||||
Flag: "chat-debug-logging-enabled",
|
||||
Env: "CODER_CHAT_DEBUG_LOGGING_ENABLED",
|
||||
Value: &c.AI.Chat.DebugLoggingEnabled,
|
||||
Default: "false",
|
||||
Group: &deploymentGroupChat,
|
||||
YAML: "debugLoggingEnabled",
|
||||
},
|
||||
// AI Bridge Options
|
||||
{
|
||||
Name: "AI Bridge Enabled",
|
||||
@@ -3650,7 +3640,7 @@ Write out the current server config as YAML to stdout.`,
|
||||
Description: "The base URL of the OpenAI API.",
|
||||
Flag: "aibridge-openai-base-url",
|
||||
Env: "CODER_AIBRIDGE_OPENAI_BASE_URL",
|
||||
Value: &c.AI.BridgeConfig.OpenAI.BaseURL,
|
||||
Value: &c.AI.BridgeConfig.LegacyOpenAI.BaseURL,
|
||||
Default: "https://api.openai.com/v1/",
|
||||
Group: &deploymentGroupAIBridge,
|
||||
YAML: "openai_base_url",
|
||||
@@ -3660,7 +3650,7 @@ Write out the current server config as YAML to stdout.`,
|
||||
Description: "The key to authenticate against the OpenAI API.",
|
||||
Flag: "aibridge-openai-key",
|
||||
Env: "CODER_AIBRIDGE_OPENAI_KEY",
|
||||
Value: &c.AI.BridgeConfig.OpenAI.Key,
|
||||
Value: &c.AI.BridgeConfig.LegacyOpenAI.Key,
|
||||
Default: "",
|
||||
Group: &deploymentGroupAIBridge,
|
||||
Annotations: serpent.Annotations{}.Mark(annotationSecretKey, "true"),
|
||||
@@ -3670,7 +3660,7 @@ Write out the current server config as YAML to stdout.`,
|
||||
Description: "The base URL of the Anthropic API.",
|
||||
Flag: "aibridge-anthropic-base-url",
|
||||
Env: "CODER_AIBRIDGE_ANTHROPIC_BASE_URL",
|
||||
Value: &c.AI.BridgeConfig.Anthropic.BaseURL,
|
||||
Value: &c.AI.BridgeConfig.LegacyAnthropic.BaseURL,
|
||||
Default: "https://api.anthropic.com/",
|
||||
Group: &deploymentGroupAIBridge,
|
||||
YAML: "anthropic_base_url",
|
||||
@@ -3680,7 +3670,7 @@ Write out the current server config as YAML to stdout.`,
|
||||
Description: "The key to authenticate against the Anthropic API.",
|
||||
Flag: "aibridge-anthropic-key",
|
||||
Env: "CODER_AIBRIDGE_ANTHROPIC_KEY",
|
||||
Value: &c.AI.BridgeConfig.Anthropic.Key,
|
||||
Value: &c.AI.BridgeConfig.LegacyAnthropic.Key,
|
||||
Default: "",
|
||||
Group: &deploymentGroupAIBridge,
|
||||
Annotations: serpent.Annotations{}.Mark(annotationSecretKey, "true"),
|
||||
@@ -3691,7 +3681,7 @@ Write out the current server config as YAML to stdout.`,
|
||||
"over CODER_AIBRIDGE_BEDROCK_REGION.",
|
||||
Flag: "aibridge-bedrock-base-url",
|
||||
Env: "CODER_AIBRIDGE_BEDROCK_BASE_URL",
|
||||
Value: &c.AI.BridgeConfig.Bedrock.BaseURL,
|
||||
Value: &c.AI.BridgeConfig.LegacyBedrock.BaseURL,
|
||||
Default: "",
|
||||
Group: &deploymentGroupAIBridge,
|
||||
YAML: "bedrock_base_url",
|
||||
@@ -3702,7 +3692,7 @@ Write out the current server config as YAML to stdout.`,
|
||||
"'https://bedrock-runtime.<region>.amazonaws.com'.",
|
||||
Flag: "aibridge-bedrock-region",
|
||||
Env: "CODER_AIBRIDGE_BEDROCK_REGION",
|
||||
Value: &c.AI.BridgeConfig.Bedrock.Region,
|
||||
Value: &c.AI.BridgeConfig.LegacyBedrock.Region,
|
||||
Default: "",
|
||||
Group: &deploymentGroupAIBridge,
|
||||
YAML: "bedrock_region",
|
||||
@@ -3712,7 +3702,7 @@ Write out the current server config as YAML to stdout.`,
|
||||
Description: "The access key to authenticate against the AWS Bedrock API.",
|
||||
Flag: "aibridge-bedrock-access-key",
|
||||
Env: "CODER_AIBRIDGE_BEDROCK_ACCESS_KEY",
|
||||
Value: &c.AI.BridgeConfig.Bedrock.AccessKey,
|
||||
Value: &c.AI.BridgeConfig.LegacyBedrock.AccessKey,
|
||||
Default: "",
|
||||
Group: &deploymentGroupAIBridge,
|
||||
Annotations: serpent.Annotations{}.Mark(annotationSecretKey, "true"),
|
||||
@@ -3722,7 +3712,7 @@ Write out the current server config as YAML to stdout.`,
|
||||
Description: "The access key secret to use with the access key to authenticate against the AWS Bedrock API.",
|
||||
Flag: "aibridge-bedrock-access-key-secret",
|
||||
Env: "CODER_AIBRIDGE_BEDROCK_ACCESS_KEY_SECRET",
|
||||
Value: &c.AI.BridgeConfig.Bedrock.AccessKeySecret,
|
||||
Value: &c.AI.BridgeConfig.LegacyBedrock.AccessKeySecret,
|
||||
Default: "",
|
||||
Group: &deploymentGroupAIBridge,
|
||||
Annotations: serpent.Annotations{}.Mark(annotationSecretKey, "true"),
|
||||
@@ -3732,7 +3722,7 @@ Write out the current server config as YAML to stdout.`,
|
||||
Description: "The model to use when making requests to the AWS Bedrock API.",
|
||||
Flag: "aibridge-bedrock-model",
|
||||
Env: "CODER_AIBRIDGE_BEDROCK_MODEL",
|
||||
Value: &c.AI.BridgeConfig.Bedrock.Model,
|
||||
Value: &c.AI.BridgeConfig.LegacyBedrock.Model,
|
||||
Default: "global.anthropic.claude-sonnet-4-5-20250929-v1:0", // See https://docs.claude.com/en/api/claude-on-amazon-bedrock#accessing-bedrock.
|
||||
Group: &deploymentGroupAIBridge,
|
||||
YAML: "bedrock_model",
|
||||
@@ -3742,7 +3732,7 @@ Write out the current server config as YAML to stdout.`,
|
||||
Description: "The small fast model to use when making requests to the AWS Bedrock API. Claude Code uses Haiku-class models to perform background tasks. See https://docs.claude.com/en/docs/claude-code/settings#environment-variables.",
|
||||
Flag: "aibridge-bedrock-small-fastmodel",
|
||||
Env: "CODER_AIBRIDGE_BEDROCK_SMALL_FAST_MODEL",
|
||||
Value: &c.AI.BridgeConfig.Bedrock.SmallFastModel,
|
||||
Value: &c.AI.BridgeConfig.LegacyBedrock.SmallFastModel,
|
||||
Default: "global.anthropic.claude-haiku-4-5-20251001-v1:0", // See https://docs.claude.com/en/api/claude-on-amazon-bedrock#accessing-bedrock.
|
||||
Group: &deploymentGroupAIBridge,
|
||||
YAML: "bedrock_small_fast_model",
|
||||
@@ -3940,17 +3930,15 @@ Write out the current server config as YAML to stdout.`,
|
||||
YAML: "key_file",
|
||||
},
|
||||
{
|
||||
Name: "AI Bridge Proxy Domain Allowlist",
|
||||
Description: "Comma-separated list of AI provider domains for which HTTPS traffic will be decrypted and routed through AI Bridge. " +
|
||||
"Requests to other domains will be tunneled directly without decryption. " +
|
||||
"Supported domains: api.anthropic.com, api.openai.com, api.individual.githubcopilot.com, api.business.githubcopilot.com, api.enterprise.githubcopilot.com, chatgpt.com.",
|
||||
Flag: "aibridge-proxy-domain-allowlist",
|
||||
Env: "CODER_AIBRIDGE_PROXY_DOMAIN_ALLOWLIST",
|
||||
Value: &c.AI.BridgeProxyConfig.DomainAllowlist,
|
||||
Default: "api.anthropic.com,api.openai.com,api.individual.githubcopilot.com,api.business.githubcopilot.com,api.enterprise.githubcopilot.com,chatgpt.com",
|
||||
Hidden: true,
|
||||
Group: &deploymentGroupAIBridgeProxy,
|
||||
YAML: "domain_allowlist",
|
||||
Name: "AI Bridge Proxy Domain Allowlist",
|
||||
Description: "Deprecated: This value is now derived automatically from the configured AI Bridge providers' base URLs. This option will be removed in a future release.",
|
||||
Flag: "aibridge-proxy-domain-allowlist",
|
||||
Env: "CODER_AIBRIDGE_PROXY_DOMAIN_ALLOWLIST",
|
||||
Value: &c.AI.BridgeProxyConfig.DomainAllowlist,
|
||||
Default: "",
|
||||
Hidden: true,
|
||||
Group: &deploymentGroupAIBridgeProxy,
|
||||
YAML: "domain_allowlist",
|
||||
},
|
||||
{
|
||||
Name: "AI Bridge Proxy Upstream Proxy",
|
||||
@@ -4047,10 +4035,16 @@ Write out the current server config as YAML to stdout.`,
|
||||
}
|
||||
|
||||
type AIBridgeConfig struct {
|
||||
Enabled serpent.Bool `json:"enabled" typescript:",notnull"`
|
||||
OpenAI AIBridgeOpenAIConfig `json:"openai" typescript:",notnull"`
|
||||
Anthropic AIBridgeAnthropicConfig `json:"anthropic" typescript:",notnull"`
|
||||
Bedrock AIBridgeBedrockConfig `json:"bedrock" typescript:",notnull"`
|
||||
Enabled serpent.Bool `json:"enabled" typescript:",notnull"`
|
||||
// Deprecated: Use Providers with indexed CODER_AIBRIDGE_PROVIDER_<N>_* env vars instead.
|
||||
LegacyOpenAI AIBridgeOpenAIConfig `json:"openai" typescript:",notnull"`
|
||||
// Deprecated: Use Providers with indexed CODER_AIBRIDGE_PROVIDER_<N>_* env vars instead.
|
||||
LegacyAnthropic AIBridgeAnthropicConfig `json:"anthropic" typescript:",notnull"`
|
||||
// Deprecated: Use Providers with indexed CODER_AIBRIDGE_PROVIDER_<N>_* env vars instead.
|
||||
LegacyBedrock AIBridgeBedrockConfig `json:"bedrock" typescript:",notnull"`
|
||||
// Providers holds provider instances populated from CODER_AIBRIDGE_PROVIDER_<N>_<KEY>
|
||||
// env vars and/or the deprecated LegacyOpenAI/LegacyAnthropic/LegacyBedrock fields above.
|
||||
Providers []AIBridgeProviderConfig `json:"providers,omitempty"`
|
||||
// Deprecated: Injected MCP in AI Bridge is deprecated and will be removed in a future release.
|
||||
InjectCoderMCPTools serpent.Bool `json:"inject_coder_mcp_tools" typescript:",notnull"`
|
||||
Retention serpent.Duration `json:"retention" typescript:",notnull"`
|
||||
@@ -4086,6 +4080,29 @@ type AIBridgeBedrockConfig struct {
|
||||
SmallFastModel serpent.String `json:"small_fast_model" typescript:",notnull"`
|
||||
}
|
||||
|
||||
// AIBridgeProviderConfig represents a single AI Bridge provider instance,
// parsed from CODER_AIBRIDGE_PROVIDER_<N>_<KEY> environment variables.
// This follows the same indexed pattern as ExternalAuthConfig.
type AIBridgeProviderConfig struct {
	// Type is the provider type: "openai", "anthropic", or "copilot".
	Type string `json:"type"`
	// Name is the unique instance identifier used for routing.
	// Defaults to Type if not provided.
	Name string `json:"name"`
	// Key is the API key for authenticating with the upstream provider.
	// Excluded from JSON serialization (json:"-") so the credential is
	// never emitted in config output.
	Key string `json:"-"`
	// BaseURL is the base URL of the upstream provider API.
	BaseURL string `json:"base_url"`

	// Bedrock fields (only applicable when Type == "anthropic").
	// The endpoint and credential fields carry json:"-" so they are
	// omitted from serialized config; only non-sensitive model/region
	// settings are emitted.
	BedrockBaseURL         string `json:"-"`
	BedrockRegion          string `json:"bedrock_region,omitempty"`
	BedrockAccessKey       string `json:"-"`
	BedrockAccessKeySecret string `json:"-"`
	BedrockModel           string `json:"bedrock_model,omitempty"`
	BedrockSmallFastModel  string `json:"bedrock_small_fast_model,omitempty"`
}
|
||||
|
||||
type AIBridgeProxyConfig struct {
|
||||
Enabled serpent.Bool `json:"enabled" typescript:",notnull"`
|
||||
ListenAddr serpent.String `json:"listen_addr" typescript:",notnull"`
|
||||
@@ -4100,8 +4117,7 @@ type AIBridgeProxyConfig struct {
|
||||
}
|
||||
|
||||
type ChatConfig struct {
|
||||
AcquireBatchSize serpent.Int64 `json:"acquire_batch_size" typescript:",notnull"`
|
||||
DebugLoggingEnabled serpent.Bool `json:"debug_logging_enabled" typescript:",notnull"`
|
||||
AcquireBatchSize serpent.Int64 `json:"acquire_batch_size" typescript:",notnull"`
|
||||
}
|
||||
|
||||
type AIConfig struct {
|
||||
|
||||
@@ -75,49 +75,6 @@ func (req *OAuth2ClientRegistrationRequest) Validate() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// ValidateRedirectURIScheme reports whether the callback URL's scheme is
|
||||
// safe to use as a redirect target. It returns an error when the scheme
|
||||
// is empty, an unsupported URN, or one of the schemes that are dangerous
|
||||
// in browser/HTML contexts (javascript, data, file, ftp).
|
||||
//
|
||||
// Legitimate custom schemes for native apps (e.g. vscode://, jetbrains://)
|
||||
// are allowed.
|
||||
// ValidateRedirectURIScheme reports whether the callback URL's scheme is
|
||||
// safe to use as a redirect target. It returns an error when the scheme
|
||||
// is empty, an unsupported URN, or one of the schemes that are dangerous
|
||||
// in browser/HTML contexts (javascript, data, file, ftp).
|
||||
//
|
||||
// Legitimate custom schemes for native apps (e.g. vscode://, jetbrains://)
|
||||
// are allowed.
|
||||
func ValidateRedirectURIScheme(u *url.URL) error {
|
||||
return validateScheme(u)
|
||||
}
|
||||
|
||||
func validateScheme(u *url.URL) error {
|
||||
if u.Scheme == "" {
|
||||
return xerrors.New("redirect URI must have a scheme")
|
||||
}
|
||||
|
||||
// Handle special URNs (RFC 6749 section 3.1.2.1).
|
||||
if u.Scheme == "urn" {
|
||||
if u.String() == "urn:ietf:wg:oauth:2.0:oob" {
|
||||
return nil
|
||||
}
|
||||
return xerrors.New("redirect URI uses unsupported URN scheme")
|
||||
}
|
||||
|
||||
// Block dangerous schemes for security (not allowed by RFCs
|
||||
// for OAuth2).
|
||||
dangerousSchemes := []string{"javascript", "data", "file", "ftp"}
|
||||
for _, dangerous := range dangerousSchemes {
|
||||
if strings.EqualFold(u.Scheme, dangerous) {
|
||||
return xerrors.Errorf("redirect URI uses dangerous scheme %s which is not allowed", dangerous)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// validateRedirectURIs validates redirect URIs according to RFC 7591, 8252
|
||||
func validateRedirectURIs(uris []string, tokenEndpointAuthMethod OAuth2TokenEndpointAuthMethod) error {
|
||||
if len(uris) == 0 {
|
||||
@@ -134,14 +91,27 @@ func validateRedirectURIs(uris []string, tokenEndpointAuthMethod OAuth2TokenEndp
|
||||
return xerrors.Errorf("redirect URI at index %d is not a valid URL: %w", i, err)
|
||||
}
|
||||
|
||||
if err := validateScheme(uri); err != nil {
|
||||
return xerrors.Errorf("redirect URI at index %d: %w", i, err)
|
||||
// Validate schemes according to RFC requirements
|
||||
if uri.Scheme == "" {
|
||||
return xerrors.Errorf("redirect URI at index %d must have a scheme", i)
|
||||
}
|
||||
|
||||
// The urn:ietf:wg:oauth:2.0:oob scheme passed validation
|
||||
// above but needs no further checks.
|
||||
// Handle special URNs (RFC 6749 section 3.1.2.1)
|
||||
if uri.Scheme == "urn" {
|
||||
continue
|
||||
// Allow the out-of-band redirect URI for native apps
|
||||
if uriStr == "urn:ietf:wg:oauth:2.0:oob" {
|
||||
continue // This is valid for native apps
|
||||
}
|
||||
// Other URNs are not standard for OAuth2
|
||||
return xerrors.Errorf("redirect URI at index %d uses unsupported URN scheme", i)
|
||||
}
|
||||
|
||||
// Block dangerous schemes for security (not allowed by RFCs for OAuth2)
|
||||
dangerousSchemes := []string{"javascript", "data", "file", "ftp"}
|
||||
for _, dangerous := range dangerousSchemes {
|
||||
if strings.EqualFold(uri.Scheme, dangerous) {
|
||||
return xerrors.Errorf("redirect URI at index %d uses dangerous scheme %s which is not allowed", i, dangerous)
|
||||
}
|
||||
}
|
||||
|
||||
// Determine if this is a public client based on token endpoint auth method
|
||||
|
||||
@@ -143,14 +143,13 @@ type ProvisionerJobInput struct {
|
||||
|
||||
// ProvisionerJobMetadata contains metadata for the job.
|
||||
type ProvisionerJobMetadata struct {
|
||||
TemplateVersionName string `json:"template_version_name" table:"template version name"`
|
||||
TemplateID uuid.UUID `json:"template_id" format:"uuid" table:"template id"`
|
||||
TemplateName string `json:"template_name" table:"template name"`
|
||||
TemplateDisplayName string `json:"template_display_name" table:"template display name"`
|
||||
TemplateIcon string `json:"template_icon" table:"template icon"`
|
||||
WorkspaceID *uuid.UUID `json:"workspace_id,omitempty" format:"uuid" table:"workspace id"`
|
||||
WorkspaceName string `json:"workspace_name,omitempty" table:"workspace name"`
|
||||
WorkspaceBuildTransition WorkspaceTransition `json:"workspace_build_transition,omitempty" table:"workspace build transition"`
|
||||
TemplateVersionName string `json:"template_version_name" table:"template version name"`
|
||||
TemplateID uuid.UUID `json:"template_id" format:"uuid" table:"template id"`
|
||||
TemplateName string `json:"template_name" table:"template name"`
|
||||
TemplateDisplayName string `json:"template_display_name" table:"template display name"`
|
||||
TemplateIcon string `json:"template_icon" table:"template icon"`
|
||||
WorkspaceID *uuid.UUID `json:"workspace_id,omitempty" format:"uuid" table:"workspace id"`
|
||||
WorkspaceName string `json:"workspace_name,omitempty" table:"workspace name"`
|
||||
}
|
||||
|
||||
// ProvisionerJobType represents the type of job.
|
||||
|
||||
@@ -1,10 +1,6 @@
|
||||
package codersdk
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
@@ -43,67 +39,3 @@ type UpdateUserSecretRequest struct {
|
||||
EnvName *string `json:"env_name,omitempty"`
|
||||
FilePath *string `json:"file_path,omitempty"`
|
||||
}
|
||||
|
||||
func (c *Client) CreateUserSecret(ctx context.Context, user string, req CreateUserSecretRequest) (UserSecret, error) {
|
||||
res, err := c.Request(ctx, http.MethodPost, fmt.Sprintf("/api/v2/users/%s/secrets", user), req)
|
||||
if err != nil {
|
||||
return UserSecret{}, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
if res.StatusCode != http.StatusCreated {
|
||||
return UserSecret{}, ReadBodyAsError(res)
|
||||
}
|
||||
var secret UserSecret
|
||||
return secret, json.NewDecoder(res.Body).Decode(&secret)
|
||||
}
|
||||
|
||||
func (c *Client) UserSecrets(ctx context.Context, user string) ([]UserSecret, error) {
|
||||
res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/users/%s/secrets", user), nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
if res.StatusCode != http.StatusOK {
|
||||
return nil, ReadBodyAsError(res)
|
||||
}
|
||||
var secrets []UserSecret
|
||||
return secrets, json.NewDecoder(res.Body).Decode(&secrets)
|
||||
}
|
||||
|
||||
func (c *Client) UserSecretByName(ctx context.Context, user string, name string) (UserSecret, error) {
|
||||
res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/users/%s/secrets/%s", user, name), nil)
|
||||
if err != nil {
|
||||
return UserSecret{}, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
if res.StatusCode != http.StatusOK {
|
||||
return UserSecret{}, ReadBodyAsError(res)
|
||||
}
|
||||
var secret UserSecret
|
||||
return secret, json.NewDecoder(res.Body).Decode(&secret)
|
||||
}
|
||||
|
||||
func (c *Client) UpdateUserSecret(ctx context.Context, user string, name string, req UpdateUserSecretRequest) (UserSecret, error) {
|
||||
res, err := c.Request(ctx, http.MethodPatch, fmt.Sprintf("/api/v2/users/%s/secrets/%s", user, name), req)
|
||||
if err != nil {
|
||||
return UserSecret{}, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
if res.StatusCode != http.StatusOK {
|
||||
return UserSecret{}, ReadBodyAsError(res)
|
||||
}
|
||||
var secret UserSecret
|
||||
return secret, json.NewDecoder(res.Body).Decode(&secret)
|
||||
}
|
||||
|
||||
func (c *Client) DeleteUserSecret(ctx context.Context, user string, name string) error {
|
||||
res, err := c.Request(ctx, http.MethodDelete, fmt.Sprintf("/api/v2/users/%s/secrets/%s", user, name), nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
if res.StatusCode != http.StatusNoContent {
|
||||
return ReadBodyAsError(res)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -34,14 +34,16 @@ the most important.
|
||||
- [React](https://reactjs.org/) for the UI framework
|
||||
- [Typescript](https://www.typescriptlang.org/) to keep our sanity
|
||||
- [Vite](https://vitejs.dev/) to build the project
|
||||
- [Material V5](https://mui.com/material-ui/getting-started/) for UI components
|
||||
- [react-router](https://reactrouter.com/en/main) for routing
|
||||
- [TanStack Query](https://tanstack.com/query/v4/docs/react/overview) for
|
||||
- [TanStack Query v4](https://tanstack.com/query/v4/docs/react/overview) for
|
||||
fetching data
|
||||
- [Vitest](https://vitest.dev/) for integration testing
|
||||
- [axios](https://github.com/axios/axios) as fetching lib
|
||||
- [Playwright](https://playwright.dev/) for end-to-end (E2E) testing
|
||||
- [Jest](https://jestjs.io/) for integration testing
|
||||
- [Storybook](https://storybook.js.org/) and
|
||||
[Chromatic](https://www.chromatic.com/) for visual testing
|
||||
- [pnpm](https://pnpm.io/) as the package manager
|
||||
- [PNPM](https://pnpm.io/) as the package manager
|
||||
|
||||
## Structure
|
||||
|
||||
@@ -49,6 +51,7 @@ All UI-related code is in the `site` folder. Key directories include:
|
||||
|
||||
- **e2e** - End-to-end (E2E) tests
|
||||
- **src** - Source code
|
||||
- **mocks** - [Manual mocks](https://jestjs.io/docs/manual-mocks) used by Jest
|
||||
- **@types** - Custom types for dependencies that don't have defined types
|
||||
(largely code that has no server-side equivalent)
|
||||
- **api** - API function calls and types
|
||||
@@ -56,7 +59,7 @@ All UI-related code is in the `site` folder. Key directories include:
|
||||
- **components** - Reusable UI components without Coder specific business
|
||||
logic
|
||||
- **hooks** - Custom React hooks
|
||||
- **modules** - Coder specific logic and components related to multiple parts of the UI
|
||||
- **modules** - Coder-specific UI components
|
||||
- **pages** - Page-level components
|
||||
- **testHelpers** - Helper functions for integration testing
|
||||
- **theme** - theme configuration and color definitions
|
||||
@@ -283,9 +286,9 @@ local machine and forward the necessary ports to your workspace. At the end of
|
||||
the script, you will land _inside_ your workspace with environment variables set
|
||||
so you can simply execute the test (`pnpm run playwright:test`).
|
||||
|
||||
### Integration/Unit
|
||||
### Integration/Unit – Jest
|
||||
|
||||
We use unit and integration tests mostly for testing code that does _not_ pertain to React. Functions and classes that contain notable app logic, and which are well abstracted from React should have accompanying tests. If the logic is tightly coupled to a React component, a Storybook test or an E2E test is usually a better option.
|
||||
We use Jest mostly for testing code that does _not_ pertain to React. Functions and classes that contain notable app logic, and which are well abstracted from React should have accompanying tests. If the logic is tightly coupled to a React component, a Storybook test or an E2E test may be a better option depending on the scenario.
|
||||
|
||||
### Visual Testing – Storybook
|
||||
|
||||
|
||||
@@ -40,7 +40,7 @@ CODER_EXPERIMENTS=oauth2
|
||||
2. Click **Create Application**
|
||||
3. Fill in the application details:
|
||||
- **Name**: Your application name
|
||||
- **Callback URL**: `https://yourapp.example.com/callback` (web) or `myapp://callback` (native/desktop)
|
||||
- **Callback URL**: `https://yourapp.example.com/callback`
|
||||
- **Icon**: Optional icon URL
|
||||
|
||||
### Method 2: Management API
|
||||
@@ -251,31 +251,16 @@ Add `oauth2` to your experiment flags: `coder server --experiments oauth2`
|
||||
|
||||
Ensure the redirect URI in your request exactly matches the one registered for your application.
|
||||
|
||||
### "Invalid Callback URL" on the consent page
|
||||
|
||||
If you see this error when authorizing, the registered callback URL uses a
|
||||
blocked scheme (`javascript:`, `data:`, `file:`, or `ftp:`). Update the
|
||||
application's callback URL to a valid scheme (see
|
||||
[Callback URL schemes](#callback-url-schemes)).
|
||||
|
||||
### "PKCE verification failed"
|
||||
|
||||
Verify that the `code_verifier` used in the token request matches the one used to generate the `code_challenge`.
|
||||
|
||||
## Callback URL schemes
|
||||
|
||||
Custom URI schemes (`myapp://`, `vscode://`, `jetbrains://`, etc.) are fully supported for native and desktop applications. The OS routes the redirect back to the registered application without requiring a running HTTP server.
|
||||
|
||||
The following schemes are blocked for security reasons: `javascript:`, `data:`, `file:`, `ftp:`.
|
||||
|
||||
## Security Considerations
|
||||
|
||||
- **Use HTTPS**: Always use HTTPS in production to protect tokens in transit
|
||||
- **Implement PKCE**: PKCE is mandatory for all authorization code clients
|
||||
(public and confidential)
|
||||
- **Validate redirect URLs**: Only register trusted redirect URIs. Dangerous
|
||||
schemes (`javascript:`, `data:`, `file:`, `ftp:`) are blocked by the server,
|
||||
but custom URI schemes for native apps (`myapp://`) are permitted
|
||||
- **Validate redirect URLs**: Only register trusted redirect URIs for your applications
|
||||
- **Rotate secrets**: Periodically rotate client secrets using the management API
|
||||
|
||||
## Limitations
|
||||
|
||||
@@ -23,7 +23,6 @@ The following database fields are currently encrypted:
|
||||
- `external_auth_links.oauth_access_token`
|
||||
- `external_auth_links.oauth_refresh_token`
|
||||
- `crypto_keys.secret`
|
||||
- `user_secrets.value`
|
||||
|
||||
Additional database fields may be encrypted in the future.
|
||||
|
||||
|
||||
@@ -80,19 +80,9 @@ See [Proxy TLS Configuration](#proxy-tls-configuration) for configuration steps.
|
||||
|
||||
### Restricting proxy access
|
||||
|
||||
Requests to non-allowlisted domains are tunneled through the proxy, but connections to private and reserved IP ranges are blocked by default.
|
||||
The IP validation and TCP connect happen atomically, preventing DNS rebinding attacks where the resolved address could change between the check and the connection.
|
||||
Requests to non-allowlisted domains are tunneled through the proxy without restriction.
|
||||
To prevent unauthorized use, restrict network access to the proxy so that only authorized clients can connect.
|
||||
|
||||
In case the Coder access URL resolves to a private address, it is automatically exempt from this restriction so the proxy can always reach its own deployment.
|
||||
If you need to allow access to additional internal networks via the proxy, use the Allowlist CIDRs option ([`CODER_AIBRIDGE_PROXY_ALLOWED_PRIVATE_CIDRS`](../../../reference/cli/server.md#--aibridge-proxy-allowed-private-cidrs)):
|
||||
|
||||
```shell
|
||||
CODER_AIBRIDGE_PROXY_ALLOWED_PRIVATE_CIDRS=10.0.0.0/8,172.16.0.0/12
|
||||
# or via CLI flag:
|
||||
--aibridge-proxy-allowed-private-cidrs=10.0.0.0/8,172.16.0.0/12
|
||||
```
|
||||
|
||||
## CA Certificate
|
||||
|
||||
AI Gateway Proxy uses a CA (Certificate Authority) certificate to perform MITM interception of HTTPS traffic.
|
||||
@@ -250,11 +240,6 @@ To ensure AI Gateway also routes requests through the upstream proxy, make sure
|
||||
|
||||
<!-- TODO(ssncferreira): Add diagram showing how AI Gateway Proxy integrates with upstream proxies -->
|
||||
|
||||
> [!NOTE]
|
||||
> When an upstream proxy is configured, AI Gateway Proxy validates the destination IP before forwarding the request.
|
||||
> However, the upstream proxy re-resolves DNS independently, so a small DNS rebinding window exists between the validation and the actual connection.
|
||||
> Ensure your upstream proxy enforces its own restrictions on private and reserved IP ranges.
|
||||
|
||||
### Configuration
|
||||
|
||||
Configure the upstream proxy URL:
|
||||
|
||||
@@ -101,6 +101,67 @@ AI Gateway can relay traffic to other OpenAI- or Anthropic-compatible services o
|
||||
> [!NOTE]
|
||||
> See the [Supported APIs](./reference.md#supported-apis) section below for precise endpoint coverage and interception behavior.
|
||||
|
||||
### Multiple instances of the same provider
|
||||
|
||||
You can configure multiple instances of the same provider type — for example, to
|
||||
route different teams to separate API keys, use different base URLs per region, or
|
||||
connect to both a direct API and a proxy simultaneously. Use indexed environment
|
||||
variables following the pattern `CODER_AIBRIDGE_PROVIDER_<N>_<KEY>`:
|
||||
|
||||
```sh
|
||||
# Anthropic routed through a corporate proxy
|
||||
export CODER_AIBRIDGE_PROVIDER_0_TYPE=anthropic
|
||||
export CODER_AIBRIDGE_PROVIDER_0_NAME=anthropic-corp
|
||||
export CODER_AIBRIDGE_PROVIDER_0_KEY=sk-ant-corp-xxx
|
||||
export CODER_AIBRIDGE_PROVIDER_0_BASE_URL=https://llm-proxy.internal.example.com/anthropic
|
||||
|
||||
# Anthropic direct (for teams that need direct access)
|
||||
export CODER_AIBRIDGE_PROVIDER_1_TYPE=anthropic
|
||||
export CODER_AIBRIDGE_PROVIDER_1_NAME=anthropic-direct
|
||||
export CODER_AIBRIDGE_PROVIDER_1_KEY=sk-ant-direct-yyy
|
||||
|
||||
# Azure-hosted OpenAI deployment
|
||||
export CODER_AIBRIDGE_PROVIDER_2_TYPE=openai
|
||||
export CODER_AIBRIDGE_PROVIDER_2_NAME=azure-openai
|
||||
export CODER_AIBRIDGE_PROVIDER_2_KEY=azure-key-zzz
|
||||
export CODER_AIBRIDGE_PROVIDER_2_BASE_URL=https://my-deployment.openai.azure.com/
|
||||
|
||||
coder server
|
||||
```
|
||||
|
||||
Each provider instance gets a unique route based on its `NAME`. Clients send
|
||||
requests to `/api/v2/aibridge/<NAME>/` to target a specific instance:
|
||||
|
||||
| Instance name | Route |
|
||||
|--------------------|-----------------------------------------------------|
|
||||
| `anthropic-corp` | `/api/v2/aibridge/anthropic-corp/v1/messages` |
|
||||
| `anthropic-direct` | `/api/v2/aibridge/anthropic-direct/v1/messages` |
|
||||
| `azure-openai` | `/api/v2/aibridge/azure-openai/v1/chat/completions` |
|
||||
|
||||
**Supported keys per provider:**
|
||||
|
||||
| Key | Required | Description |
|
||||
|------------|----------|------------------------------------------------------|
|
||||
| `TYPE` | Yes | Provider type: `openai`, `anthropic`, or `copilot` |
|
||||
| `NAME` | No | Unique instance name for routing. Defaults to `TYPE` |
|
||||
| `KEY` | No | API key for upstream authentication |
|
||||
| `BASE_URL` | No | Base URL of the upstream API |
|
||||
|
||||
For `anthropic` providers using AWS Bedrock, the following keys are also
|
||||
available: `BEDROCK_BASE_URL`, `BEDROCK_REGION`, `BEDROCK_ACCESS_KEY`,
|
||||
`BEDROCK_ACCESS_KEY_SECRET`, `BEDROCK_MODEL`, `BEDROCK_SMALL_FAST_MODEL`.
|
||||
|
||||
> [!NOTE]
|
||||
> Indices must be contiguous and start at `0`. Each instance must have a unique
|
||||
> `NAME` — if two instances of the same `TYPE` omit `NAME`, they will both
|
||||
> default to the type name and fail with a duplicate name error.
|
||||
>
|
||||
> The legacy single-provider environment variables (`CODER_AIBRIDGE_OPENAI_KEY`,
|
||||
> `CODER_AIBRIDGE_ANTHROPIC_KEY`, etc.) continue to work. However, setting both
|
||||
> a legacy variable and an indexed provider with the same default name (e.g.
|
||||
> `CODER_AIBRIDGE_OPENAI_KEY` and an indexed provider named `openai`) will
|
||||
> produce a startup error — remove one or the other to resolve the conflict.
|
||||
|
||||
## Data Retention
|
||||
|
||||
AI Gateway records prompts, token usage, tool invocations, and model reasoning for auditing and
|
||||
|
||||
@@ -1526,10 +1526,6 @@
|
||||
"title": "Schemas",
|
||||
"path": "./reference/api/schemas.md"
|
||||
},
|
||||
{
|
||||
"title": "Secrets",
|
||||
"path": "./reference/api/secrets.md"
|
||||
},
|
||||
{
|
||||
"title": "Tasks",
|
||||
"path": "./reference/api/tasks.md"
|
||||
|
||||
Generated
-2
@@ -334,8 +334,6 @@ curl -X GET http://coder-server:8080/api/v2/aibridge/sessions/{session_id} \
|
||||
]
|
||||
}
|
||||
],
|
||||
"credential_hint": "string",
|
||||
"credential_kind": "string",
|
||||
"ended_at": "2019-08-24T14:15:22Z",
|
||||
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
|
||||
"model": "string",
|
||||
|
||||
Generated
+14
-21
@@ -60,7 +60,6 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -301,7 +300,6 @@ curl -X GET http://coder-server:8080/api/v2/workspacebuilds/{workspacebuild} \
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -1010,7 +1008,6 @@ curl -X GET http://coder-server:8080/api/v2/workspacebuilds/{workspacebuild}/sta
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -1362,7 +1359,6 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace}/builds \
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -1581,7 +1577,6 @@ Status Code **200**
|
||||
| `»»» template_id` | string(uuid) | false | | |
|
||||
| `»»» template_name` | string | false | | |
|
||||
| `»»» template_version_name` | string | false | | |
|
||||
| `»»» workspace_build_transition` | [codersdk.WorkspaceTransition](schemas.md#codersdkworkspacetransition) | false | | |
|
||||
| `»»» workspace_id` | string(uuid) | false | | |
|
||||
| `»»» workspace_name` | string | false | | |
|
||||
| `»» organization_id` | string(uuid) | false | | |
|
||||
@@ -1715,21 +1710,20 @@ Status Code **200**
|
||||
|
||||
#### Enumerated Values
|
||||
|
||||
| Property | Value(s) |
|
||||
|------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `error_code` | `REQUIRED_TEMPLATE_VARIABLES` |
|
||||
| `workspace_build_transition` | `delete`, `start`, `stop` |
|
||||
| `status` | `canceled`, `canceling`, `connected`, `connecting`, `deleted`, `deleting`, `disconnected`, `failed`, `pending`, `running`, `starting`, `stopped`, `stopping`, `succeeded`, `timeout` |
|
||||
| `type` | `template_version_dry_run`, `template_version_import`, `workspace_build` |
|
||||
| `reason` | `autostart`, `autostop`, `initiator` |
|
||||
| `health` | `disabled`, `healthy`, `initializing`, `unhealthy` |
|
||||
| `open_in` | `slim-window`, `tab` |
|
||||
| `sharing_level` | `authenticated`, `organization`, `owner`, `public` |
|
||||
| `state` | `complete`, `failure`, `idle`, `working` |
|
||||
| `lifecycle_state` | `created`, `off`, `ready`, `shutdown_error`, `shutdown_timeout`, `shutting_down`, `start_error`, `start_timeout`, `starting` |
|
||||
| `startup_script_behavior` | `blocking`, `non-blocking` |
|
||||
| `workspace_transition` | `delete`, `start`, `stop` |
|
||||
| `transition` | `delete`, `start`, `stop` |
|
||||
| Property | Value(s) |
|
||||
|---------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `error_code` | `REQUIRED_TEMPLATE_VARIABLES` |
|
||||
| `status` | `canceled`, `canceling`, `connected`, `connecting`, `deleted`, `deleting`, `disconnected`, `failed`, `pending`, `running`, `starting`, `stopped`, `stopping`, `succeeded`, `timeout` |
|
||||
| `type` | `template_version_dry_run`, `template_version_import`, `workspace_build` |
|
||||
| `reason` | `autostart`, `autostop`, `initiator` |
|
||||
| `health` | `disabled`, `healthy`, `initializing`, `unhealthy` |
|
||||
| `open_in` | `slim-window`, `tab` |
|
||||
| `sharing_level` | `authenticated`, `organization`, `owner`, `public` |
|
||||
| `state` | `complete`, `failure`, `idle`, `working` |
|
||||
| `lifecycle_state` | `created`, `off`, `ready`, `shutdown_error`, `shutdown_timeout`, `shutting_down`, `start_error`, `start_timeout`, `starting` |
|
||||
| `startup_script_behavior` | `blocking`, `non-blocking` |
|
||||
| `workspace_transition` | `delete`, `start`, `stop` |
|
||||
| `transition` | `delete`, `start`, `stop` |
|
||||
|
||||
To perform this operation, you must be authenticated. [Learn more](authentication.md).
|
||||
|
||||
@@ -1816,7 +1810,6 @@ curl -X POST http://coder-server:8080/api/v2/workspaces/{workspace}/builds \
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
|
||||
Generated
+11
-2
@@ -203,14 +203,23 @@ curl -X GET http://coder-server:8080/api/v2/deployment/config \
|
||||
"base_url": "string",
|
||||
"key": "string"
|
||||
},
|
||||
"providers": [
|
||||
{
|
||||
"base_url": "string",
|
||||
"bedrock_model": "string",
|
||||
"bedrock_region": "string",
|
||||
"bedrock_small_fast_model": "string",
|
||||
"name": "string",
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"rate_limit": 0,
|
||||
"retention": 0,
|
||||
"send_actor_headers": true,
|
||||
"structured_logging": true
|
||||
},
|
||||
"chat": {
|
||||
"acquire_batch_size": 0,
|
||||
"debug_logging_enabled": true
|
||||
"acquire_batch_size": 0
|
||||
}
|
||||
},
|
||||
"allow_workspace_renames": true,
|
||||
|
||||
Generated
+40
-44
@@ -317,7 +317,6 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/provisi
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -347,51 +346,49 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/provisi
|
||||
|
||||
Status Code **200**
|
||||
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|---------------------------------|------------------------------------------------------------------------------|----------|--------------|-------------|
|
||||
| `[array item]` | array | false | | |
|
||||
| `» available_workers` | array | false | | |
|
||||
| `» canceled_at` | string(date-time) | false | | |
|
||||
| `» completed_at` | string(date-time) | false | | |
|
||||
| `» created_at` | string(date-time) | false | | |
|
||||
| `» error` | string | false | | |
|
||||
| `» error_code` | [codersdk.JobErrorCode](schemas.md#codersdkjoberrorcode) | false | | |
|
||||
| `» file_id` | string(uuid) | false | | |
|
||||
| `» id` | string(uuid) | false | | |
|
||||
| `» initiator_id` | string(uuid) | false | | |
|
||||
| `» input` | [codersdk.ProvisionerJobInput](schemas.md#codersdkprovisionerjobinput) | false | | |
|
||||
| `»» error` | string | false | | |
|
||||
| `»» template_version_id` | string(uuid) | false | | |
|
||||
| `»» workspace_build_id` | string(uuid) | false | | |
|
||||
| `» logs_overflowed` | boolean | false | | |
|
||||
| `» metadata` | [codersdk.ProvisionerJobMetadata](schemas.md#codersdkprovisionerjobmetadata) | false | | |
|
||||
| `»» template_display_name` | string | false | | |
|
||||
| `»» template_icon` | string | false | | |
|
||||
| `»» template_id` | string(uuid) | false | | |
|
||||
| `»» template_name` | string | false | | |
|
||||
| `»» template_version_name` | string | false | | |
|
||||
| `»» workspace_build_transition` | [codersdk.WorkspaceTransition](schemas.md#codersdkworkspacetransition) | false | | |
|
||||
| `»» workspace_id` | string(uuid) | false | | |
|
||||
| `»» workspace_name` | string | false | | |
|
||||
| `» organization_id` | string(uuid) | false | | |
|
||||
| `» queue_position` | integer | false | | |
|
||||
| `» queue_size` | integer | false | | |
|
||||
| `» started_at` | string(date-time) | false | | |
|
||||
| `» status` | [codersdk.ProvisionerJobStatus](schemas.md#codersdkprovisionerjobstatus) | false | | |
|
||||
| `» tags` | object | false | | |
|
||||
| `»» [any property]` | string | false | | |
|
||||
| `» type` | [codersdk.ProvisionerJobType](schemas.md#codersdkprovisionerjobtype) | false | | |
|
||||
| `» worker_id` | string(uuid) | false | | |
|
||||
| `» worker_name` | string | false | | |
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|----------------------------|------------------------------------------------------------------------------|----------|--------------|-------------|
|
||||
| `[array item]` | array | false | | |
|
||||
| `» available_workers` | array | false | | |
|
||||
| `» canceled_at` | string(date-time) | false | | |
|
||||
| `» completed_at` | string(date-time) | false | | |
|
||||
| `» created_at` | string(date-time) | false | | |
|
||||
| `» error` | string | false | | |
|
||||
| `» error_code` | [codersdk.JobErrorCode](schemas.md#codersdkjoberrorcode) | false | | |
|
||||
| `» file_id` | string(uuid) | false | | |
|
||||
| `» id` | string(uuid) | false | | |
|
||||
| `» initiator_id` | string(uuid) | false | | |
|
||||
| `» input` | [codersdk.ProvisionerJobInput](schemas.md#codersdkprovisionerjobinput) | false | | |
|
||||
| `»» error` | string | false | | |
|
||||
| `»» template_version_id` | string(uuid) | false | | |
|
||||
| `»» workspace_build_id` | string(uuid) | false | | |
|
||||
| `» logs_overflowed` | boolean | false | | |
|
||||
| `» metadata` | [codersdk.ProvisionerJobMetadata](schemas.md#codersdkprovisionerjobmetadata) | false | | |
|
||||
| `»» template_display_name` | string | false | | |
|
||||
| `»» template_icon` | string | false | | |
|
||||
| `»» template_id` | string(uuid) | false | | |
|
||||
| `»» template_name` | string | false | | |
|
||||
| `»» template_version_name` | string | false | | |
|
||||
| `»» workspace_id` | string(uuid) | false | | |
|
||||
| `»» workspace_name` | string | false | | |
|
||||
| `» organization_id` | string(uuid) | false | | |
|
||||
| `» queue_position` | integer | false | | |
|
||||
| `» queue_size` | integer | false | | |
|
||||
| `» started_at` | string(date-time) | false | | |
|
||||
| `» status` | [codersdk.ProvisionerJobStatus](schemas.md#codersdkprovisionerjobstatus) | false | | |
|
||||
| `» tags` | object | false | | |
|
||||
| `»» [any property]` | string | false | | |
|
||||
| `» type` | [codersdk.ProvisionerJobType](schemas.md#codersdkprovisionerjobtype) | false | | |
|
||||
| `» worker_id` | string(uuid) | false | | |
|
||||
| `» worker_name` | string | false | | |
|
||||
|
||||
#### Enumerated Values
|
||||
|
||||
| Property | Value(s) |
|
||||
|------------------------------|--------------------------------------------------------------------------|
|
||||
| `error_code` | `REQUIRED_TEMPLATE_VARIABLES` |
|
||||
| `workspace_build_transition` | `delete`, `start`, `stop` |
|
||||
| `status` | `canceled`, `canceling`, `failed`, `pending`, `running`, `succeeded` |
|
||||
| `type` | `template_version_dry_run`, `template_version_import`, `workspace_build` |
|
||||
| Property | Value(s) |
|
||||
|--------------|--------------------------------------------------------------------------|
|
||||
| `error_code` | `REQUIRED_TEMPLATE_VARIABLES` |
|
||||
| `status` | `canceled`, `canceling`, `failed`, `pending`, `running`, `succeeded` |
|
||||
| `type` | `template_version_dry_run`, `template_version_import`, `workspace_build` |
|
||||
|
||||
To perform this operation, you must be authenticated. [Learn more](authentication.md).
|
||||
|
||||
@@ -444,7 +441,6 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/provisi
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
|
||||
Generated
+98
-121
@@ -455,6 +455,16 @@
|
||||
"base_url": "string",
|
||||
"key": "string"
|
||||
},
|
||||
"providers": [
|
||||
{
|
||||
"base_url": "string",
|
||||
"bedrock_model": "string",
|
||||
"bedrock_region": "string",
|
||||
"bedrock_small_fast_model": "string",
|
||||
"name": "string",
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"rate_limit": 0,
|
||||
"retention": 0,
|
||||
"send_actor_headers": true,
|
||||
@@ -464,23 +474,24 @@
|
||||
|
||||
### Properties
|
||||
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|-------------------------------------|----------------------------------------------------------------------|----------|--------------|-----------------------------------------------------------------------------------------------------------------------|
|
||||
| `anthropic` | [codersdk.AIBridgeAnthropicConfig](#codersdkaibridgeanthropicconfig) | false | | |
|
||||
| `bedrock` | [codersdk.AIBridgeBedrockConfig](#codersdkaibridgebedrockconfig) | false | | |
|
||||
| `circuit_breaker_enabled` | boolean | false | | Circuit breaker protects against cascading failures from upstream AI provider rate limits (429, 503, 529 overloaded). |
|
||||
| `circuit_breaker_failure_threshold` | integer | false | | |
|
||||
| `circuit_breaker_interval` | integer | false | | |
|
||||
| `circuit_breaker_max_requests` | integer | false | | |
|
||||
| `circuit_breaker_timeout` | integer | false | | |
|
||||
| `enabled` | boolean | false | | |
|
||||
| `inject_coder_mcp_tools` | boolean | false | | Deprecated: Injected MCP in AI Bridge is deprecated and will be removed in a future release. |
|
||||
| `max_concurrency` | integer | false | | |
|
||||
| `openai` | [codersdk.AIBridgeOpenAIConfig](#codersdkaibridgeopenaiconfig) | false | | |
|
||||
| `rate_limit` | integer | false | | |
|
||||
| `retention` | integer | false | | |
|
||||
| `send_actor_headers` | boolean | false | | |
|
||||
| `structured_logging` | boolean | false | | |
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|-------------------------------------|-----------------------------------------------------------------------------|----------|--------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `anthropic` | [codersdk.AIBridgeAnthropicConfig](#codersdkaibridgeanthropicconfig) | false | | Deprecated: Use Providers with indexed CODER_AIBRIDGE_PROVIDER_<N>_* env vars instead. |
|
||||
| `bedrock` | [codersdk.AIBridgeBedrockConfig](#codersdkaibridgebedrockconfig) | false | | Deprecated: Use Providers with indexed CODER_AIBRIDGE_PROVIDER_<N>_* env vars instead. |
|
||||
| `circuit_breaker_enabled` | boolean | false | | Circuit breaker protects against cascading failures from upstream AI provider rate limits (429, 503, 529 overloaded). |
|
||||
| `circuit_breaker_failure_threshold` | integer | false | | |
|
||||
| `circuit_breaker_interval` | integer | false | | |
|
||||
| `circuit_breaker_max_requests` | integer | false | | |
|
||||
| `circuit_breaker_timeout` | integer | false | | |
|
||||
| `enabled` | boolean | false | | |
|
||||
| `inject_coder_mcp_tools` | boolean | false | | Deprecated: Injected MCP in AI Bridge is deprecated and will be removed in a future release. |
|
||||
| `max_concurrency` | integer | false | | |
|
||||
| `openai` | [codersdk.AIBridgeOpenAIConfig](#codersdkaibridgeopenaiconfig) | false | | Deprecated: Use Providers with indexed CODER_AIBRIDGE_PROVIDER_<N>_* env vars instead. |
|
||||
| `providers` | array of [codersdk.AIBridgeProviderConfig](#codersdkaibridgeproviderconfig) | false | | Providers holds provider instances populated from CODER_AIBRIDGE_PROVIDER_<N>_<KEY> env vars and/or the deprecated LegacyOpenAI/LegacyAnthropic/LegacyBedrock fields above. |
|
||||
| `rate_limit` | integer | false | | |
|
||||
| `retention` | integer | false | | |
|
||||
| `send_actor_headers` | boolean | false | | |
|
||||
| `structured_logging` | boolean | false | | |
|
||||
|
||||
## codersdk.AIBridgeInterception
|
||||
|
||||
@@ -732,6 +743,30 @@
|
||||
| `base_url` | string | false | | |
|
||||
| `key` | string | false | | |
|
||||
|
||||
## codersdk.AIBridgeProviderConfig
|
||||
|
||||
```json
|
||||
{
|
||||
"base_url": "string",
|
||||
"bedrock_model": "string",
|
||||
"bedrock_region": "string",
|
||||
"bedrock_small_fast_model": "string",
|
||||
"name": "string",
|
||||
"type": "string"
|
||||
}
|
||||
```
|
||||
|
||||
### Properties
|
||||
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|----------------------------|--------|----------|--------------|--------------------------------------------------------------------------------------------|
|
||||
| `base_url` | string | false | | Base URL is the base URL of the upstream provider API. |
|
||||
| `bedrock_model` | string | false | | |
|
||||
| `bedrock_region` | string | false | | |
|
||||
| `bedrock_small_fast_model` | string | false | | |
|
||||
| `name` | string | false | | Name is the unique instance identifier used for routing. Defaults to Type if not provided. |
|
||||
| `type` | string | false | | Type is the provider type: "openai", "anthropic", or "copilot". |
|
||||
|
||||
## codersdk.AIBridgeProxyConfig
|
||||
|
||||
```json
|
||||
@@ -884,8 +919,6 @@
|
||||
]
|
||||
}
|
||||
],
|
||||
"credential_hint": "string",
|
||||
"credential_kind": "string",
|
||||
"ended_at": "2019-08-24T14:15:22Z",
|
||||
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
|
||||
"model": "string",
|
||||
@@ -1021,8 +1054,6 @@
|
||||
]
|
||||
}
|
||||
],
|
||||
"credential_hint": "string",
|
||||
"credential_kind": "string",
|
||||
"ended_at": "2019-08-24T14:15:22Z",
|
||||
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
|
||||
"model": "string",
|
||||
@@ -1047,8 +1078,6 @@
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|-------------------|----------------------------------------------------------------------------------------|----------|--------------|-------------|
|
||||
| `agentic_actions` | array of [codersdk.AIBridgeAgenticAction](#codersdkaibridgeagenticaction) | false | | |
|
||||
| `credential_hint` | string | false | | |
|
||||
| `credential_kind` | string | false | | |
|
||||
| `ended_at` | string | false | | |
|
||||
| `id` | string | false | | |
|
||||
| `model` | string | false | | |
|
||||
@@ -1234,14 +1263,23 @@
|
||||
"base_url": "string",
|
||||
"key": "string"
|
||||
},
|
||||
"providers": [
|
||||
{
|
||||
"base_url": "string",
|
||||
"bedrock_model": "string",
|
||||
"bedrock_region": "string",
|
||||
"bedrock_small_fast_model": "string",
|
||||
"name": "string",
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"rate_limit": 0,
|
||||
"retention": 0,
|
||||
"send_actor_headers": true,
|
||||
"structured_logging": true
|
||||
},
|
||||
"chat": {
|
||||
"acquire_batch_size": 0,
|
||||
"debug_logging_enabled": true
|
||||
"acquire_batch_size": 0
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -2022,17 +2060,15 @@ AuthorizationObject can represent a "set" of objects, such as: all workspaces in
|
||||
|
||||
```json
|
||||
{
|
||||
"acquire_batch_size": 0,
|
||||
"debug_logging_enabled": true
|
||||
"acquire_batch_size": 0
|
||||
}
|
||||
```
|
||||
|
||||
### Properties
|
||||
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|-------------------------|---------|----------|--------------|-------------|
|
||||
| `acquire_batch_size` | integer | false | | |
|
||||
| `debug_logging_enabled` | boolean | false | | |
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|----------------------|---------|----------|--------------|-------------|
|
||||
| `acquire_batch_size` | integer | false | | |
|
||||
|
||||
## codersdk.ChatRetentionDaysResponse
|
||||
|
||||
@@ -2709,28 +2745,6 @@ This is required on creation to enable a user-flow of validating a template work
|
||||
| `user_status` | [codersdk.UserStatus](#codersdkuserstatus) | false | | User status defaults to UserStatusDormant. |
|
||||
| `username` | string | true | | |
|
||||
|
||||
## codersdk.CreateUserSecretRequest
|
||||
|
||||
```json
|
||||
{
|
||||
"description": "string",
|
||||
"env_name": "string",
|
||||
"file_path": "string",
|
||||
"name": "string",
|
||||
"value": "string"
|
||||
}
|
||||
```
|
||||
|
||||
### Properties
|
||||
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|---------------|--------|----------|--------------|-------------|
|
||||
| `description` | string | false | | |
|
||||
| `env_name` | string | false | | |
|
||||
| `file_path` | string | false | | |
|
||||
| `name` | string | false | | |
|
||||
| `value` | string | false | | |
|
||||
|
||||
## codersdk.CreateWorkspaceBuildReason
|
||||
|
||||
```json
|
||||
@@ -3258,14 +3272,23 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o
|
||||
"base_url": "string",
|
||||
"key": "string"
|
||||
},
|
||||
"providers": [
|
||||
{
|
||||
"base_url": "string",
|
||||
"bedrock_model": "string",
|
||||
"bedrock_region": "string",
|
||||
"bedrock_small_fast_model": "string",
|
||||
"name": "string",
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"rate_limit": 0,
|
||||
"retention": 0,
|
||||
"send_actor_headers": true,
|
||||
"structured_logging": true
|
||||
},
|
||||
"chat": {
|
||||
"acquire_batch_size": 0,
|
||||
"debug_logging_enabled": true
|
||||
"acquire_batch_size": 0
|
||||
}
|
||||
},
|
||||
"allow_workspace_renames": true,
|
||||
@@ -3837,14 +3860,23 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o
|
||||
"base_url": "string",
|
||||
"key": "string"
|
||||
},
|
||||
"providers": [
|
||||
{
|
||||
"base_url": "string",
|
||||
"bedrock_model": "string",
|
||||
"bedrock_region": "string",
|
||||
"bedrock_small_fast_model": "string",
|
||||
"name": "string",
|
||||
"type": "string"
|
||||
}
|
||||
],
|
||||
"rate_limit": 0,
|
||||
"retention": 0,
|
||||
"send_actor_headers": true,
|
||||
"structured_logging": true
|
||||
},
|
||||
"chat": {
|
||||
"acquire_batch_size": 0,
|
||||
"debug_logging_enabled": true
|
||||
"acquire_batch_size": 0
|
||||
}
|
||||
},
|
||||
"allow_workspace_renames": true,
|
||||
@@ -7126,7 +7158,6 @@ Only certain features set these fields: - FeatureManagedAgentLimit|
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -7793,7 +7824,6 @@ Only certain features set these fields: - FeatureManagedAgentLimit|
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -7903,7 +7933,6 @@ Only certain features set these fields: - FeatureManagedAgentLimit|
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
}
|
||||
@@ -7911,16 +7940,15 @@ Only certain features set these fields: - FeatureManagedAgentLimit|
|
||||
|
||||
### Properties
|
||||
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|------------------------------|--------------------------------------------------------------|----------|--------------|-------------|
|
||||
| `template_display_name` | string | false | | |
|
||||
| `template_icon` | string | false | | |
|
||||
| `template_id` | string | false | | |
|
||||
| `template_name` | string | false | | |
|
||||
| `template_version_name` | string | false | | |
|
||||
| `workspace_build_transition` | [codersdk.WorkspaceTransition](#codersdkworkspacetransition) | false | | |
|
||||
| `workspace_id` | string | false | | |
|
||||
| `workspace_name` | string | false | | |
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|-------------------------|--------|----------|--------------|-------------|
|
||||
| `template_display_name` | string | false | | |
|
||||
| `template_icon` | string | false | | |
|
||||
| `template_id` | string | false | | |
|
||||
| `template_name` | string | false | | |
|
||||
| `template_version_name` | string | false | | |
|
||||
| `workspace_id` | string | false | | |
|
||||
| `workspace_name` | string | false | | |
|
||||
|
||||
## codersdk.ProvisionerJobStatus
|
||||
|
||||
@@ -8476,7 +8504,6 @@ Only certain features set these fields: - FeatureManagedAgentLimit|
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -10024,7 +10051,6 @@ Restarts will only happen on weekdays in this list on weeks which line up with W
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -10621,26 +10647,6 @@ Restarts will only happen on weekdays in this list on weeks which line up with W
|
||||
The schedule must be daily with a single time, and should have a timezone specified via a CRON_TZ prefix (otherwise UTC will be used).
|
||||
If the schedule is empty, the user will be updated to use the default schedule.|
|
||||
|
||||
## codersdk.UpdateUserSecretRequest
|
||||
|
||||
```json
|
||||
{
|
||||
"description": "string",
|
||||
"env_name": "string",
|
||||
"file_path": "string",
|
||||
"value": "string"
|
||||
}
|
||||
```
|
||||
|
||||
### Properties
|
||||
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|---------------|--------|----------|--------------|-------------|
|
||||
| `description` | string | false | | |
|
||||
| `env_name` | string | false | | |
|
||||
| `file_path` | string | false | | |
|
||||
| `value` | string | false | | |
|
||||
|
||||
## codersdk.UpdateWorkspaceACL
|
||||
|
||||
```json
|
||||
@@ -11199,32 +11205,6 @@ If the schedule is empty, the user will be updated to use the default schedule.|
|
||||
| `user_can_set` | boolean | false | | User can set is true if the user is allowed to set their own quiet hours schedule. If false, the user cannot set a custom schedule and the default schedule will always be used. |
|
||||
| `user_set` | boolean | false | | User set is true if the user has set their own quiet hours schedule. If false, the user is using the default schedule. |
|
||||
|
||||
## codersdk.UserSecret
|
||||
|
||||
```json
|
||||
{
|
||||
"created_at": "2019-08-24T14:15:22Z",
|
||||
"description": "string",
|
||||
"env_name": "string",
|
||||
"file_path": "string",
|
||||
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
|
||||
"name": "string",
|
||||
"updated_at": "2019-08-24T14:15:22Z"
|
||||
}
|
||||
```
|
||||
|
||||
### Properties
|
||||
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|---------------|--------|----------|--------------|-------------|
|
||||
| `created_at` | string | false | | |
|
||||
| `description` | string | false | | |
|
||||
| `env_name` | string | false | | |
|
||||
| `file_path` | string | false | | |
|
||||
| `id` | string | false | | |
|
||||
| `name` | string | false | | |
|
||||
| `updated_at` | string | false | | |
|
||||
|
||||
## codersdk.UserStatus
|
||||
|
||||
```json
|
||||
@@ -11415,7 +11395,6 @@ If the schedule is empty, the user will be updated to use the default schedule.|
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -12574,7 +12553,6 @@ If the schedule is empty, the user will be updated to use the default schedule.|
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -13407,7 +13385,6 @@ If the schedule is empty, the user will be updated to use the default schedule.|
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
|
||||
Generated
-246
@@ -1,246 +0,0 @@
|
||||
# Secrets
|
||||
|
||||
## List user secrets
|
||||
|
||||
### Code samples
|
||||
|
||||
```shell
|
||||
# Example request using curl
|
||||
curl -X GET http://coder-server:8080/api/v2/users/{user}/secrets \
|
||||
-H 'Accept: application/json' \
|
||||
-H 'Coder-Session-Token: API_KEY'
|
||||
```
|
||||
|
||||
`GET /users/{user}/secrets`
|
||||
|
||||
### Parameters
|
||||
|
||||
| Name | In | Type | Required | Description |
|
||||
|--------|------|--------|----------|--------------------------|
|
||||
| `user` | path | string | true | User ID, username, or me |
|
||||
|
||||
### Example responses
|
||||
|
||||
> 200 Response
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"created_at": "2019-08-24T14:15:22Z",
|
||||
"description": "string",
|
||||
"env_name": "string",
|
||||
"file_path": "string",
|
||||
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
|
||||
"name": "string",
|
||||
"updated_at": "2019-08-24T14:15:22Z"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
### Responses
|
||||
|
||||
| Status | Meaning | Description | Schema |
|
||||
|--------|---------------------------------------------------------|-------------|---------------------------------------------------------------|
|
||||
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | array of [codersdk.UserSecret](schemas.md#codersdkusersecret) |
|
||||
|
||||
<h3 id="list-user-secrets-responseschema">Response Schema</h3>
|
||||
|
||||
Status Code **200**
|
||||
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|-----------------|-------------------|----------|--------------|-------------|
|
||||
| `[array item]` | array | false | | |
|
||||
| `» created_at` | string(date-time) | false | | |
|
||||
| `» description` | string | false | | |
|
||||
| `» env_name` | string | false | | |
|
||||
| `» file_path` | string | false | | |
|
||||
| `» id` | string(uuid) | false | | |
|
||||
| `» name` | string | false | | |
|
||||
| `» updated_at` | string(date-time) | false | | |
|
||||
|
||||
To perform this operation, you must be authenticated. [Learn more](authentication.md).
|
||||
|
||||
## Create a new user secret
|
||||
|
||||
### Code samples
|
||||
|
||||
```shell
|
||||
# Example request using curl
|
||||
curl -X POST http://coder-server:8080/api/v2/users/{user}/secrets \
|
||||
-H 'Content-Type: application/json' \
|
||||
-H 'Accept: application/json' \
|
||||
-H 'Coder-Session-Token: API_KEY'
|
||||
```
|
||||
|
||||
`POST /users/{user}/secrets`
|
||||
|
||||
> Body parameter
|
||||
|
||||
```json
|
||||
{
|
||||
"description": "string",
|
||||
"env_name": "string",
|
||||
"file_path": "string",
|
||||
"name": "string",
|
||||
"value": "string"
|
||||
}
|
||||
```
|
||||
|
||||
### Parameters
|
||||
|
||||
| Name | In | Type | Required | Description |
|
||||
|--------|------|--------------------------------------------------------------------------------|----------|--------------------------|
|
||||
| `user` | path | string | true | User ID, username, or me |
|
||||
| `body` | body | [codersdk.CreateUserSecretRequest](schemas.md#codersdkcreateusersecretrequest) | true | Create secret request |
|
||||
|
||||
### Example responses
|
||||
|
||||
> 201 Response
|
||||
|
||||
```json
|
||||
{
|
||||
"created_at": "2019-08-24T14:15:22Z",
|
||||
"description": "string",
|
||||
"env_name": "string",
|
||||
"file_path": "string",
|
||||
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
|
||||
"name": "string",
|
||||
"updated_at": "2019-08-24T14:15:22Z"
|
||||
}
|
||||
```
|
||||
|
||||
### Responses
|
||||
|
||||
| Status | Meaning | Description | Schema |
|
||||
|--------|--------------------------------------------------------------|-------------|------------------------------------------------------|
|
||||
| 201 | [Created](https://tools.ietf.org/html/rfc7231#section-6.3.2) | Created | [codersdk.UserSecret](schemas.md#codersdkusersecret) |
|
||||
|
||||
To perform this operation, you must be authenticated. [Learn more](authentication.md).
|
||||
|
||||
## Get a user secret by name
|
||||
|
||||
### Code samples
|
||||
|
||||
```shell
|
||||
# Example request using curl
|
||||
curl -X GET http://coder-server:8080/api/v2/users/{user}/secrets/{name} \
|
||||
-H 'Accept: application/json' \
|
||||
-H 'Coder-Session-Token: API_KEY'
|
||||
```
|
||||
|
||||
`GET /users/{user}/secrets/{name}`
|
||||
|
||||
### Parameters
|
||||
|
||||
| Name | In | Type | Required | Description |
|
||||
|--------|------|--------|----------|--------------------------|
|
||||
| `user` | path | string | true | User ID, username, or me |
|
||||
| `name` | path | string | true | Secret name |
|
||||
|
||||
### Example responses
|
||||
|
||||
> 200 Response
|
||||
|
||||
```json
|
||||
{
|
||||
"created_at": "2019-08-24T14:15:22Z",
|
||||
"description": "string",
|
||||
"env_name": "string",
|
||||
"file_path": "string",
|
||||
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
|
||||
"name": "string",
|
||||
"updated_at": "2019-08-24T14:15:22Z"
|
||||
}
|
||||
```
|
||||
|
||||
### Responses
|
||||
|
||||
| Status | Meaning | Description | Schema |
|
||||
|--------|---------------------------------------------------------|-------------|------------------------------------------------------|
|
||||
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.UserSecret](schemas.md#codersdkusersecret) |
|
||||
|
||||
To perform this operation, you must be authenticated. [Learn more](authentication.md).
|
||||
|
||||
## Delete a user secret
|
||||
|
||||
### Code samples
|
||||
|
||||
```shell
|
||||
# Example request using curl
|
||||
curl -X DELETE http://coder-server:8080/api/v2/users/{user}/secrets/{name} \
|
||||
-H 'Coder-Session-Token: API_KEY'
|
||||
```
|
||||
|
||||
`DELETE /users/{user}/secrets/{name}`
|
||||
|
||||
### Parameters
|
||||
|
||||
| Name | In | Type | Required | Description |
|
||||
|--------|------|--------|----------|--------------------------|
|
||||
| `user` | path | string | true | User ID, username, or me |
|
||||
| `name` | path | string | true | Secret name |
|
||||
|
||||
### Responses
|
||||
|
||||
| Status | Meaning | Description | Schema |
|
||||
|--------|-----------------------------------------------------------------|-------------|--------|
|
||||
| 204 | [No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5) | No Content | |
|
||||
|
||||
To perform this operation, you must be authenticated. [Learn more](authentication.md).
|
||||
|
||||
## Update a user secret
|
||||
|
||||
### Code samples
|
||||
|
||||
```shell
|
||||
# Example request using curl
|
||||
curl -X PATCH http://coder-server:8080/api/v2/users/{user}/secrets/{name} \
|
||||
-H 'Content-Type: application/json' \
|
||||
-H 'Accept: application/json' \
|
||||
-H 'Coder-Session-Token: API_KEY'
|
||||
```
|
||||
|
||||
`PATCH /users/{user}/secrets/{name}`
|
||||
|
||||
> Body parameter
|
||||
|
||||
```json
|
||||
{
|
||||
"description": "string",
|
||||
"env_name": "string",
|
||||
"file_path": "string",
|
||||
"value": "string"
|
||||
}
|
||||
```
|
||||
|
||||
### Parameters
|
||||
|
||||
| Name | In | Type | Required | Description |
|
||||
|--------|------|--------------------------------------------------------------------------------|----------|--------------------------|
|
||||
| `user` | path | string | true | User ID, username, or me |
|
||||
| `name` | path | string | true | Secret name |
|
||||
| `body` | body | [codersdk.UpdateUserSecretRequest](schemas.md#codersdkupdateusersecretrequest) | true | Update secret request |
|
||||
|
||||
### Example responses
|
||||
|
||||
> 200 Response
|
||||
|
||||
```json
|
||||
{
|
||||
"created_at": "2019-08-24T14:15:22Z",
|
||||
"description": "string",
|
||||
"env_name": "string",
|
||||
"file_path": "string",
|
||||
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
|
||||
"name": "string",
|
||||
"updated_at": "2019-08-24T14:15:22Z"
|
||||
}
|
||||
```
|
||||
|
||||
### Responses
|
||||
|
||||
| Status | Meaning | Description | Schema |
|
||||
|--------|---------------------------------------------------------|-------------|------------------------------------------------------|
|
||||
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.UserSecret](schemas.md#codersdkusersecret) |
|
||||
|
||||
To perform this operation, you must be authenticated. [Learn more](authentication.md).
|
||||
Generated
-2
@@ -425,7 +425,6 @@ curl -X POST http://coder-server:8080/api/v2/tasks/{user}/{task}/pause \
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -669,7 +668,6 @@ curl -X POST http://coder-server:8080/api/v2/tasks/{user}/{task}/resume \
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
|
||||
Generated
+122
-135
@@ -493,7 +493,6 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/templat
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -596,7 +595,6 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/templat
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -723,7 +721,6 @@ curl -X POST http://coder-server:8080/api/v2/organizations/{organization}/templa
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -1338,7 +1335,6 @@ curl -X GET http://coder-server:8080/api/v2/templates/{template}/versions \
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -1383,72 +1379,70 @@ curl -X GET http://coder-server:8080/api/v2/templates/{template}/versions \
|
||||
|
||||
Status Code **200**
|
||||
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|----------------------------------|------------------------------------------------------------------------------|----------|--------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `[array item]` | array | false | | |
|
||||
| `» archived` | boolean | false | | |
|
||||
| `» created_at` | string(date-time) | false | | |
|
||||
| `» created_by` | [codersdk.MinimalUser](schemas.md#codersdkminimaluser) | false | | |
|
||||
| `»» avatar_url` | string(uri) | false | | |
|
||||
| `»» id` | string(uuid) | true | | |
|
||||
| `»» name` | string | false | | |
|
||||
| `»» username` | string | true | | |
|
||||
| `» has_external_agent` | boolean | false | | |
|
||||
| `» id` | string(uuid) | false | | |
|
||||
| `» job` | [codersdk.ProvisionerJob](schemas.md#codersdkprovisionerjob) | false | | |
|
||||
| `»» available_workers` | array | false | | |
|
||||
| `»» canceled_at` | string(date-time) | false | | |
|
||||
| `»» completed_at` | string(date-time) | false | | |
|
||||
| `»» created_at` | string(date-time) | false | | |
|
||||
| `»» error` | string | false | | |
|
||||
| `»» error_code` | [codersdk.JobErrorCode](schemas.md#codersdkjoberrorcode) | false | | |
|
||||
| `»» file_id` | string(uuid) | false | | |
|
||||
| `»» id` | string(uuid) | false | | |
|
||||
| `»» initiator_id` | string(uuid) | false | | |
|
||||
| `»» input` | [codersdk.ProvisionerJobInput](schemas.md#codersdkprovisionerjobinput) | false | | |
|
||||
| `»»» error` | string | false | | |
|
||||
| `»»» template_version_id` | string(uuid) | false | | |
|
||||
| `»»» workspace_build_id` | string(uuid) | false | | |
|
||||
| `»» logs_overflowed` | boolean | false | | |
|
||||
| `»» metadata` | [codersdk.ProvisionerJobMetadata](schemas.md#codersdkprovisionerjobmetadata) | false | | |
|
||||
| `»»» template_display_name` | string | false | | |
|
||||
| `»»» template_icon` | string | false | | |
|
||||
| `»»» template_id` | string(uuid) | false | | |
|
||||
| `»»» template_name` | string | false | | |
|
||||
| `»»» template_version_name` | string | false | | |
|
||||
| `»»» workspace_build_transition` | [codersdk.WorkspaceTransition](schemas.md#codersdkworkspacetransition) | false | | |
|
||||
| `»»» workspace_id` | string(uuid) | false | | |
|
||||
| `»»» workspace_name` | string | false | | |
|
||||
| `»» organization_id` | string(uuid) | false | | |
|
||||
| `»» queue_position` | integer | false | | |
|
||||
| `»» queue_size` | integer | false | | |
|
||||
| `»» started_at` | string(date-time) | false | | |
|
||||
| `»» status` | [codersdk.ProvisionerJobStatus](schemas.md#codersdkprovisionerjobstatus) | false | | |
|
||||
| `»» tags` | object | false | | |
|
||||
| `»»» [any property]` | string | false | | |
|
||||
| `»» type` | [codersdk.ProvisionerJobType](schemas.md#codersdkprovisionerjobtype) | false | | |
|
||||
| `»» worker_id` | string(uuid) | false | | |
|
||||
| `»» worker_name` | string | false | | |
|
||||
| `» matched_provisioners` | [codersdk.MatchedProvisioners](schemas.md#codersdkmatchedprovisioners) | false | | |
|
||||
| `»» available` | integer | false | | Available is the number of provisioner daemons that are available to take jobs. This may be less than the count if some provisioners are busy or have been stopped. |
|
||||
| `»» count` | integer | false | | Count is the number of provisioner daemons that matched the given tags. If the count is 0, it means no provisioner daemons matched the requested tags. |
|
||||
| `»» most_recently_seen` | string(date-time) | false | | Most recently seen is the most recently seen time of the set of matched provisioners. If no provisioners matched, this field will be null. |
|
||||
| `» message` | string | false | | |
|
||||
| `» name` | string | false | | |
|
||||
| `» organization_id` | string(uuid) | false | | |
|
||||
| `» readme` | string | false | | |
|
||||
| `» template_id` | string(uuid) | false | | |
|
||||
| `» updated_at` | string(date-time) | false | | |
|
||||
| `» warnings` | array | false | | |
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|-----------------------------|------------------------------------------------------------------------------|----------|--------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `[array item]` | array | false | | |
|
||||
| `» archived` | boolean | false | | |
|
||||
| `» created_at` | string(date-time) | false | | |
|
||||
| `» created_by` | [codersdk.MinimalUser](schemas.md#codersdkminimaluser) | false | | |
|
||||
| `»» avatar_url` | string(uri) | false | | |
|
||||
| `»» id` | string(uuid) | true | | |
|
||||
| `»» name` | string | false | | |
|
||||
| `»» username` | string | true | | |
|
||||
| `» has_external_agent` | boolean | false | | |
|
||||
| `» id` | string(uuid) | false | | |
|
||||
| `» job` | [codersdk.ProvisionerJob](schemas.md#codersdkprovisionerjob) | false | | |
|
||||
| `»» available_workers` | array | false | | |
|
||||
| `»» canceled_at` | string(date-time) | false | | |
|
||||
| `»» completed_at` | string(date-time) | false | | |
|
||||
| `»» created_at` | string(date-time) | false | | |
|
||||
| `»» error` | string | false | | |
|
||||
| `»» error_code` | [codersdk.JobErrorCode](schemas.md#codersdkjoberrorcode) | false | | |
|
||||
| `»» file_id` | string(uuid) | false | | |
|
||||
| `»» id` | string(uuid) | false | | |
|
||||
| `»» initiator_id` | string(uuid) | false | | |
|
||||
| `»» input` | [codersdk.ProvisionerJobInput](schemas.md#codersdkprovisionerjobinput) | false | | |
|
||||
| `»»» error` | string | false | | |
|
||||
| `»»» template_version_id` | string(uuid) | false | | |
|
||||
| `»»» workspace_build_id` | string(uuid) | false | | |
|
||||
| `»» logs_overflowed` | boolean | false | | |
|
||||
| `»» metadata` | [codersdk.ProvisionerJobMetadata](schemas.md#codersdkprovisionerjobmetadata) | false | | |
|
||||
| `»»» template_display_name` | string | false | | |
|
||||
| `»»» template_icon` | string | false | | |
|
||||
| `»»» template_id` | string(uuid) | false | | |
|
||||
| `»»» template_name` | string | false | | |
|
||||
| `»»» template_version_name` | string | false | | |
|
||||
| `»»» workspace_id` | string(uuid) | false | | |
|
||||
| `»»» workspace_name` | string | false | | |
|
||||
| `»» organization_id` | string(uuid) | false | | |
|
||||
| `»» queue_position` | integer | false | | |
|
||||
| `»» queue_size` | integer | false | | |
|
||||
| `»» started_at` | string(date-time) | false | | |
|
||||
| `»» status` | [codersdk.ProvisionerJobStatus](schemas.md#codersdkprovisionerjobstatus) | false | | |
|
||||
| `»» tags` | object | false | | |
|
||||
| `»»» [any property]` | string | false | | |
|
||||
| `»» type` | [codersdk.ProvisionerJobType](schemas.md#codersdkprovisionerjobtype) | false | | |
|
||||
| `»» worker_id` | string(uuid) | false | | |
|
||||
| `»» worker_name` | string | false | | |
|
||||
| `» matched_provisioners` | [codersdk.MatchedProvisioners](schemas.md#codersdkmatchedprovisioners) | false | | |
|
||||
| `»» available` | integer | false | | Available is the number of provisioner daemons that are available to take jobs. This may be less than the count if some provisioners are busy or have been stopped. |
|
||||
| `»» count` | integer | false | | Count is the number of provisioner daemons that matched the given tags. If the count is 0, it means no provisioner daemons matched the requested tags. |
|
||||
| `»» most_recently_seen` | string(date-time) | false | | Most recently seen is the most recently seen time of the set of matched provisioners. If no provisioners matched, this field will be null. |
|
||||
| `» message` | string | false | | |
|
||||
| `» name` | string | false | | |
|
||||
| `» organization_id` | string(uuid) | false | | |
|
||||
| `» readme` | string | false | | |
|
||||
| `» template_id` | string(uuid) | false | | |
|
||||
| `» updated_at` | string(date-time) | false | | |
|
||||
| `» warnings` | array | false | | |
|
||||
|
||||
#### Enumerated Values
|
||||
|
||||
| Property | Value(s) |
|
||||
|------------------------------|--------------------------------------------------------------------------|
|
||||
| `error_code` | `REQUIRED_TEMPLATE_VARIABLES` |
|
||||
| `workspace_build_transition` | `delete`, `start`, `stop` |
|
||||
| `status` | `canceled`, `canceling`, `failed`, `pending`, `running`, `succeeded` |
|
||||
| `type` | `template_version_dry_run`, `template_version_import`, `workspace_build` |
|
||||
| Property | Value(s) |
|
||||
|--------------|--------------------------------------------------------------------------|
|
||||
| `error_code` | `REQUIRED_TEMPLATE_VARIABLES` |
|
||||
| `status` | `canceled`, `canceling`, `failed`, `pending`, `running`, `succeeded` |
|
||||
| `type` | `template_version_dry_run`, `template_version_import`, `workspace_build` |
|
||||
|
||||
To perform this operation, you must be authenticated. [Learn more](authentication.md).
|
||||
|
||||
@@ -1621,7 +1615,6 @@ curl -X GET http://coder-server:8080/api/v2/templates/{template}/versions/{templ
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -1666,72 +1659,70 @@ curl -X GET http://coder-server:8080/api/v2/templates/{template}/versions/{templ
|
||||
|
||||
Status Code **200**
|
||||
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|----------------------------------|------------------------------------------------------------------------------|----------|--------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `[array item]` | array | false | | |
|
||||
| `» archived` | boolean | false | | |
|
||||
| `» created_at` | string(date-time) | false | | |
|
||||
| `» created_by` | [codersdk.MinimalUser](schemas.md#codersdkminimaluser) | false | | |
|
||||
| `»» avatar_url` | string(uri) | false | | |
|
||||
| `»» id` | string(uuid) | true | | |
|
||||
| `»» name` | string | false | | |
|
||||
| `»» username` | string | true | | |
|
||||
| `» has_external_agent` | boolean | false | | |
|
||||
| `» id` | string(uuid) | false | | |
|
||||
| `» job` | [codersdk.ProvisionerJob](schemas.md#codersdkprovisionerjob) | false | | |
|
||||
| `»» available_workers` | array | false | | |
|
||||
| `»» canceled_at` | string(date-time) | false | | |
|
||||
| `»» completed_at` | string(date-time) | false | | |
|
||||
| `»» created_at` | string(date-time) | false | | |
|
||||
| `»» error` | string | false | | |
|
||||
| `»» error_code` | [codersdk.JobErrorCode](schemas.md#codersdkjoberrorcode) | false | | |
|
||||
| `»» file_id` | string(uuid) | false | | |
|
||||
| `»» id` | string(uuid) | false | | |
|
||||
| `»» initiator_id` | string(uuid) | false | | |
|
||||
| `»» input` | [codersdk.ProvisionerJobInput](schemas.md#codersdkprovisionerjobinput) | false | | |
|
||||
| `»»» error` | string | false | | |
|
||||
| `»»» template_version_id` | string(uuid) | false | | |
|
||||
| `»»» workspace_build_id` | string(uuid) | false | | |
|
||||
| `»» logs_overflowed` | boolean | false | | |
|
||||
| `»» metadata` | [codersdk.ProvisionerJobMetadata](schemas.md#codersdkprovisionerjobmetadata) | false | | |
|
||||
| `»»» template_display_name` | string | false | | |
|
||||
| `»»» template_icon` | string | false | | |
|
||||
| `»»» template_id` | string(uuid) | false | | |
|
||||
| `»»» template_name` | string | false | | |
|
||||
| `»»» template_version_name` | string | false | | |
|
||||
| `»»» workspace_build_transition` | [codersdk.WorkspaceTransition](schemas.md#codersdkworkspacetransition) | false | | |
|
||||
| `»»» workspace_id` | string(uuid) | false | | |
|
||||
| `»»» workspace_name` | string | false | | |
|
||||
| `»» organization_id` | string(uuid) | false | | |
|
||||
| `»» queue_position` | integer | false | | |
|
||||
| `»» queue_size` | integer | false | | |
|
||||
| `»» started_at` | string(date-time) | false | | |
|
||||
| `»» status` | [codersdk.ProvisionerJobStatus](schemas.md#codersdkprovisionerjobstatus) | false | | |
|
||||
| `»» tags` | object | false | | |
|
||||
| `»»» [any property]` | string | false | | |
|
||||
| `»» type` | [codersdk.ProvisionerJobType](schemas.md#codersdkprovisionerjobtype) | false | | |
|
||||
| `»» worker_id` | string(uuid) | false | | |
|
||||
| `»» worker_name` | string | false | | |
|
||||
| `» matched_provisioners` | [codersdk.MatchedProvisioners](schemas.md#codersdkmatchedprovisioners) | false | | |
|
||||
| `»» available` | integer | false | | Available is the number of provisioner daemons that are available to take jobs. This may be less than the count if some provisioners are busy or have been stopped. |
|
||||
| `»» count` | integer | false | | Count is the number of provisioner daemons that matched the given tags. If the count is 0, it means no provisioner daemons matched the requested tags. |
|
||||
| `»» most_recently_seen` | string(date-time) | false | | Most recently seen is the most recently seen time of the set of matched provisioners. If no provisioners matched, this field will be null. |
|
||||
| `» message` | string | false | | |
|
||||
| `» name` | string | false | | |
|
||||
| `» organization_id` | string(uuid) | false | | |
|
||||
| `» readme` | string | false | | |
|
||||
| `» template_id` | string(uuid) | false | | |
|
||||
| `» updated_at` | string(date-time) | false | | |
|
||||
| `» warnings` | array | false | | |
|
||||
| Name | Type | Required | Restrictions | Description |
|
||||
|-----------------------------|------------------------------------------------------------------------------|----------|--------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| `[array item]` | array | false | | |
|
||||
| `» archived` | boolean | false | | |
|
||||
| `» created_at` | string(date-time) | false | | |
|
||||
| `» created_by` | [codersdk.MinimalUser](schemas.md#codersdkminimaluser) | false | | |
|
||||
| `»» avatar_url` | string(uri) | false | | |
|
||||
| `»» id` | string(uuid) | true | | |
|
||||
| `»» name` | string | false | | |
|
||||
| `»» username` | string | true | | |
|
||||
| `» has_external_agent` | boolean | false | | |
|
||||
| `» id` | string(uuid) | false | | |
|
||||
| `» job` | [codersdk.ProvisionerJob](schemas.md#codersdkprovisionerjob) | false | | |
|
||||
| `»» available_workers` | array | false | | |
|
||||
| `»» canceled_at` | string(date-time) | false | | |
|
||||
| `»» completed_at` | string(date-time) | false | | |
|
||||
| `»» created_at` | string(date-time) | false | | |
|
||||
| `»» error` | string | false | | |
|
||||
| `»» error_code` | [codersdk.JobErrorCode](schemas.md#codersdkjoberrorcode) | false | | |
|
||||
| `»» file_id` | string(uuid) | false | | |
|
||||
| `»» id` | string(uuid) | false | | |
|
||||
| `»» initiator_id` | string(uuid) | false | | |
|
||||
| `»» input` | [codersdk.ProvisionerJobInput](schemas.md#codersdkprovisionerjobinput) | false | | |
|
||||
| `»»» error` | string | false | | |
|
||||
| `»»» template_version_id` | string(uuid) | false | | |
|
||||
| `»»» workspace_build_id` | string(uuid) | false | | |
|
||||
| `»» logs_overflowed` | boolean | false | | |
|
||||
| `»» metadata` | [codersdk.ProvisionerJobMetadata](schemas.md#codersdkprovisionerjobmetadata) | false | | |
|
||||
| `»»» template_display_name` | string | false | | |
|
||||
| `»»» template_icon` | string | false | | |
|
||||
| `»»» template_id` | string(uuid) | false | | |
|
||||
| `»»» template_name` | string | false | | |
|
||||
| `»»» template_version_name` | string | false | | |
|
||||
| `»»» workspace_id` | string(uuid) | false | | |
|
||||
| `»»» workspace_name` | string | false | | |
|
||||
| `»» organization_id` | string(uuid) | false | | |
|
||||
| `»» queue_position` | integer | false | | |
|
||||
| `»» queue_size` | integer | false | | |
|
||||
| `»» started_at` | string(date-time) | false | | |
|
||||
| `»» status` | [codersdk.ProvisionerJobStatus](schemas.md#codersdkprovisionerjobstatus) | false | | |
|
||||
| `»» tags` | object | false | | |
|
||||
| `»»» [any property]` | string | false | | |
|
||||
| `»» type` | [codersdk.ProvisionerJobType](schemas.md#codersdkprovisionerjobtype) | false | | |
|
||||
| `»» worker_id` | string(uuid) | false | | |
|
||||
| `»» worker_name` | string | false | | |
|
||||
| `» matched_provisioners` | [codersdk.MatchedProvisioners](schemas.md#codersdkmatchedprovisioners) | false | | |
|
||||
| `»» available` | integer | false | | Available is the number of provisioner daemons that are available to take jobs. This may be less than the count if some provisioners are busy or have been stopped. |
|
||||
| `»» count` | integer | false | | Count is the number of provisioner daemons that matched the given tags. If the count is 0, it means no provisioner daemons matched the requested tags. |
|
||||
| `»» most_recently_seen` | string(date-time) | false | | Most recently seen is the most recently seen time of the set of matched provisioners. If no provisioners matched, this field will be null. |
|
||||
| `» message` | string | false | | |
|
||||
| `» name` | string | false | | |
|
||||
| `» organization_id` | string(uuid) | false | | |
|
||||
| `» readme` | string | false | | |
|
||||
| `» template_id` | string(uuid) | false | | |
|
||||
| `» updated_at` | string(date-time) | false | | |
|
||||
| `» warnings` | array | false | | |
|
||||
|
||||
#### Enumerated Values
|
||||
|
||||
| Property | Value(s) |
|
||||
|------------------------------|--------------------------------------------------------------------------|
|
||||
| `error_code` | `REQUIRED_TEMPLATE_VARIABLES` |
|
||||
| `workspace_build_transition` | `delete`, `start`, `stop` |
|
||||
| `status` | `canceled`, `canceling`, `failed`, `pending`, `running`, `succeeded` |
|
||||
| `type` | `template_version_dry_run`, `template_version_import`, `workspace_build` |
|
||||
| Property | Value(s) |
|
||||
|--------------|--------------------------------------------------------------------------|
|
||||
| `error_code` | `REQUIRED_TEMPLATE_VARIABLES` |
|
||||
| `status` | `canceled`, `canceling`, `failed`, `pending`, `running`, `succeeded` |
|
||||
| `type` | `template_version_dry_run`, `template_version_import`, `workspace_build` |
|
||||
|
||||
To perform this operation, you must be authenticated. [Learn more](authentication.md).
|
||||
|
||||
@@ -1794,7 +1785,6 @@ curl -X GET http://coder-server:8080/api/v2/templateversions/{templateversion} \
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -1906,7 +1896,6 @@ curl -X PATCH http://coder-server:8080/api/v2/templateversions/{templateversion}
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -2106,7 +2095,6 @@ curl -X POST http://coder-server:8080/api/v2/templateversions/{templateversion}/
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -2182,7 +2170,6 @@ curl -X GET http://coder-server:8080/api/v2/templateversions/{templateversion}/d
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
|
||||
Generated
-6
@@ -115,7 +115,6 @@ of the template will be used.
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -479,7 +478,6 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -810,7 +808,6 @@ of the template will be used.
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -1119,7 +1116,6 @@ curl -X GET http://coder-server:8080/api/v2/workspaces \
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -1409,7 +1405,6 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace} \
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
@@ -1976,7 +1971,6 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/dormant \
|
||||
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc",
|
||||
"template_name": "string",
|
||||
"template_version_name": "string",
|
||||
"workspace_build_transition": "start",
|
||||
"workspace_id": "0967198e-ec7b-4c6b-b4d3-f71244cadbe9",
|
||||
"workspace_name": "string"
|
||||
},
|
||||
|
||||
+4
-4
@@ -54,10 +54,10 @@ Select which organization (uuid or name) to use.
|
||||
|
||||
### -c, --column
|
||||
|
||||
| | |
|
||||
|---------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Type | <code>[id\|created at\|started at\|completed at\|canceled at\|error\|error code\|status\|worker id\|worker name\|file id\|tags\|queue position\|queue size\|organization id\|initiator id\|template version id\|workspace build id\|type\|available workers\|template version name\|template id\|template name\|template display name\|template icon\|workspace id\|workspace name\|workspace build transition\|logs overflowed\|organization\|queue]</code> |
|
||||
| Default | <code>created at,id,type,template display name,status,queue,tags</code> |
|
||||
| | |
|
||||
|---------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||
| Type | <code>[id\|created at\|started at\|completed at\|canceled at\|error\|error code\|status\|worker id\|worker name\|file id\|tags\|queue position\|queue size\|organization id\|initiator id\|template version id\|workspace build id\|type\|available workers\|template version name\|template id\|template name\|template display name\|template icon\|workspace id\|workspace name\|logs overflowed\|organization\|queue]</code> |
|
||||
| Default | <code>created at,id,type,template display name,status,queue,tags</code> |
|
||||
|
||||
Columns to display in table output.
|
||||
|
||||
|
||||
Generated
-11
@@ -1702,17 +1702,6 @@ How often to reconcile workspace prebuilds state.
|
||||
|
||||
Hide AI tasks from the dashboard.
|
||||
|
||||
### --chat-debug-logging-enabled
|
||||
|
||||
| | |
|
||||
|-------------|------------------------------------------------|
|
||||
| Type | <code>bool</code> |
|
||||
| Environment | <code>$CODER_CHAT_DEBUG_LOGGING_ENABLED</code> |
|
||||
| YAML | <code>chat.debugLoggingEnabled</code> |
|
||||
| Default | <code>false</code> |
|
||||
|
||||
Force chat debug logging on for every chat, bypassing the runtime admin and user opt-in settings.
|
||||
|
||||
### --aibridge-enabled
|
||||
|
||||
| | |
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# 1.93.1
|
||||
FROM rust:slim@sha256:cf09adf8c3ebaba10779e5c23ff7fe4df4cccdab8a91f199b0c142c53fef3e1a AS rust-utils
|
||||
FROM rust:slim@sha256:a08d20a404f947ed358dfb63d1ee7e0b88ecad3c45ba9682ccbf2cb09c98acca AS rust-utils
|
||||
# Install rust helper programs
|
||||
ENV CARGO_INSTALL_ROOT=/tmp/
|
||||
# Use more reliable mirrors for Debian packages
|
||||
|
||||
@@ -416,7 +416,7 @@ module "vscode-web" {
|
||||
module "jetbrains" {
|
||||
count = contains(jsondecode(data.coder_parameter.ide_choices.value), "jetbrains") ? data.coder_workspace.me.start_count : 0
|
||||
source = "dev.registry.coder.com/coder/jetbrains/coder"
|
||||
version = "1.4.0"
|
||||
version = "1.3.1"
|
||||
agent_id = coder_agent.dev.id
|
||||
agent_name = "dev"
|
||||
folder = local.repo_dir
|
||||
@@ -922,7 +922,7 @@ resource "coder_script" "boundary_config_setup" {
|
||||
module "claude-code" {
|
||||
count = data.coder_task.me.enabled ? data.coder_workspace.me.start_count : 0
|
||||
source = "dev.registry.coder.com/coder/claude-code/coder"
|
||||
version = "4.9.2"
|
||||
version = "4.9.1"
|
||||
enable_boundary = true
|
||||
agent_id = coder_agent.dev.id
|
||||
workdir = local.repo_dir
|
||||
|
||||
@@ -2093,6 +2093,64 @@ func TestUpstreamProxy(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
// TestProxy_MITM_CustomProvider verifies that a non-builtin provider
|
||||
// (e.g. OpenRouter) whose domain is added to the allowlist is correctly
|
||||
// MITM'd and routed through the proxy to the bridge endpoint.
|
||||
func TestProxy_MITM_CustomProvider(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
const (
|
||||
openrouterDomain = "openrouter.ai"
|
||||
openrouterProvider = "openrouter"
|
||||
)
|
||||
|
||||
// Track what aibridged receives.
|
||||
var receivedPath, receivedBYOK string
|
||||
|
||||
// Create a mock aibridged server that captures requests.
|
||||
aibridgedServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
receivedPath = r.URL.Path
|
||||
receivedBYOK = r.Header.Get(agplaibridge.HeaderCoderToken)
|
||||
w.WriteHeader(http.StatusOK)
|
||||
_, _ = w.Write([]byte("hello from aibridged"))
|
||||
}))
|
||||
t.Cleanup(aibridgedServer.Close)
|
||||
|
||||
// Wire the custom domain and provider mapping directly, as the
|
||||
// real daemon would after calling domainsFromProviders.
|
||||
srv := newTestProxy(t,
|
||||
withCoderAccessURL(aibridgedServer.URL),
|
||||
withDomainAllowlist(openrouterDomain),
|
||||
withAIBridgeProviderFromHost(func(host string) string {
|
||||
if host == openrouterDomain {
|
||||
return openrouterProvider
|
||||
}
|
||||
return ""
|
||||
}),
|
||||
)
|
||||
|
||||
certPool := getProxyCertPool(t)
|
||||
client := newProxyClient(t, srv, makeProxyAuthHeader("coder-token"), certPool, false)
|
||||
req, err := http.NewRequestWithContext(t.Context(), http.MethodPost, "https://"+openrouterDomain+"/api/v1/chat/completions", strings.NewReader(`{}`))
|
||||
require.NoError(t, err)
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("Authorization", "Bearer user-llm-token")
|
||||
|
||||
resp, err := client.Do(req)
|
||||
require.NoError(t, err)
|
||||
defer resp.Body.Close()
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, http.StatusOK, resp.StatusCode)
|
||||
require.Equal(t, "hello from aibridged", string(body))
|
||||
|
||||
// The proxy should route through the aibridge path using the custom
|
||||
// provider name.
|
||||
require.Equal(t, "/api/v2/aibridge/"+openrouterProvider+"/api/v1/chat/completions", receivedPath)
|
||||
require.Equal(t, "coder-token", receivedBYOK)
|
||||
}
|
||||
|
||||
func TestProxy_PrivateIPBlocking(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
|
||||
+138
-51
@@ -17,61 +17,11 @@ import (
|
||||
"github.com/coder/coder/v2/enterprise/coderd"
|
||||
)
|
||||
|
||||
func newAIBridgeDaemon(coderAPI *coderd.API) (*aibridged.Server, error) {
|
||||
func newAIBridgeDaemon(coderAPI *coderd.API, providers []aibridge.Provider) (*aibridged.Server, error) {
|
||||
ctx := context.Background()
|
||||
coderAPI.Logger.Debug(ctx, "starting in-memory aibridge daemon")
|
||||
|
||||
logger := coderAPI.Logger.Named("aibridged")
|
||||
cfg := coderAPI.DeploymentValues.AI.BridgeConfig
|
||||
|
||||
// Build circuit breaker config if enabled.
|
||||
var cbConfig *config.CircuitBreaker
|
||||
if cfg.CircuitBreakerEnabled.Value() {
|
||||
cbConfig = &config.CircuitBreaker{
|
||||
FailureThreshold: uint32(cfg.CircuitBreakerFailureThreshold.Value()), //nolint:gosec // Validated by serpent.Validate in deployment options.
|
||||
Interval: cfg.CircuitBreakerInterval.Value(),
|
||||
Timeout: cfg.CircuitBreakerTimeout.Value(),
|
||||
MaxRequests: uint32(cfg.CircuitBreakerMaxRequests.Value()), //nolint:gosec // Validated by serpent.Validate in deployment options.
|
||||
}
|
||||
}
|
||||
|
||||
// Setup supported providers with circuit breaker config.
|
||||
providers := []aibridge.Provider{
|
||||
aibridge.NewOpenAIProvider(aibridge.OpenAIConfig{
|
||||
Name: aibridge.ProviderOpenAI,
|
||||
BaseURL: cfg.OpenAI.BaseURL.String(),
|
||||
Key: cfg.OpenAI.Key.String(),
|
||||
CircuitBreaker: cbConfig,
|
||||
SendActorHeaders: cfg.SendActorHeaders.Value(),
|
||||
}),
|
||||
aibridge.NewAnthropicProvider(aibridge.AnthropicConfig{
|
||||
Name: aibridge.ProviderAnthropic,
|
||||
BaseURL: cfg.Anthropic.BaseURL.String(),
|
||||
Key: cfg.Anthropic.Key.String(),
|
||||
CircuitBreaker: cbConfig,
|
||||
SendActorHeaders: cfg.SendActorHeaders.Value(),
|
||||
}, getBedrockConfig(cfg.Bedrock)),
|
||||
aibridge.NewCopilotProvider(aibridge.CopilotConfig{
|
||||
Name: aibridge.ProviderCopilot,
|
||||
CircuitBreaker: cbConfig,
|
||||
}),
|
||||
aibridge.NewCopilotProvider(aibridge.CopilotConfig{
|
||||
Name: agplaibridge.ProviderCopilotBusiness,
|
||||
BaseURL: "https://" + agplaibridge.HostCopilotBusiness,
|
||||
CircuitBreaker: cbConfig,
|
||||
}),
|
||||
aibridge.NewCopilotProvider(aibridge.CopilotConfig{
|
||||
Name: agplaibridge.ProviderCopilotEnterprise,
|
||||
BaseURL: "https://" + agplaibridge.HostCopilotEnterprise,
|
||||
CircuitBreaker: cbConfig,
|
||||
}),
|
||||
aibridge.NewOpenAIProvider(aibridge.OpenAIConfig{
|
||||
Name: agplaibridge.ProviderChatGPT,
|
||||
BaseURL: agplaibridge.BaseURLChatGPT,
|
||||
CircuitBreaker: cbConfig,
|
||||
SendActorHeaders: cfg.SendActorHeaders.Value(),
|
||||
}),
|
||||
}
|
||||
|
||||
reg := prometheus.WrapRegistererWithPrefix("coder_aibridged_", coderAPI.PrometheusRegistry)
|
||||
metrics := aibridge.NewMetrics(reg)
|
||||
@@ -93,6 +43,143 @@ func newAIBridgeDaemon(coderAPI *coderd.API) (*aibridged.Server, error) {
|
||||
return srv, nil
|
||||
}
|
||||
|
||||
// buildProviders constructs the list of aibridge providers from config.
|
||||
// It merges legacy single-provider env vars, indexed provider configs, and
|
||||
// built-in providers (copilot variants, chatgpt):
|
||||
// 1. Legacy providers (from CODER_AIBRIDGE_OPENAI_KEY, etc.) are added first.
|
||||
// If a legacy name conflicts with an indexed provider, startup fails with
|
||||
// a clear error asking the admin to remove one or the other.
|
||||
// 2. Indexed providers (from CODER_AIBRIDGE_PROVIDER_<N>_*) are added next.
|
||||
// 3. Built-in providers are always added unless their name is already claimed.
|
||||
func buildProviders(cfg codersdk.AIBridgeConfig) ([]aibridge.Provider, error) {
|
||||
var cbConfig *config.CircuitBreaker
|
||||
if cfg.CircuitBreakerEnabled.Value() {
|
||||
cbConfig = &config.CircuitBreaker{
|
||||
FailureThreshold: uint32(cfg.CircuitBreakerFailureThreshold.Value()), //nolint:gosec // Validated by serpent.Validate in deployment options.
|
||||
Interval: cfg.CircuitBreakerInterval.Value(),
|
||||
Timeout: cfg.CircuitBreakerTimeout.Value(),
|
||||
MaxRequests: uint32(cfg.CircuitBreakerMaxRequests.Value()), //nolint:gosec // Validated by serpent.Validate in deployment options.
|
||||
}
|
||||
}
|
||||
|
||||
var providers []aibridge.Provider
|
||||
usedNames := make(map[string]struct{})
|
||||
|
||||
// Collect names from indexed providers so we know which legacy/builtin
|
||||
// providers to skip.
|
||||
for _, p := range cfg.Providers {
|
||||
name := p.Name
|
||||
if name == "" {
|
||||
name = p.Type
|
||||
}
|
||||
usedNames[name] = struct{}{}
|
||||
}
|
||||
|
||||
// Add legacy OpenAI provider if configured.
|
||||
if cfg.LegacyOpenAI.Key.String() != "" {
|
||||
if _, conflict := usedNames[aibridge.ProviderOpenAI]; conflict {
|
||||
return nil, xerrors.Errorf("legacy CODER_AIBRIDGE_OPENAI_KEY conflicts with indexed provider named %q; remove one or the other", aibridge.ProviderOpenAI)
|
||||
}
|
||||
providers = append(providers, aibridge.NewOpenAIProvider(aibridge.OpenAIConfig{
|
||||
Name: aibridge.ProviderOpenAI,
|
||||
BaseURL: cfg.LegacyOpenAI.BaseURL.String(),
|
||||
Key: cfg.LegacyOpenAI.Key.String(),
|
||||
CircuitBreaker: cbConfig,
|
||||
SendActorHeaders: cfg.SendActorHeaders.Value(),
|
||||
}))
|
||||
usedNames[aibridge.ProviderOpenAI] = struct{}{}
|
||||
}
|
||||
|
||||
// Add legacy Anthropic provider if configured.
|
||||
if cfg.LegacyAnthropic.Key.String() != "" {
|
||||
if _, conflict := usedNames[aibridge.ProviderAnthropic]; conflict {
|
||||
return nil, xerrors.Errorf("legacy CODER_AIBRIDGE_ANTHROPIC_KEY conflicts with indexed provider named %q; remove one or the other", aibridge.ProviderAnthropic)
|
||||
}
|
||||
providers = append(providers, aibridge.NewAnthropicProvider(aibridge.AnthropicConfig{
|
||||
Name: aibridge.ProviderAnthropic,
|
||||
BaseURL: cfg.LegacyAnthropic.BaseURL.String(),
|
||||
Key: cfg.LegacyAnthropic.Key.String(),
|
||||
CircuitBreaker: cbConfig,
|
||||
SendActorHeaders: cfg.SendActorHeaders.Value(),
|
||||
}, getBedrockConfig(cfg.LegacyBedrock)))
|
||||
usedNames[aibridge.ProviderAnthropic] = struct{}{}
|
||||
}
|
||||
|
||||
// Add indexed providers.
|
||||
for _, p := range cfg.Providers {
|
||||
name := p.Name
|
||||
if name == "" {
|
||||
name = p.Type
|
||||
}
|
||||
switch p.Type {
|
||||
case aibridge.ProviderOpenAI:
|
||||
providers = append(providers, aibridge.NewOpenAIProvider(aibridge.OpenAIConfig{
|
||||
Name: name,
|
||||
BaseURL: p.BaseURL,
|
||||
Key: p.Key,
|
||||
CircuitBreaker: cbConfig,
|
||||
SendActorHeaders: cfg.SendActorHeaders.Value(),
|
||||
}))
|
||||
case aibridge.ProviderAnthropic:
|
||||
providers = append(providers, aibridge.NewAnthropicProvider(aibridge.AnthropicConfig{
|
||||
Name: name,
|
||||
BaseURL: p.BaseURL,
|
||||
Key: p.Key,
|
||||
CircuitBreaker: cbConfig,
|
||||
SendActorHeaders: cfg.SendActorHeaders.Value(),
|
||||
}, bedrockConfigFromProvider(p)))
|
||||
case aibridge.ProviderCopilot:
|
||||
providers = append(providers, aibridge.NewCopilotProvider(aibridge.CopilotConfig{
|
||||
Name: name,
|
||||
BaseURL: p.BaseURL,
|
||||
CircuitBreaker: cbConfig,
|
||||
}))
|
||||
default:
|
||||
return nil, xerrors.Errorf("unknown provider type %q for provider %q", p.Type, name)
|
||||
}
|
||||
}
|
||||
|
||||
// Always add built-in providers unless their name is already claimed.
|
||||
addBuiltin := func(name string, p aibridge.Provider) {
|
||||
if _, exists := usedNames[name]; !exists {
|
||||
providers = append(providers, p)
|
||||
usedNames[name] = struct{}{}
|
||||
}
|
||||
}
|
||||
addBuiltin(aibridge.ProviderCopilot, aibridge.NewCopilotProvider(aibridge.CopilotConfig{
|
||||
Name: aibridge.ProviderCopilot, CircuitBreaker: cbConfig,
|
||||
}))
|
||||
addBuiltin(agplaibridge.ProviderCopilotBusiness, aibridge.NewCopilotProvider(aibridge.CopilotConfig{
|
||||
Name: agplaibridge.ProviderCopilotBusiness, BaseURL: "https://" + agplaibridge.HostCopilotBusiness, CircuitBreaker: cbConfig,
|
||||
}))
|
||||
addBuiltin(agplaibridge.ProviderCopilotEnterprise, aibridge.NewCopilotProvider(aibridge.CopilotConfig{
|
||||
Name: agplaibridge.ProviderCopilotEnterprise, BaseURL: "https://" + agplaibridge.HostCopilotEnterprise, CircuitBreaker: cbConfig,
|
||||
}))
|
||||
addBuiltin(agplaibridge.ProviderChatGPT, aibridge.NewOpenAIProvider(aibridge.OpenAIConfig{
|
||||
Name: agplaibridge.ProviderChatGPT, BaseURL: agplaibridge.BaseURLChatGPT, CircuitBreaker: cbConfig,
|
||||
SendActorHeaders: cfg.SendActorHeaders.Value(),
|
||||
}))
|
||||
|
||||
return providers, nil
|
||||
}
|
||||
|
||||
// bedrockConfigFromProvider converts Bedrock fields from an indexed
|
||||
// AIBridgeProviderConfig into an aibridge AWSBedrockConfig.
|
||||
// Returns nil if no Bedrock fields are set.
|
||||
func bedrockConfigFromProvider(p codersdk.AIBridgeProviderConfig) *aibridge.AWSBedrockConfig {
|
||||
if p.BedrockRegion == "" && p.BedrockBaseURL == "" && p.BedrockAccessKey == "" && p.BedrockAccessKeySecret == "" {
|
||||
return nil
|
||||
}
|
||||
return &aibridge.AWSBedrockConfig{
|
||||
BaseURL: p.BedrockBaseURL,
|
||||
Region: p.BedrockRegion,
|
||||
AccessKey: p.BedrockAccessKey,
|
||||
AccessKeySecret: p.BedrockAccessKeySecret,
|
||||
Model: p.BedrockModel,
|
||||
SmallFastModel: p.BedrockSmallFastModel,
|
||||
}
|
||||
}
|
||||
|
||||
func getBedrockConfig(cfg codersdk.AIBridgeBedrockConfig) *aibridge.AWSBedrockConfig {
|
||||
if cfg.Region.String() == "" && cfg.BaseURL.String() == "" && cfg.AccessKey.String() == "" && cfg.AccessKeySecret.String() == "" {
|
||||
return nil
|
||||
|
||||
@@ -0,0 +1,252 @@
|
||||
//go:build !slim
|
||||
|
||||
package cli
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/coder/aibridge"
|
||||
agplaibridge "github.com/coder/coder/v2/coderd/aibridge"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
"github.com/coder/serpent"
|
||||
)
|
||||
|
||||
// builtinProviderNames are the providers that buildProviders always registers
|
||||
// unless explicitly overridden.
|
||||
var builtinProviderNames = []string{
|
||||
aibridge.ProviderCopilot,
|
||||
agplaibridge.ProviderCopilotBusiness,
|
||||
agplaibridge.ProviderCopilotEnterprise,
|
||||
agplaibridge.ProviderChatGPT,
|
||||
}
|
||||
|
||||
func assertHasBuiltins(t *testing.T, names []string) {
|
||||
t.Helper()
|
||||
for _, b := range builtinProviderNames {
|
||||
assert.Contains(t, names, b)
|
||||
}
|
||||
}
|
||||
|
||||
func TestBuildProviders(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
t.Run("EmptyConfig", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
providers, err := buildProviders(codersdk.AIBridgeConfig{})
|
||||
require.NoError(t, err)
|
||||
|
||||
names := providerNames(providers)
|
||||
assertHasBuiltins(t, names)
|
||||
assert.Len(t, names, len(builtinProviderNames))
|
||||
})
|
||||
|
||||
t.Run("LegacyOnly", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
cfg := codersdk.AIBridgeConfig{}
|
||||
cfg.LegacyOpenAI.Key = serpent.String("sk-openai")
|
||||
cfg.LegacyAnthropic.Key = serpent.String("sk-anthropic")
|
||||
|
||||
providers, err := buildProviders(cfg)
|
||||
require.NoError(t, err)
|
||||
|
||||
names := providerNames(providers)
|
||||
assertHasBuiltins(t, names)
|
||||
assert.Contains(t, names, aibridge.ProviderOpenAI)
|
||||
assert.Contains(t, names, aibridge.ProviderAnthropic)
|
||||
})
|
||||
|
||||
t.Run("IndexedOnly", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
cfg := codersdk.AIBridgeConfig{
|
||||
Providers: []codersdk.AIBridgeProviderConfig{
|
||||
{Type: aibridge.ProviderAnthropic, Name: "anthropic-zdr", Key: "sk-zdr"},
|
||||
{Type: aibridge.ProviderOpenAI, Name: "openai-azure", Key: "sk-azure", BaseURL: "https://azure.openai.com"},
|
||||
},
|
||||
}
|
||||
|
||||
providers, err := buildProviders(cfg)
|
||||
require.NoError(t, err)
|
||||
|
||||
names := providerNames(providers)
|
||||
assertHasBuiltins(t, names)
|
||||
assert.Contains(t, names, "anthropic-zdr")
|
||||
assert.Contains(t, names, "openai-azure")
|
||||
assert.NotContains(t, names, aibridge.ProviderOpenAI)
|
||||
assert.NotContains(t, names, aibridge.ProviderAnthropic)
|
||||
})
|
||||
|
||||
t.Run("LegacyOpenAIConflictsWithIndexed", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
cfg := codersdk.AIBridgeConfig{
|
||||
Providers: []codersdk.AIBridgeProviderConfig{
|
||||
{Type: aibridge.ProviderOpenAI, Name: aibridge.ProviderOpenAI, Key: "sk-indexed"},
|
||||
},
|
||||
}
|
||||
cfg.LegacyOpenAI.Key = serpent.String("sk-legacy")
|
||||
|
||||
_, err := buildProviders(cfg)
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "conflicts with indexed provider")
|
||||
})
|
||||
|
||||
t.Run("LegacyAnthropicConflictsWithIndexed", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
cfg := codersdk.AIBridgeConfig{
|
||||
Providers: []codersdk.AIBridgeProviderConfig{
|
||||
{Type: aibridge.ProviderAnthropic, Name: aibridge.ProviderAnthropic, Key: "sk-indexed"},
|
||||
},
|
||||
}
|
||||
cfg.LegacyAnthropic.Key = serpent.String("sk-legacy")
|
||||
|
||||
_, err := buildProviders(cfg)
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "conflicts with indexed provider")
|
||||
})
|
||||
|
||||
t.Run("IndexedOverridesBuiltin", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
cfg := codersdk.AIBridgeConfig{
|
||||
Providers: []codersdk.AIBridgeProviderConfig{
|
||||
{Type: aibridge.ProviderCopilot, Name: aibridge.ProviderCopilot, BaseURL: "https://custom.copilot.com"},
|
||||
},
|
||||
}
|
||||
|
||||
providers, err := buildProviders(cfg)
|
||||
require.NoError(t, err)
|
||||
|
||||
for _, p := range providers {
|
||||
if p.Name() == aibridge.ProviderCopilot {
|
||||
assert.Equal(t, "https://custom.copilot.com", p.BaseURL())
|
||||
break
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("MixedLegacyAndIndexed", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
cfg := codersdk.AIBridgeConfig{
|
||||
Providers: []codersdk.AIBridgeProviderConfig{
|
||||
{Type: aibridge.ProviderAnthropic, Name: "anthropic-zdr", Key: "sk-zdr"},
|
||||
},
|
||||
}
|
||||
cfg.LegacyOpenAI.Key = serpent.String("sk-openai")
|
||||
cfg.LegacyAnthropic.Key = serpent.String("sk-anthropic")
|
||||
|
||||
providers, err := buildProviders(cfg)
|
||||
require.NoError(t, err)
|
||||
|
||||
names := providerNames(providers)
|
||||
assert.Contains(t, names, aibridge.ProviderOpenAI)
|
||||
assert.Contains(t, names, aibridge.ProviderAnthropic)
|
||||
assert.Contains(t, names, "anthropic-zdr")
|
||||
})
|
||||
|
||||
t.Run("LegacyAnthropicWithBedrock", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
cfg := codersdk.AIBridgeConfig{}
|
||||
cfg.LegacyAnthropic.Key = serpent.String("sk-anthropic")
|
||||
cfg.LegacyBedrock.Region = serpent.String("us-west-2")
|
||||
cfg.LegacyBedrock.AccessKey = serpent.String("AKID")
|
||||
cfg.LegacyBedrock.AccessKeySecret = serpent.String("secret")
|
||||
|
||||
providers, err := buildProviders(cfg)
|
||||
require.NoError(t, err)
|
||||
|
||||
names := providerNames(providers)
|
||||
assert.Contains(t, names, aibridge.ProviderAnthropic)
|
||||
})
|
||||
|
||||
t.Run("UnknownType", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
cfg := codersdk.AIBridgeConfig{
|
||||
Providers: []codersdk.AIBridgeProviderConfig{
|
||||
{Type: "gemini", Name: "gemini-pro"},
|
||||
},
|
||||
}
|
||||
|
||||
_, err := buildProviders(cfg)
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "unknown provider type")
|
||||
})
|
||||
}
|
||||
|
||||
func providerNames(providers []aibridge.Provider) []string {
|
||||
names := make([]string, len(providers))
|
||||
for i, p := range providers {
|
||||
names[i] = p.Name()
|
||||
}
|
||||
return names
|
||||
}
|
||||
|
||||
func TestDomainsFromProviders(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
t.Run("ExtractsHostnames", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
providers, err := buildProviders(codersdk.AIBridgeConfig{
|
||||
Providers: []codersdk.AIBridgeProviderConfig{
|
||||
{Type: aibridge.ProviderOpenAI, Name: "openai", Key: "k"},
|
||||
{Type: aibridge.ProviderAnthropic, Name: "anthropic", Key: "k"},
|
||||
{Type: aibridge.ProviderOpenAI, Name: "custom", Key: "k", BaseURL: "https://custom-llm.example.com:8443/api"},
|
||||
},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
domains, mapping := domainsFromProviders(providers)
|
||||
|
||||
assert.Contains(t, domains, "api.openai.com")
|
||||
assert.Contains(t, domains, "api.anthropic.com")
|
||||
assert.Contains(t, domains, "custom-llm.example.com")
|
||||
|
||||
assert.Equal(t, "openai", mapping("api.openai.com"))
|
||||
assert.Equal(t, "anthropic", mapping("api.anthropic.com"))
|
||||
assert.Equal(t, "custom", mapping("custom-llm.example.com"))
|
||||
assert.Empty(t, mapping("unknown.com"))
|
||||
})
|
||||
|
||||
t.Run("DeduplicatesSameHost", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
providers, err := buildProviders(codersdk.AIBridgeConfig{
|
||||
Providers: []codersdk.AIBridgeProviderConfig{
|
||||
{Type: aibridge.ProviderOpenAI, Name: "first", Key: "k", BaseURL: "https://api.example.com/v1"},
|
||||
{Type: aibridge.ProviderOpenAI, Name: "second", Key: "k", BaseURL: "https://api.example.com/v2"},
|
||||
},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
domains, mapping := domainsFromProviders(providers)
|
||||
|
||||
// Count occurrences of api.example.com.
|
||||
count := 0
|
||||
for _, d := range domains {
|
||||
if d == "api.example.com" {
|
||||
count++
|
||||
}
|
||||
}
|
||||
assert.Equal(t, 1, count)
|
||||
// First provider wins.
|
||||
assert.Equal(t, "first", mapping("api.example.com"))
|
||||
})
|
||||
|
||||
t.Run("CaseInsensitive", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
providers, err := buildProviders(codersdk.AIBridgeConfig{
|
||||
Providers: []codersdk.AIBridgeProviderConfig{
|
||||
{Type: aibridge.ProviderOpenAI, Name: "provider", Key: "k", BaseURL: "https://API.Example.COM/v1"},
|
||||
},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
domains, mapping := domainsFromProviders(providers)
|
||||
|
||||
assert.Contains(t, domains, "api.example.com")
|
||||
assert.Equal(t, "provider", mapping("API.Example.COM"))
|
||||
assert.Equal(t, "provider", mapping("api.example.com"))
|
||||
})
|
||||
}
|
||||
@@ -4,35 +4,41 @@ package cli
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/url"
|
||||
"strings"
|
||||
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/coder/aibridge"
|
||||
"github.com/coder/coder/v2/enterprise/aibridgeproxyd"
|
||||
"github.com/coder/coder/v2/enterprise/coderd"
|
||||
)
|
||||
|
||||
func newAIBridgeProxyDaemon(coderAPI *coderd.API) (*aibridgeproxyd.Server, error) {
|
||||
func newAIBridgeProxyDaemon(coderAPI *coderd.API, providers []aibridge.Provider) (*aibridgeproxyd.Server, error) {
|
||||
ctx := context.Background()
|
||||
coderAPI.Logger.Debug(ctx, "starting in-memory aibridgeproxy daemon")
|
||||
|
||||
logger := coderAPI.Logger.Named("aibridgeproxyd")
|
||||
|
||||
domains, providerFromHost := domainsFromProviders(providers)
|
||||
|
||||
reg := prometheus.WrapRegistererWithPrefix("coder_aibridgeproxyd_", coderAPI.PrometheusRegistry)
|
||||
metrics := aibridgeproxyd.NewMetrics(reg)
|
||||
|
||||
srv, err := aibridgeproxyd.New(ctx, logger, aibridgeproxyd.Options{
|
||||
ListenAddr: coderAPI.DeploymentValues.AI.BridgeProxyConfig.ListenAddr.String(),
|
||||
TLSCertFile: coderAPI.DeploymentValues.AI.BridgeProxyConfig.TLSCertFile.String(),
|
||||
TLSKeyFile: coderAPI.DeploymentValues.AI.BridgeProxyConfig.TLSKeyFile.String(),
|
||||
CoderAccessURL: coderAPI.AccessURL.String(),
|
||||
MITMCertFile: coderAPI.DeploymentValues.AI.BridgeProxyConfig.MITMCertFile.String(),
|
||||
MITMKeyFile: coderAPI.DeploymentValues.AI.BridgeProxyConfig.MITMKeyFile.String(),
|
||||
DomainAllowlist: coderAPI.DeploymentValues.AI.BridgeProxyConfig.DomainAllowlist.Value(),
|
||||
UpstreamProxy: coderAPI.DeploymentValues.AI.BridgeProxyConfig.UpstreamProxy.String(),
|
||||
UpstreamProxyCA: coderAPI.DeploymentValues.AI.BridgeProxyConfig.UpstreamProxyCA.String(),
|
||||
AllowedPrivateCIDRs: coderAPI.DeploymentValues.AI.BridgeProxyConfig.AllowedPrivateCIDRs.Value(),
|
||||
Metrics: metrics,
|
||||
ListenAddr: coderAPI.DeploymentValues.AI.BridgeProxyConfig.ListenAddr.String(),
|
||||
TLSCertFile: coderAPI.DeploymentValues.AI.BridgeProxyConfig.TLSCertFile.String(),
|
||||
TLSKeyFile: coderAPI.DeploymentValues.AI.BridgeProxyConfig.TLSKeyFile.String(),
|
||||
CoderAccessURL: coderAPI.AccessURL.String(),
|
||||
MITMCertFile: coderAPI.DeploymentValues.AI.BridgeProxyConfig.MITMCertFile.String(),
|
||||
MITMKeyFile: coderAPI.DeploymentValues.AI.BridgeProxyConfig.MITMKeyFile.String(),
|
||||
DomainAllowlist: domains,
|
||||
AIBridgeProviderFromHost: providerFromHost,
|
||||
UpstreamProxy: coderAPI.DeploymentValues.AI.BridgeProxyConfig.UpstreamProxy.String(),
|
||||
UpstreamProxyCA: coderAPI.DeploymentValues.AI.BridgeProxyConfig.UpstreamProxyCA.String(),
|
||||
AllowedPrivateCIDRs: coderAPI.DeploymentValues.AI.BridgeProxyConfig.AllowedPrivateCIDRs.Value(),
|
||||
Metrics: metrics,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("failed to start in-memory aibridgeproxy daemon: %w", err)
|
||||
@@ -40,3 +46,35 @@ func newAIBridgeProxyDaemon(coderAPI *coderd.API) (*aibridgeproxyd.Server, error
|
||||
|
||||
return srv, nil
|
||||
}
|
||||
|
||||
// domainsFromProviders extracts distinct hostnames from providers' base
|
||||
// URLs and builds a host-to-provider-name mapping function. The returned
|
||||
// domain list is suitable for use as DomainAllowlist and the mapping
|
||||
// function is suitable for use as AIBridgeProviderFromHost.
|
||||
func domainsFromProviders(providers []aibridge.Provider) ([]string, func(string) string) {
|
||||
hostToProvider := make(map[string]string, len(providers))
|
||||
var domains []string
|
||||
for _, p := range providers {
|
||||
raw := p.BaseURL()
|
||||
if raw == "" {
|
||||
continue
|
||||
}
|
||||
u, err := url.Parse(raw)
|
||||
if err != nil || u.Hostname() == "" {
|
||||
continue
|
||||
}
|
||||
host := strings.ToLower(u.Hostname())
|
||||
if _, exists := hostToProvider[host]; exists {
|
||||
// First provider wins; duplicates are expected for
|
||||
// providers that share a host (e.g. copilot variants
|
||||
// pointing at the same endpoint).
|
||||
continue
|
||||
}
|
||||
hostToProvider[host] = p.Name()
|
||||
domains = append(domains, host)
|
||||
}
|
||||
|
||||
return domains, func(host string) string {
|
||||
return hostToProvider[strings.ToLower(host)]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -162,6 +162,13 @@ func (r *RootCmd) Server(_ func()) *serpent.Command {
|
||||
usageCron.Start(ctx)
|
||||
closers.Add(usageCron)
|
||||
|
||||
// Build the provider list once; both the bridge daemon and the
|
||||
// proxy daemon consume it.
|
||||
providers, err := buildProviders(options.DeploymentValues.AI.BridgeConfig)
|
||||
if err != nil {
|
||||
return nil, nil, xerrors.Errorf("build aibridge providers: %w", err)
|
||||
}
|
||||
|
||||
// In-memory aibridge daemon.
|
||||
// TODO(@deansheather): the lifecycle of the aibridged server is
|
||||
// probably better managed by the enterprise API type itself. Managing
|
||||
@@ -169,7 +176,7 @@ func (r *RootCmd) Server(_ func()) *serpent.Command {
|
||||
// is not entitled to the feature.
|
||||
var aibridgeDaemon *aibridged.Server
|
||||
if options.DeploymentValues.AI.BridgeConfig.Enabled {
|
||||
aibridgeDaemon, err = newAIBridgeDaemon(api)
|
||||
aibridgeDaemon, err = newAIBridgeDaemon(api, providers)
|
||||
if err != nil {
|
||||
return nil, nil, xerrors.Errorf("create aibridged: %w", err)
|
||||
}
|
||||
@@ -185,7 +192,7 @@ func (r *RootCmd) Server(_ func()) *serpent.Command {
|
||||
|
||||
// In-memory AI Bridge Proxy daemon
|
||||
if options.DeploymentValues.AI.BridgeProxyConfig.Enabled.Value() {
|
||||
aiBridgeProxyServer, err := newAIBridgeProxyDaemon(api)
|
||||
aiBridgeProxyServer, err := newAIBridgeProxyDaemon(api, providers)
|
||||
if err != nil {
|
||||
_ = closers.Close()
|
||||
return nil, nil, xerrors.Errorf("create aibridgeproxyd: %w", err)
|
||||
|
||||
@@ -197,10 +197,6 @@ func TestServerDBCrypt(t *testing.T) {
|
||||
gitAuthLinks, err := db.GetExternalAuthLinksByUserID(ctx, usr.ID)
|
||||
require.NoError(t, err, "failed to get git auth links for user %s", usr.ID)
|
||||
require.Empty(t, gitAuthLinks)
|
||||
|
||||
userSecrets, err := db.ListUserSecretsWithValues(ctx, usr.ID)
|
||||
require.NoError(t, err, "failed to get user secrets for user %s", usr.ID)
|
||||
require.Empty(t, userSecrets)
|
||||
}
|
||||
|
||||
// Validate that the key has been revoked in the database.
|
||||
@@ -246,14 +242,6 @@ func genData(t *testing.T, db database.Store) []database.User {
|
||||
OAuthRefreshToken: "refresh-" + usr.ID.String(),
|
||||
})
|
||||
}
|
||||
|
||||
_ = dbgen.UserSecret(t, db, database.UserSecret{
|
||||
UserID: usr.ID,
|
||||
Name: "secret-" + usr.ID.String(),
|
||||
Value: "value-" + usr.ID.String(),
|
||||
EnvName: "",
|
||||
FilePath: "",
|
||||
})
|
||||
users = append(users, usr)
|
||||
}
|
||||
}
|
||||
@@ -295,13 +283,6 @@ func requireEncryptedWithCipher(ctx context.Context, t *testing.T, db database.S
|
||||
require.Equal(t, c.HexDigest(), gal.OAuthAccessTokenKeyID.String)
|
||||
require.Equal(t, c.HexDigest(), gal.OAuthRefreshTokenKeyID.String)
|
||||
}
|
||||
|
||||
userSecrets, err := db.ListUserSecretsWithValues(ctx, userID)
|
||||
require.NoError(t, err, "failed to get user secrets for user %s", userID)
|
||||
for _, s := range userSecrets {
|
||||
requireEncryptedEquals(t, c, "value-"+userID.String(), s.Value)
|
||||
require.Equal(t, c.HexDigest(), s.ValueKeyID.String)
|
||||
}
|
||||
}
|
||||
|
||||
// nullCipher is a dbcrypt.Cipher that does not encrypt or decrypt.
|
||||
|
||||
@@ -11,7 +11,7 @@ OPTIONS:
|
||||
-O, --org string, $CODER_ORGANIZATION
|
||||
Select which organization (uuid or name) to use.
|
||||
|
||||
-c, --column [id|created at|started at|completed at|canceled at|error|error code|status|worker id|worker name|file id|tags|queue position|queue size|organization id|initiator id|template version id|workspace build id|type|available workers|template version name|template id|template name|template display name|template icon|workspace id|workspace name|workspace build transition|logs overflowed|organization|queue] (default: created at,id,type,template display name,status,queue,tags)
|
||||
-c, --column [id|created at|started at|completed at|canceled at|error|error code|status|worker id|worker name|file id|tags|queue position|queue size|organization id|initiator id|template version id|workspace build id|type|available workers|template version name|template id|template name|template display name|template icon|workspace id|workspace name|logs overflowed|organization|queue] (default: created at,id,type,template display name,status,queue,tags)
|
||||
Columns to display in table output.
|
||||
|
||||
-i, --initiator string, $CODER_PROVISIONER_JOB_LIST_INITIATOR
|
||||
|
||||
@@ -212,13 +212,6 @@ AI BRIDGE PROXY OPTIONS:
|
||||
certificates not trusted by the system. If not provided, the system
|
||||
certificate pool is used.
|
||||
|
||||
CHAT OPTIONS:
|
||||
Configure the background chat processing daemon.
|
||||
|
||||
--chat-debug-logging-enabled bool, $CODER_CHAT_DEBUG_LOGGING_ENABLED (default: false)
|
||||
Force chat debug logging on for every chat, bypassing the runtime
|
||||
admin and user opt-in settings.
|
||||
|
||||
CLIENT OPTIONS:
|
||||
These options change the behavior of how clients interact with the Coder.
|
||||
Clients include the Coder CLI, Coder Desktop, IDE extensions, and the web UI.
|
||||
|
||||
@@ -96,34 +96,6 @@ func Rotate(ctx context.Context, log slog.Logger, sqlDB *sql.DB, ciphers []Ciphe
|
||||
}
|
||||
log.Debug(ctx, "encrypted user chat provider key", slog.F("user_id", uid), slog.F("chat_provider_id", userProviderKey.ChatProviderID), slog.F("current", idx+1), slog.F("cipher", ciphers[0].HexDigest()))
|
||||
}
|
||||
|
||||
userSecrets, err := cryptTx.ListUserSecretsWithValues(ctx, uid)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("get user secrets for user %s: %w", uid, err)
|
||||
}
|
||||
for _, secret := range userSecrets {
|
||||
if secret.ValueKeyID.Valid && secret.ValueKeyID.String == ciphers[0].HexDigest() {
|
||||
log.Debug(ctx, "skipping user secret", slog.F("user_id", uid), slog.F("secret_name", secret.Name), slog.F("current", idx+1), slog.F("cipher", ciphers[0].HexDigest()))
|
||||
continue
|
||||
}
|
||||
if _, err := cryptTx.UpdateUserSecretByUserIDAndName(ctx, database.UpdateUserSecretByUserIDAndNameParams{
|
||||
UserID: uid,
|
||||
Name: secret.Name,
|
||||
UpdateValue: true,
|
||||
Value: secret.Value,
|
||||
ValueKeyID: sql.NullString{}, // dbcrypt will re-encrypt
|
||||
UpdateDescription: false,
|
||||
Description: "",
|
||||
UpdateEnvName: false,
|
||||
EnvName: "",
|
||||
UpdateFilePath: false,
|
||||
FilePath: "",
|
||||
}); err != nil {
|
||||
return xerrors.Errorf("rotate user secret user_id=%s name=%s: %w", uid, secret.Name, err)
|
||||
}
|
||||
log.Debug(ctx, "rotated user secret", slog.F("user_id", uid), slog.F("secret_name", secret.Name), slog.F("current", idx+1), slog.F("cipher", ciphers[0].HexDigest()))
|
||||
}
|
||||
|
||||
return nil
|
||||
}, &database.TxOptions{
|
||||
Isolation: sql.LevelRepeatableRead,
|
||||
@@ -263,34 +235,6 @@ func Decrypt(ctx context.Context, log slog.Logger, sqlDB *sql.DB, ciphers []Ciph
|
||||
}
|
||||
log.Debug(ctx, "decrypted user chat provider key", slog.F("user_id", uid), slog.F("chat_provider_id", userProviderKey.ChatProviderID), slog.F("current", idx+1))
|
||||
}
|
||||
|
||||
userSecrets, err := tx.ListUserSecretsWithValues(ctx, uid)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("get user secrets for user %s: %w", uid, err)
|
||||
}
|
||||
for _, secret := range userSecrets {
|
||||
if !secret.ValueKeyID.Valid {
|
||||
log.Debug(ctx, "skipping user secret", slog.F("user_id", uid), slog.F("secret_name", secret.Name), slog.F("current", idx+1))
|
||||
continue
|
||||
}
|
||||
if _, err := tx.UpdateUserSecretByUserIDAndName(ctx, database.UpdateUserSecretByUserIDAndNameParams{
|
||||
UserID: uid,
|
||||
Name: secret.Name,
|
||||
UpdateValue: true,
|
||||
Value: secret.Value,
|
||||
ValueKeyID: sql.NullString{}, // clear the key ID
|
||||
UpdateDescription: false,
|
||||
Description: "",
|
||||
UpdateEnvName: false,
|
||||
EnvName: "",
|
||||
UpdateFilePath: false,
|
||||
FilePath: "",
|
||||
}); err != nil {
|
||||
return xerrors.Errorf("decrypt user secret user_id=%s name=%s: %w", uid, secret.Name, err)
|
||||
}
|
||||
log.Debug(ctx, "decrypted user secret", slog.F("user_id", uid), slog.F("secret_name", secret.Name), slog.F("current", idx+1))
|
||||
}
|
||||
|
||||
return nil
|
||||
}, &database.TxOptions{
|
||||
Isolation: sql.LevelRepeatableRead,
|
||||
@@ -348,8 +292,6 @@ DELETE FROM external_auth_links
|
||||
OR oauth_refresh_token_key_id IS NOT NULL;
|
||||
DELETE FROM user_chat_provider_keys
|
||||
WHERE api_key_key_id IS NOT NULL;
|
||||
DELETE FROM user_secrets
|
||||
WHERE value_key_id IS NOT NULL;
|
||||
UPDATE chat_providers
|
||||
SET api_key = '',
|
||||
api_key_key_id = NULL
|
||||
|
||||
@@ -717,60 +717,6 @@ func (db *dbCrypt) UpsertMCPServerUserToken(ctx context.Context, params database
|
||||
return tok, nil
|
||||
}
|
||||
|
||||
func (db *dbCrypt) CreateUserSecret(ctx context.Context, params database.CreateUserSecretParams) (database.UserSecret, error) {
|
||||
if err := db.encryptField(¶ms.Value, ¶ms.ValueKeyID); err != nil {
|
||||
return database.UserSecret{}, err
|
||||
}
|
||||
secret, err := db.Store.CreateUserSecret(ctx, params)
|
||||
if err != nil {
|
||||
return database.UserSecret{}, err
|
||||
}
|
||||
if err := db.decryptField(&secret.Value, secret.ValueKeyID); err != nil {
|
||||
return database.UserSecret{}, err
|
||||
}
|
||||
return secret, nil
|
||||
}
|
||||
|
||||
func (db *dbCrypt) GetUserSecretByUserIDAndName(ctx context.Context, arg database.GetUserSecretByUserIDAndNameParams) (database.UserSecret, error) {
|
||||
secret, err := db.Store.GetUserSecretByUserIDAndName(ctx, arg)
|
||||
if err != nil {
|
||||
return database.UserSecret{}, err
|
||||
}
|
||||
if err := db.decryptField(&secret.Value, secret.ValueKeyID); err != nil {
|
||||
return database.UserSecret{}, err
|
||||
}
|
||||
return secret, nil
|
||||
}
|
||||
|
||||
func (db *dbCrypt) ListUserSecretsWithValues(ctx context.Context, userID uuid.UUID) ([]database.UserSecret, error) {
|
||||
secrets, err := db.Store.ListUserSecretsWithValues(ctx, userID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for i := range secrets {
|
||||
if err := db.decryptField(&secrets[i].Value, secrets[i].ValueKeyID); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return secrets, nil
|
||||
}
|
||||
|
||||
func (db *dbCrypt) UpdateUserSecretByUserIDAndName(ctx context.Context, arg database.UpdateUserSecretByUserIDAndNameParams) (database.UserSecret, error) {
|
||||
if arg.UpdateValue {
|
||||
if err := db.encryptField(&arg.Value, &arg.ValueKeyID); err != nil {
|
||||
return database.UserSecret{}, err
|
||||
}
|
||||
}
|
||||
secret, err := db.Store.UpdateUserSecretByUserIDAndName(ctx, arg)
|
||||
if err != nil {
|
||||
return database.UserSecret{}, err
|
||||
}
|
||||
if err := db.decryptField(&secret.Value, secret.ValueKeyID); err != nil {
|
||||
return database.UserSecret{}, err
|
||||
}
|
||||
return secret, nil
|
||||
}
|
||||
|
||||
func (db *dbCrypt) encryptField(field *string, digest *sql.NullString) error {
|
||||
// If no cipher is loaded, then we can't encrypt anything!
|
||||
if db.ciphers == nil || db.primaryCipherDigest == "" {
|
||||
|
||||
Some files were not shown because too many files have changed in this diff. Show More
Reference in New Issue
Block a user