Compare commits

..

1 Commit

Author SHA1 Message Date
Danielle Maywood 51c539ee46 fix(site): move useRef render-time reads/writes into effects
Audit of all useRef calls in site/src/pages/AgentsPage found 12
violations of React's rule: "Don't read or write ref.current
during rendering."

Changes per file:

- AgentChatInput.tsx: Move .focus() side effect from render body
  into useEffect keyed on isLoading. Replace useState-based
  prev-tracking with useRef.

- AgentsPage.tsx: Wrap activeChatIDRef.current sync in useEffect.

- AgentCreateForm.tsx: Remove selectedWorkspaceIdRef and
  selectedModelRef entirely — the memo boundary they were
  protecting is already broken by inline JSX props and direct
  selectedModel prop. Use values directly in useCallback deps.

- AgentDetailView.tsx: Wrap isFetchingRef and onFetchRef syncs in
  useEffect inside ScrollAnchoredContainer.

- ChatContext.ts: Use lazy init pattern for storeRef to avoid
  calling createChatStore() on every render. Wrap
  lastMessageIdRef sync in useEffect.

- useWorkspaceCreationWatcher.ts: Move processedToolCallIdsRef
  reset from derived-state-from-props block into useEffect keyed
  on chatID.

- useFileAttachments.ts: Wrap previewUrlsRef sync in useEffect.

- AgentEmbedPage.tsx: Wrap latestEmbedSessionMutationRef sync in
  useEffect.

- useDesktopConnection.ts: Add rfb state variable so consumers
  get reactive updates. Keep rfbRef for synchronous access in
  callbacks/cleanup. Return state instead of ref snapshot.
2026-03-18 22:15:43 +00:00
133 changed files with 744 additions and 10853 deletions
+10 -10
View File
@@ -109,16 +109,16 @@ jobs:
- name: Reconcile Flux
run: |
set -euxo pipefail
flux --namespace flux-system reconcile source git flux-system
flux --namespace flux-system reconcile source git coder-main
flux --namespace flux-system reconcile kustomization flux-system
flux --namespace flux-system reconcile kustomization coder
flux --namespace flux-system reconcile source chart coder-coder
flux --namespace flux-system reconcile source chart coder-coder-provisioner
flux --namespace coder reconcile helmrelease coder
flux --namespace coder reconcile helmrelease coder-provisioner
flux --namespace coder reconcile helmrelease coder-provisioner-tagged
flux --namespace coder reconcile helmrelease coder-provisioner-tagged-prebuilds
flux --namespace flux-system reconcile --verbose --timeout=5m source git flux-system
flux --namespace flux-system reconcile --verbose --timeout=5m source git coder-main
flux --namespace flux-system reconcile --verbose --timeout=5m kustomization flux-system
flux --namespace flux-system reconcile --verbose --timeout=5m kustomization coder
flux --namespace flux-system reconcile --verbose --timeout=5m source chart coder-coder
flux --namespace flux-system reconcile --verbose --timeout=5m source chart coder-coder-provisioner
flux --namespace coder reconcile --verbose --timeout=10m helmrelease coder
flux --namespace coder reconcile --verbose --timeout=10m helmrelease coder-provisioner
flux --namespace coder reconcile --verbose --timeout=10m helmrelease coder-provisioner-tagged
flux --namespace coder reconcile --verbose --timeout=10m helmrelease coder-provisioner-tagged-prebuilds
# Just updating Flux is usually not enough. The Helm release may get
# redeployed, but unless something causes the Deployment to update the
-5
View File
@@ -208,11 +208,6 @@ seems like it should use `time.Sleep`, read through https://github.com/coder/qua
- Follow [Uber Go Style Guide](https://github.com/uber-go/guide/blob/master/style.md)
- Commit format: `type(scope): message`
- PR titles follow the same `type(scope): message` format.
- When you use a scope, it must be a real filesystem path containing every
changed file.
- Use a broader path scope, or omit the scope, for cross-cutting changes.
- Example: `fix(coderd/chatd): ...` for changes only in `coderd/chatd/`.
### Frontend Patterns
+10 -11
View File
@@ -8,17 +8,16 @@ USAGE:
Aliases: user
SUBCOMMANDS:
activate Update a user's status to 'active'. Active users can fully
interact with the platform
create Create a new user.
delete Delete a user by username or user_id.
edit-roles Edit a user's roles by username or id
list Prints the list of users.
oidc-claims Display the OIDC claims for the authenticated user.
show Show a single user. Use 'me' to indicate the currently
authenticated user.
suspend Update a user's status to 'suspended'. A suspended user
cannot log into the platform
activate Update a user's status to 'active'. Active users can fully
interact with the platform
create Create a new user.
delete Delete a user by username or user_id.
edit-roles Edit a user's roles by username or id
list Prints the list of users.
show Show a single user. Use 'me' to indicate the currently
authenticated user.
suspend Update a user's status to 'suspended'. A suspended user cannot
log into the platform
———
Run `coder --help` for a list of global options.
-24
View File
@@ -1,24 +0,0 @@
coder v0.0.0-devel
USAGE:
coder users oidc-claims [flags]
Display the OIDC claims for the authenticated user.
- Display your OIDC claims:
$ coder users oidc-claims
- Display your OIDC claims as JSON:
$ coder users oidc-claims -o json
OPTIONS:
-c, --column [key|value] (default: key,value)
Columns to display in table output.
-o, --output table|json (default: table)
Output format.
———
Run `coder --help` for a list of global options.
-79
View File
@@ -1,79 +0,0 @@
package cli
import (
"fmt"
"golang.org/x/xerrors"
"github.com/coder/coder/v2/cli/cliui"
"github.com/coder/coder/v2/codersdk"
"github.com/coder/serpent"
)
func (r *RootCmd) userOIDCClaims() *serpent.Command {
formatter := cliui.NewOutputFormatter(
cliui.ChangeFormatterData(
cliui.TableFormat([]claimRow{}, []string{"key", "value"}),
func(data any) (any, error) {
resp, ok := data.(codersdk.OIDCClaimsResponse)
if !ok {
return nil, xerrors.Errorf("expected type %T, got %T", resp, data)
}
rows := make([]claimRow, 0, len(resp.Claims))
for k, v := range resp.Claims {
rows = append(rows, claimRow{
Key: k,
Value: fmt.Sprintf("%v", v),
})
}
return rows, nil
},
),
cliui.JSONFormat(),
)
cmd := &serpent.Command{
Use: "oidc-claims",
Short: "Display the OIDC claims for the authenticated user.",
Long: FormatExamples(
Example{
Description: "Display your OIDC claims",
Command: "coder users oidc-claims",
},
Example{
Description: "Display your OIDC claims as JSON",
Command: "coder users oidc-claims -o json",
},
),
Middleware: serpent.Chain(
serpent.RequireNArgs(0),
),
Handler: func(inv *serpent.Invocation) error {
client, err := r.InitClient(inv)
if err != nil {
return err
}
resp, err := client.UserOIDCClaims(inv.Context())
if err != nil {
return xerrors.Errorf("get oidc claims: %w", err)
}
out, err := formatter.Format(inv.Context(), resp)
if err != nil {
return err
}
_, err = fmt.Fprintln(inv.Stdout, out)
return err
},
}
formatter.AttachOptions(&cmd.Options)
return cmd
}
type claimRow struct {
Key string `json:"-" table:"key,default_sort"`
Value string `json:"-" table:"value"`
}
-161
View File
@@ -1,161 +0,0 @@
package cli_test
import (
"bytes"
"encoding/json"
"testing"
"github.com/golang-jwt/jwt/v4"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/coder/coder/v2/cli/clitest"
"github.com/coder/coder/v2/coderd"
"github.com/coder/coder/v2/coderd/coderdtest"
"github.com/coder/coder/v2/coderd/coderdtest/oidctest"
"github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/testutil"
)
func TestUserOIDCClaims(t *testing.T) {
t.Parallel()
newOIDCTest := func(t *testing.T) (*oidctest.FakeIDP, *codersdk.Client) {
t.Helper()
fake := oidctest.NewFakeIDP(t,
oidctest.WithServing(),
)
cfg := fake.OIDCConfig(t, nil, func(cfg *coderd.OIDCConfig) {
cfg.AllowSignups = true
})
ownerClient := coderdtest.New(t, &coderdtest.Options{
OIDCConfig: cfg,
})
return fake, ownerClient
}
t.Run("OwnClaims", func(t *testing.T) {
t.Parallel()
fake, ownerClient := newOIDCTest(t)
claims := jwt.MapClaims{
"email": "alice@coder.com",
"email_verified": true,
"sub": uuid.NewString(),
"groups": []string{"admin", "eng"},
}
userClient, loginResp := fake.Login(t, ownerClient, claims)
defer loginResp.Body.Close()
inv, root := clitest.New(t, "users", "oidc-claims", "-o", "json")
clitest.SetupConfig(t, userClient, root)
buf := bytes.NewBuffer(nil)
inv.Stdout = buf
err := inv.WithContext(testutil.Context(t, testutil.WaitMedium)).Run()
require.NoError(t, err)
var resp codersdk.OIDCClaimsResponse
err = json.Unmarshal(buf.Bytes(), &resp)
require.NoError(t, err, "unmarshal JSON output")
require.NotEmpty(t, resp.Claims, "claims should not be empty")
assert.Equal(t, "alice@coder.com", resp.Claims["email"])
})
t.Run("Table", func(t *testing.T) {
t.Parallel()
fake, ownerClient := newOIDCTest(t)
claims := jwt.MapClaims{
"email": "bob@coder.com",
"email_verified": true,
"sub": uuid.NewString(),
}
userClient, loginResp := fake.Login(t, ownerClient, claims)
defer loginResp.Body.Close()
inv, root := clitest.New(t, "users", "oidc-claims")
clitest.SetupConfig(t, userClient, root)
buf := bytes.NewBuffer(nil)
inv.Stdout = buf
err := inv.WithContext(testutil.Context(t, testutil.WaitMedium)).Run()
require.NoError(t, err)
output := buf.String()
require.Contains(t, output, "email")
require.Contains(t, output, "bob@coder.com")
})
t.Run("NotOIDCUser", func(t *testing.T) {
t.Parallel()
client := coderdtest.New(t, nil)
_ = coderdtest.CreateFirstUser(t, client)
inv, root := clitest.New(t, "users", "oidc-claims")
clitest.SetupConfig(t, client, root)
err := inv.WithContext(testutil.Context(t, testutil.WaitMedium)).Run()
require.Error(t, err)
require.Contains(t, err.Error(), "not an OIDC user")
})
// Verify that two different OIDC users each only see their own
// claims. The endpoint has no user parameter, so there is no way
// to request another user's claims by design.
t.Run("OnlyOwnClaims", func(t *testing.T) {
t.Parallel()
aliceFake, aliceOwnerClient := newOIDCTest(t)
aliceClaims := jwt.MapClaims{
"email": "alice-isolation@coder.com",
"email_verified": true,
"sub": uuid.NewString(),
}
aliceClient, aliceLoginResp := aliceFake.Login(t, aliceOwnerClient, aliceClaims)
defer aliceLoginResp.Body.Close()
bobFake, bobOwnerClient := newOIDCTest(t)
bobClaims := jwt.MapClaims{
"email": "bob-isolation@coder.com",
"email_verified": true,
"sub": uuid.NewString(),
}
bobClient, bobLoginResp := bobFake.Login(t, bobOwnerClient, bobClaims)
defer bobLoginResp.Body.Close()
ctx := testutil.Context(t, testutil.WaitMedium)
// Alice sees her own claims.
aliceResp, err := aliceClient.UserOIDCClaims(ctx)
require.NoError(t, err)
assert.Equal(t, "alice-isolation@coder.com", aliceResp.Claims["email"])
// Bob sees his own claims.
bobResp, err := bobClient.UserOIDCClaims(ctx)
require.NoError(t, err)
assert.Equal(t, "bob-isolation@coder.com", bobResp.Claims["email"])
})
t.Run("ClaimsNeverNull", func(t *testing.T) {
t.Parallel()
fake, ownerClient := newOIDCTest(t)
// Use minimal claims — just enough for OIDC login.
claims := jwt.MapClaims{
"email": "minimal@coder.com",
"email_verified": true,
"sub": uuid.NewString(),
}
userClient, loginResp := fake.Login(t, ownerClient, claims)
defer loginResp.Body.Close()
ctx := testutil.Context(t, testutil.WaitMedium)
resp, err := userClient.UserOIDCClaims(ctx)
require.NoError(t, err)
require.NotNil(t, resp.Claims, "claims should never be nil, expected empty map")
})
}
-1
View File
@@ -19,7 +19,6 @@ func (r *RootCmd) users() *serpent.Command {
r.userSingle(),
r.userDelete(),
r.userEditRoles(),
r.userOIDCClaims(),
r.createUserStatusCommand(codersdk.UserStatusActive),
r.createUserStatusCommand(codersdk.UserStatusSuspended),
},
-58
View File
@@ -7826,29 +7826,6 @@ const docTemplate = `{
]
}
},
"/users/me/session/token-to-cookie": {
"post": {
"description": "Converts the current session token into a Set-Cookie response.\nThis is used by embedded iframes (e.g. VS Code chat) that\nreceive a session token out-of-band via postMessage but need\ncookie-based auth for WebSocket connections.",
"tags": [
"Authorization"
],
"summary": "Set session token cookie",
"operationId": "set-session-token-cookie",
"responses": {
"204": {
"description": "No Content"
}
},
"security": [
{
"CoderSessionToken": []
}
],
"x-apidocgen": {
"skip": true
}
}
},
"/users/oauth2/github/callback": {
"get": {
"tags": [
@@ -7893,31 +7870,6 @@ const docTemplate = `{
]
}
},
"/users/oidc-claims": {
"get": {
"produces": [
"application/json"
],
"tags": [
"Users"
],
"summary": "Get OIDC claims for the authenticated user",
"operationId": "get-oidc-claims-for-the-authenticated-user",
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/codersdk.OIDCClaimsResponse"
}
}
},
"security": [
{
"CoderSessionToken": []
}
]
}
},
"/users/oidc/callback": {
"get": {
"tags": [
@@ -16934,16 +16886,6 @@ const docTemplate = `{
}
}
},
"codersdk.OIDCClaimsResponse": {
"type": "object",
"properties": {
"claims": {
"description": "Claims are the merged claims from the OIDC provider. These\nare the union of the ID token claims and the userinfo claims,\nwhere userinfo claims take precedence on conflict.",
"type": "object",
"additionalProperties": true
}
}
},
"codersdk.OIDCConfig": {
"type": "object",
"properties": {
-52
View File
@@ -6927,27 +6927,6 @@
]
}
},
"/users/me/session/token-to-cookie": {
"post": {
"description": "Converts the current session token into a Set-Cookie response.\nThis is used by embedded iframes (e.g. VS Code chat) that\nreceive a session token out-of-band via postMessage but need\ncookie-based auth for WebSocket connections.",
"tags": ["Authorization"],
"summary": "Set session token cookie",
"operationId": "set-session-token-cookie",
"responses": {
"204": {
"description": "No Content"
}
},
"security": [
{
"CoderSessionToken": []
}
],
"x-apidocgen": {
"skip": true
}
}
},
"/users/oauth2/github/callback": {
"get": {
"tags": ["Users"],
@@ -6986,27 +6965,6 @@
]
}
},
"/users/oidc-claims": {
"get": {
"produces": ["application/json"],
"tags": ["Users"],
"summary": "Get OIDC claims for the authenticated user",
"operationId": "get-oidc-claims-for-the-authenticated-user",
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/codersdk.OIDCClaimsResponse"
}
}
},
"security": [
{
"CoderSessionToken": []
}
]
}
},
"/users/oidc/callback": {
"get": {
"tags": ["Users"],
@@ -15379,16 +15337,6 @@
}
}
},
"codersdk.OIDCClaimsResponse": {
"type": "object",
"properties": {
"claims": {
"description": "Claims are the merged claims from the OIDC provider. These\nare the union of the ID token claims and the userinfo claims,\nwhere userinfo claims take precedence on conflict.",
"type": "object",
"additionalProperties": true
}
}
},
"codersdk.OIDCConfig": {
"type": "object",
"properties": {
+28 -125
View File
@@ -51,13 +51,10 @@ const (
// heartbeat updates while a chat is being processed.
DefaultChatHeartbeatInterval = 30 * time.Second
maxChatSteps = 1200
// maxStreamBufferSize caps the number of message_part events buffered
// per chat during a single LLM step. When exceeded the oldest event is
// evicted so memory stays bounded.
// maxStreamBufferSize caps the number of events buffered
// per chat during a single LLM step. When exceeded the
// oldest event is evicted so memory stays bounded.
maxStreamBufferSize = 10000
// maxDurableMessageCacheSize caps the number of recent durable message
// events cached per chat for same-replica stream catch-up.
maxDurableMessageCacheSize = 256
// staleRecoveryIntervalDivisor determines how often the stale
// recovery loop runs relative to the stale threshold. A value
@@ -320,12 +317,10 @@ type SubscribeFnParams struct {
}
type chatStreamState struct {
mu sync.Mutex
buffer []codersdk.ChatStreamEvent
buffering bool
durableMessages []codersdk.ChatStreamEvent
durableEvictedBefore int64 // highest message ID evicted from durable cache
subscribers map[uuid.UUID]chan codersdk.ChatStreamEvent
mu sync.Mutex
buffer []codersdk.ChatStreamEvent
buffering bool
subscribers map[uuid.UUID]chan codersdk.ChatStreamEvent
}
// MaxQueueSize is the maximum number of queued user messages per chat.
@@ -379,7 +374,6 @@ type CreateOptions struct {
ChatMode database.NullChatMode
SystemPrompt string
InitialUserContent []codersdk.ChatMessagePart
MCPServerIDs []uuid.UUID
}
// SendMessageBusyBehavior controls what happens when a chat is already active.
@@ -402,7 +396,6 @@ type SendMessageOptions struct {
Content []codersdk.ChatMessagePart
ModelConfigID *uuid.UUID
BusyBehavior SendMessageBusyBehavior
MCPServerIDs *[]uuid.UUID
}
// SendMessageResult contains the outcome of user message processing.
@@ -452,12 +445,6 @@ func (p *Server) CreateChat(ctx context.Context, opts CreateOptions) (database.C
if len(opts.InitialUserContent) == 0 {
return database.Chat{}, xerrors.New("initial user content is required")
}
// Ensure MCPServerIDs is non-nil so pq.Array produces '{}'
// instead of SQL NULL, which violates the NOT NULL column
// constraint.
if opts.MCPServerIDs == nil {
opts.MCPServerIDs = []uuid.UUID{}
}
var chat database.Chat
txErr := p.db.InTx(func(tx database.Store) error {
@@ -473,7 +460,6 @@ func (p *Server) CreateChat(ctx context.Context, opts CreateOptions) (database.C
LastModelConfigID: opts.ModelConfigID,
Title: opts.Title,
Mode: opts.ChatMode,
MCPServerIDs: opts.MCPServerIDs,
})
if err != nil {
return xerrors.Errorf("insert chat: %w", err)
@@ -605,17 +591,6 @@ func (p *Server) SendMessage(
modelConfigID = *opts.ModelConfigID
}
// Update MCP server IDs on the chat when explicitly provided.
if opts.MCPServerIDs != nil {
lockedChat, err = tx.UpdateChatMCPServerIDs(ctx, database.UpdateChatMCPServerIDsParams{
ID: opts.ChatID,
MCPServerIDs: *opts.MCPServerIDs,
})
if err != nil {
return xerrors.Errorf("update chat mcp server ids: %w", err)
}
}
existingQueued, err := tx.GetChatQueuedMessages(ctx, opts.ChatID)
if err != nil {
return xerrors.Errorf("get queued messages: %w", err)
@@ -1400,19 +1375,11 @@ func (p *Server) start(ctx context.Context) {
// to handle chats orphaned by crashed or redeployed workers.
p.recoverStaleChats(ctx)
acquireTicker := p.clock.NewTicker(
p.pendingChatAcquireInterval,
"chatd",
"acquire",
)
acquireTicker := time.NewTicker(p.pendingChatAcquireInterval)
defer acquireTicker.Stop()
staleRecoveryInterval := p.inFlightChatStaleAfter / staleRecoveryIntervalDivisor
staleTicker := p.clock.NewTicker(
staleRecoveryInterval,
"chatd",
"stale-recovery",
)
staleTicker := time.NewTicker(staleRecoveryInterval)
defer staleTicker.Stop()
for {
@@ -1527,48 +1494,6 @@ func (p *Server) publishToStream(chatID uuid.UUID, event codersdk.ChatStreamEven
state.mu.Unlock()
}
// cacheDurableMessage stores a recently persisted message event in the
// per-chat stream state so that same-replica subscribers can catch up
// from memory instead of the database. The afterMessageID is the
// message ID that precedes this message (i.e. message.ID - 1).
func (p *Server) cacheDurableMessage(chatID uuid.UUID, event codersdk.ChatStreamEvent) {
state := p.getOrCreateStreamState(chatID)
state.mu.Lock()
defer state.mu.Unlock()
if len(state.durableMessages) >= maxDurableMessageCacheSize {
if evicted := state.durableMessages[0]; evicted.Message != nil {
state.durableEvictedBefore = evicted.Message.ID
}
state.durableMessages = state.durableMessages[1:]
}
state.durableMessages = append(state.durableMessages, event)
}
// getCachedDurableMessages returns cached durable messages with IDs
// greater than afterID. Returns nil when the cache has no relevant
// entries.
func (p *Server) getCachedDurableMessages(
chatID uuid.UUID,
afterID int64,
) []codersdk.ChatStreamEvent {
state := p.getOrCreateStreamState(chatID)
state.mu.Lock()
defer state.mu.Unlock()
if afterID < state.durableEvictedBefore {
return nil
}
var result []codersdk.ChatStreamEvent
for _, event := range state.durableMessages {
if event.Message != nil && event.Message.ID > afterID {
result = append(result, event)
}
}
return result
}
func (p *Server) subscribeToStream(chatID uuid.UUID) (
[]codersdk.ChatStreamEvent,
<-chan codersdk.ChatStreamEvent,
@@ -1640,8 +1565,7 @@ func (p *Server) Subscribe(
ctx = context.Background()
}
// Subscribe to the local stream for message_parts and same-replica
// persisted messages.
// Subscribe to local stream for message_parts (ephemeral).
localSnapshot, localParts, localCancel := p.subscribeToStream(chatID)
// Merge all event sources.
@@ -1791,9 +1715,10 @@ func (p *Server) Subscribe(
initialSnapshot = append([]codersdk.ChatStreamEvent{statusEvent}, initialSnapshot...)
}
// Track the highest durable message ID delivered to this subscriber,
// whether it came from the initial DB snapshot, the same-replica local
// stream, or a later DB/cache catch-up.
// Track the last message ID we've seen for DB queries.
// Initialize from afterMessageID so that when the caller passes
// afterMessageID > 0 but no new messages exist yet, the first
// pubsub catch-up doesn't re-fetch already-seen messages.
lastMessageID := afterMessageID
if len(messages) > 0 {
lastMessageID = messages[len(messages)-1].ID
@@ -1854,32 +1779,21 @@ func (p *Server) Subscribe(
return
case notify := <-notifications:
if notify.AfterMessageID > 0 || notify.FullRefresh {
afterID := lastMessageID
if notify.FullRefresh {
lastMessageID = 0
afterID = 0
}
cached := p.getCachedDurableMessages(chatID, lastMessageID)
if !notify.FullRefresh && len(cached) > 0 {
for _, event := range cached {
select {
case <-mergedCtx.Done():
return
case mergedEvents <- event:
}
lastMessageID = event.Message.ID
}
} else if newMessages, msgErr := p.db.GetChatMessagesByChatID(mergedCtx, database.GetChatMessagesByChatIDParams{
newMessages, msgErr := p.db.GetChatMessagesByChatID(mergedCtx, database.GetChatMessagesByChatIDParams{
ChatID: chatID,
AfterID: lastMessageID,
}); msgErr != nil {
AfterID: afterID,
})
if msgErr != nil {
p.logger.Warn(mergedCtx, "failed to get chat messages after pubsub notification",
slog.F("chat_id", chatID),
slog.Error(msgErr),
)
} else {
for _, msg := range newMessages {
if msg.ID <= lastMessageID {
continue
}
sdkMsg := db2sdk.ChatMessage(msg)
select {
case <-mergedCtx.Done():
@@ -1965,7 +1879,7 @@ func (p *Server) Subscribe(
}
if hasPubsub {
// Only forward message_part events from local
// (durable events come via pubsub + cache).
// (durable events come via pubsub).
if event.Type == codersdk.ChatStreamEventTypeMessagePart {
select {
case <-mergedCtx.Done():
@@ -2170,35 +2084,24 @@ func panicFailureReason(recovered any) string {
func (p *Server) publishMessage(chatID uuid.UUID, message database.ChatMessage) {
sdkMessage := db2sdk.ChatMessage(message)
event := codersdk.ChatStreamEvent{
p.publishEvent(chatID, codersdk.ChatStreamEvent{
Type: codersdk.ChatStreamEventTypeMessage,
ChatID: chatID,
Message: &sdkMessage,
}
p.cacheDurableMessage(chatID, event)
p.publishEvent(chatID, event)
})
p.publishChatStreamNotify(chatID, coderdpubsub.ChatStreamNotifyMessage{
AfterMessageID: message.ID - 1,
})
}
// publishEditedMessage is like publishMessage but uses FullRefresh
// so remote subscribers re-fetch from the beginning, ensuring the
// edit is never silently dropped. The durable cache is replaced
// with only the edited message.
// publishEditedMessage is like publishMessage but uses
// AfterMessageID=0 so remote subscribers re-fetch from the
// beginning, ensuring the edit is never silently dropped.
func (p *Server) publishEditedMessage(chatID uuid.UUID, message database.ChatMessage) {
sdkMessage := db2sdk.ChatMessage(message)
event := codersdk.ChatStreamEvent{
p.publishEvent(chatID, codersdk.ChatStreamEvent{
Type: codersdk.ChatStreamEventTypeMessage,
ChatID: chatID,
Message: &sdkMessage,
}
state := p.getOrCreateStreamState(chatID)
state.mu.Lock()
state.durableMessages = []codersdk.ChatStreamEvent{event}
state.durableEvictedBefore = 0
state.mu.Unlock()
p.publishEvent(chatID, event)
})
p.publishChatStreamNotify(chatID, coderdpubsub.ChatStreamNotifyMessage{
FullRefresh: true,
})
-231
View File
@@ -4,7 +4,6 @@ import (
"context"
"sync"
"testing"
"time"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
@@ -14,8 +13,6 @@ import (
"cdr.dev/slog/v3/sloggers/slogtest"
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/database/dbmock"
dbpubsub "github.com/coder/coder/v2/coderd/database/pubsub"
coderdpubsub "github.com/coder/coder/v2/coderd/pubsub"
"github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/codersdk/workspacesdk"
"github.com/coder/coder/v2/codersdk/workspacesdk/agentconnmock"
@@ -226,231 +223,3 @@ func TestTurnWorkspaceContextGetWorkspaceConnRefreshesWorkspaceAgent(t *testing.
require.Same(t, conn, gotConn)
require.Equal(t, []uuid.UUID{initialAgent.ID, refreshedAgent.ID}, dialed)
}
func TestSubscribeSkipsDatabaseCatchupForLocallyDeliveredMessage(t *testing.T) {
t.Parallel()
ctx, cancelCtx := context.WithCancel(context.Background())
defer cancelCtx()
ctrl := gomock.NewController(t)
db := dbmock.NewMockStore(ctrl)
chatID := uuid.New()
chat := database.Chat{ID: chatID, Status: database.ChatStatusPending}
initialMessage := database.ChatMessage{
ID: 1,
ChatID: chatID,
Role: database.ChatMessageRoleUser,
}
localMessage := database.ChatMessage{
ID: 2,
ChatID: chatID,
Role: database.ChatMessageRoleAssistant,
}
gomock.InOrder(
db.EXPECT().GetChatMessagesByChatID(gomock.Any(), database.GetChatMessagesByChatIDParams{
ChatID: chatID,
AfterID: 0,
}).Return([]database.ChatMessage{initialMessage}, nil),
db.EXPECT().GetChatQueuedMessages(gomock.Any(), chatID).Return(nil, nil),
db.EXPECT().GetChatByID(gomock.Any(), chatID).Return(chat, nil),
)
server := newSubscribeTestServer(t, db)
_, events, cancel, ok := server.Subscribe(ctx, chatID, nil, 0)
require.True(t, ok)
defer cancel()
server.publishMessage(chatID, localMessage)
event := requireStreamMessageEvent(t, events)
require.Equal(t, int64(2), event.Message.ID)
requireNoStreamEvent(t, events, 200*time.Millisecond)
}
func TestSubscribeUsesDurableCacheWhenLocalMessageWasNotDelivered(t *testing.T) {
t.Parallel()
ctx, cancelCtx := context.WithCancel(context.Background())
defer cancelCtx()
ctrl := gomock.NewController(t)
db := dbmock.NewMockStore(ctrl)
chatID := uuid.New()
chat := database.Chat{ID: chatID, Status: database.ChatStatusPending}
initialMessage := database.ChatMessage{
ID: 1,
ChatID: chatID,
Role: database.ChatMessageRoleUser,
}
cachedMessage := codersdk.ChatMessage{
ID: 2,
ChatID: chatID,
Role: codersdk.ChatMessageRoleAssistant,
}
gomock.InOrder(
db.EXPECT().GetChatMessagesByChatID(gomock.Any(), database.GetChatMessagesByChatIDParams{
ChatID: chatID,
AfterID: 0,
}).Return([]database.ChatMessage{initialMessage}, nil),
db.EXPECT().GetChatQueuedMessages(gomock.Any(), chatID).Return(nil, nil),
db.EXPECT().GetChatByID(gomock.Any(), chatID).Return(chat, nil),
)
server := newSubscribeTestServer(t, db)
server.cacheDurableMessage(chatID, codersdk.ChatStreamEvent{
Type: codersdk.ChatStreamEventTypeMessage,
ChatID: chatID,
Message: &cachedMessage,
})
_, events, cancel, ok := server.Subscribe(ctx, chatID, nil, 0)
require.True(t, ok)
defer cancel()
server.publishChatStreamNotify(chatID, coderdpubsub.ChatStreamNotifyMessage{
AfterMessageID: 1,
})
event := requireStreamMessageEvent(t, events)
require.Equal(t, int64(2), event.Message.ID)
requireNoStreamEvent(t, events, 200*time.Millisecond)
}
func TestSubscribeQueriesDatabaseWhenDurableCacheMisses(t *testing.T) {
t.Parallel()
ctx, cancelCtx := context.WithCancel(context.Background())
defer cancelCtx()
ctrl := gomock.NewController(t)
db := dbmock.NewMockStore(ctrl)
chatID := uuid.New()
chat := database.Chat{ID: chatID, Status: database.ChatStatusPending}
initialMessage := database.ChatMessage{
ID: 1,
ChatID: chatID,
Role: database.ChatMessageRoleUser,
}
catchupMessage := database.ChatMessage{
ID: 2,
ChatID: chatID,
Role: database.ChatMessageRoleAssistant,
}
gomock.InOrder(
db.EXPECT().GetChatMessagesByChatID(gomock.Any(), database.GetChatMessagesByChatIDParams{
ChatID: chatID,
AfterID: 0,
}).Return([]database.ChatMessage{initialMessage}, nil),
db.EXPECT().GetChatQueuedMessages(gomock.Any(), chatID).Return(nil, nil),
db.EXPECT().GetChatByID(gomock.Any(), chatID).Return(chat, nil),
db.EXPECT().GetChatMessagesByChatID(gomock.Any(), database.GetChatMessagesByChatIDParams{
ChatID: chatID,
AfterID: 1,
}).Return([]database.ChatMessage{catchupMessage}, nil),
)
server := newSubscribeTestServer(t, db)
_, events, cancel, ok := server.Subscribe(ctx, chatID, nil, 0)
require.True(t, ok)
defer cancel()
server.publishChatStreamNotify(chatID, coderdpubsub.ChatStreamNotifyMessage{
AfterMessageID: 1,
})
event := requireStreamMessageEvent(t, events)
require.Equal(t, int64(2), event.Message.ID)
requireNoStreamEvent(t, events, 200*time.Millisecond)
}
func TestSubscribeFullRefreshStillUsesDatabaseCatchup(t *testing.T) {
t.Parallel()
ctx, cancelCtx := context.WithCancel(context.Background())
defer cancelCtx()
ctrl := gomock.NewController(t)
db := dbmock.NewMockStore(ctrl)
chatID := uuid.New()
chat := database.Chat{ID: chatID, Status: database.ChatStatusPending}
initialMessage := database.ChatMessage{
ID: 1,
ChatID: chatID,
Role: database.ChatMessageRoleUser,
}
editedMessage := database.ChatMessage{
ID: 1,
ChatID: chatID,
Role: database.ChatMessageRoleUser,
}
gomock.InOrder(
db.EXPECT().GetChatMessagesByChatID(gomock.Any(), database.GetChatMessagesByChatIDParams{
ChatID: chatID,
AfterID: 0,
}).Return([]database.ChatMessage{initialMessage}, nil),
db.EXPECT().GetChatQueuedMessages(gomock.Any(), chatID).Return(nil, nil),
db.EXPECT().GetChatByID(gomock.Any(), chatID).Return(chat, nil),
db.EXPECT().GetChatMessagesByChatID(gomock.Any(), database.GetChatMessagesByChatIDParams{
ChatID: chatID,
AfterID: 0,
}).Return([]database.ChatMessage{editedMessage}, nil),
)
server := newSubscribeTestServer(t, db)
_, events, cancel, ok := server.Subscribe(ctx, chatID, nil, 0)
require.True(t, ok)
defer cancel()
server.publishEditedMessage(chatID, editedMessage)
event := requireStreamMessageEvent(t, events)
require.Equal(t, int64(1), event.Message.ID)
requireNoStreamEvent(t, events, 200*time.Millisecond)
}
func newSubscribeTestServer(t *testing.T, db database.Store) *Server {
t.Helper()
return &Server{
db: db,
logger: slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}),
pubsub: dbpubsub.NewInMemory(),
}
}
func requireStreamMessageEvent(t *testing.T, events <-chan codersdk.ChatStreamEvent) codersdk.ChatStreamEvent {
t.Helper()
select {
case event, ok := <-events:
require.True(t, ok, "chat stream closed before delivering an event")
require.Equal(t, codersdk.ChatStreamEventTypeMessage, event.Type)
require.NotNil(t, event.Message)
return event
case <-time.After(time.Second):
t.Fatal("timed out waiting for chat stream message event")
return codersdk.ChatStreamEvent{}
}
}
func requireNoStreamEvent(t *testing.T, events <-chan codersdk.ChatStreamEvent, wait time.Duration) {
t.Helper()
select {
case event, ok := <-events:
if !ok {
t.Fatal("chat stream closed unexpectedly")
}
t.Fatalf("unexpected chat stream event: %+v", event)
case <-time.After(wait):
}
}
+33 -142
View File
@@ -41,7 +41,6 @@ import (
"github.com/coder/coder/v2/provisioner/echo"
proto "github.com/coder/coder/v2/provisionersdk/proto"
"github.com/coder/coder/v2/testutil"
"github.com/coder/quartz"
)
func TestInterruptChatBroadcastsStatusAcrossInstances(t *testing.T) {
@@ -879,8 +878,6 @@ func TestPromoteQueuedAllowsAlreadyQueuedMessageWhenUsageLimitReached(t *testing
func TestInterruptAutoPromotionIgnoresLaterUsageLimitIncrease(t *testing.T) {
t.Parallel()
const acquireInterval = 10 * time.Millisecond
db, ps := dbtestutil.NewDB(t)
ctx := testutil.Context(t, testutil.WaitLong)
@@ -891,10 +888,6 @@ func TestInterruptAutoPromotionIgnoresLaterUsageLimitIncrease(t *testing.T) {
})
require.NoError(t, err)
clock := quartz.NewMock(t)
acquireTrap := clock.Trap().NewTicker("chatd", "acquire")
defer acquireTrap.Close()
streamStarted := make(chan struct{})
interrupted := make(chan struct{})
allowFinish := make(chan struct{})
@@ -928,12 +921,18 @@ func TestInterruptAutoPromotionIgnoresLaterUsageLimitIncrease(t *testing.T) {
)
})
server := newActiveTestServer(t, db, ps, func(cfg *chatd.Config) {
cfg.Clock = clock
cfg.PendingChatAcquireInterval = acquireInterval
cfg.InFlightChatStaleAfter = testutil.WaitSuperLong
logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true})
server := chatd.New(chatd.Config{
Logger: logger,
Database: db,
ReplicaID: uuid.New(),
Pubsub: ps,
PendingChatAcquireInterval: 10 * time.Millisecond,
InFlightChatStaleAfter: testutil.WaitSuperLong,
})
t.Cleanup(func() {
require.NoError(t, server.Close())
})
acquireTrap.MustWait(ctx).MustRelease(ctx)
user, model := seedChatDependencies(ctx, t, db)
setOpenAIProviderBaseURL(ctx, t, db, openAIURL)
@@ -946,7 +945,13 @@ func TestInterruptAutoPromotionIgnoresLaterUsageLimitIncrease(t *testing.T) {
})
require.NoError(t, err)
clock.Advance(acquireInterval).MustWait(ctx)
require.Eventually(t, func() bool {
fromDB, dbErr := db.GetChatByID(ctx, chat.ID)
if dbErr != nil {
return false
}
return fromDB.Status == database.ChatStatusRunning && fromDB.WorkerID.Valid
}, testutil.WaitMedium, testutil.IntervalFast)
require.Eventually(t, func() bool {
select {
@@ -966,6 +971,19 @@ func TestInterruptAutoPromotionIgnoresLaterUsageLimitIncrease(t *testing.T) {
require.True(t, queuedResult.Queued)
require.NotNil(t, queuedResult.QueuedMessage)
// Send "later queued" immediately after "queued" while the first
// message is still in chat_queued_messages. The existing backlog
// (len(existingQueued) > 0) guarantees this is queued regardless
// of chat status, avoiding a race where the auto-promoted "queued"
// message finishes processing before we can send this.
laterQueuedResult, err := server.SendMessage(ctx, chatd.SendMessageOptions{
ChatID: chat.ID,
Content: []codersdk.ChatMessagePart{codersdk.ChatMessageText("later queued")},
})
require.NoError(t, err)
require.True(t, laterQueuedResult.Queued)
require.NotNil(t, laterQueuedResult.QueuedMessage)
require.Eventually(t, func() bool {
select {
case <-interrupted:
@@ -975,32 +993,6 @@ func TestInterruptAutoPromotionIgnoresLaterUsageLimitIncrease(t *testing.T) {
}
}, testutil.WaitMedium, testutil.IntervalFast)
close(allowFinish)
require.Eventually(t, func() bool {
queued, dbErr := db.GetChatQueuedMessages(ctx, chat.ID)
if dbErr != nil || len(queued) != 0 {
return false
}
fromDB, dbErr := db.GetChatByID(ctx, chat.ID)
if dbErr != nil {
return false
}
return fromDB.Status == database.ChatStatusPending && !fromDB.WorkerID.Valid
}, testutil.WaitMedium, testutil.IntervalFast)
// Keep the acquire loop frozen here so "queued" stays pending.
// That makes the later send queue because the chat is still busy,
// rather than because the scheduler happened to be slow.
laterQueuedResult, err := server.SendMessage(ctx, chatd.SendMessageOptions{
ChatID: chat.ID,
Content: []codersdk.ChatMessagePart{codersdk.ChatMessageText("later queued")},
})
require.NoError(t, err)
require.True(t, laterQueuedResult.Queued)
require.NotNil(t, laterQueuedResult.QueuedMessage)
spendChat, err := db.InsertChat(ctx, database.InsertChatParams{
OwnerID: user.ID,
WorkspaceID: uuid.NullUUID{},
@@ -1038,25 +1030,7 @@ func TestInterruptAutoPromotionIgnoresLaterUsageLimitIncrease(t *testing.T) {
})
require.NoError(t, err)
clock.Advance(acquireInterval).MustWait(ctx)
require.Eventually(t, func() bool {
return requestCount.Load() >= 2
}, testutil.WaitMedium, testutil.IntervalFast)
require.Eventually(t, func() bool {
queued, dbErr := db.GetChatQueuedMessages(ctx, chat.ID)
if dbErr != nil || len(queued) != 0 {
return false
}
fromDB, dbErr := db.GetChatByID(ctx, chat.ID)
if dbErr != nil {
return false
}
return fromDB.Status == database.ChatStatusPending && !fromDB.WorkerID.Valid
}, testutil.WaitMedium, testutil.IntervalFast)
clock.Advance(acquireInterval).MustWait(ctx)
close(allowFinish)
require.Eventually(t, func() bool {
queued, dbErr := db.GetChatQueuedMessages(ctx, chat.ID)
@@ -1091,10 +1065,7 @@ func TestInterruptAutoPromotionIgnoresLaterUsageLimitIncrease(t *testing.T) {
if len(userTexts) != 3 {
return false
}
return requestCount.Load() >= 3 &&
userTexts[0] == "hello" &&
userTexts[1] == "queued" &&
userTexts[2] == "later queued"
return userTexts[0] == "hello" && userTexts[1] == "queued" && userTexts[2] == "later queued"
}, testutil.WaitLong, testutil.IntervalFast)
}
@@ -3372,83 +3343,3 @@ func TestInterruptChatPersistsPartialResponse(t *testing.T) {
require.Contains(t, foundText, "hello world",
"partial assistant response should contain the streamed text")
}
// TestProcessChatPanicRecovery verifies that a panic raised on the main
// processChat goroutine is recovered: the chat transitions to error
// status and LastError records the panic message.
func TestProcessChatPanicRecovery(t *testing.T) {
	t.Parallel()
	db, ps := dbtestutil.NewDB(t)
	// Wrap the database so we can trigger a panic on the main
	// goroutine of processChat. The chatloop's executeTools has
	// its own recover, so panicking inside a tool goroutine won't
	// reach the processChat-level recovery. Instead, we panic
	// during PersistStep's InTx call, which runs synchronously on
	// the processChat goroutine.
	panicWrapper := &panicOnInTxDB{Store: db}
	// Fake OpenAI endpoint so the chatloop has a provider to talk to.
	openAIURL := chattest.NewOpenAI(t, func(req *chattest.OpenAIRequest) chattest.OpenAIResponse {
		if !req.Stream {
			return chattest.OpenAINonStreamingResponse("Panic recovery test")
		}
		return chattest.OpenAIStreamingResponse(
			chattest.OpenAITextChunks("hello")...,
		)
	})
	ctx := testutil.Context(t, testutil.WaitLong)
	user, model := seedChatDependenciesWithProvider(ctx, t, db, "openai-compat", openAIURL)
	// Pass the panic wrapper to the server, but use the real
	// database for seeding so those operations don't panic.
	server := newActiveTestServer(t, panicWrapper, ps)
	chat, err := server.CreateChat(ctx, chatd.CreateOptions{
		OwnerID:       user.ID,
		Title:         "panic-recovery",
		ModelConfigID: model.ID,
		InitialUserContent: []codersdk.ChatMessagePart{
			codersdk.ChatMessageText("hello"),
		},
	})
	require.NoError(t, err)
	// Enable the panic now that CreateChat's InTx has completed.
	// The next InTx call is PersistStep inside the chatloop,
	// running synchronously on the processChat goroutine.
	panicWrapper.enablePanic()
	// Wait for the panic to be recovered and the chat to
	// transition to error status. Reads go through the unwrapped
	// db so they cannot themselves trip the panic wrapper.
	var chatResult database.Chat
	require.Eventually(t, func() bool {
		got, getErr := db.GetChatByID(ctx, chat.ID)
		if getErr != nil {
			return false
		}
		chatResult = got
		return got.Status == database.ChatStatusError
	}, testutil.WaitLong, testutil.IntervalFast)
	require.True(t, chatResult.LastError.Valid, "LastError should be set")
	require.Contains(t, chatResult.LastError.String, "chat processing panicked")
	require.Contains(t, chatResult.LastError.String, "intentional test panic")
}
// panicOnInTxDB wraps a database.Store and panics on the first InTx
// call after enablePanic is called. Subsequent calls pass through
// so the processChat cleanup defer can update the chat status.
type panicOnInTxDB struct {
	database.Store

	// active gates whether InTx should consider panicking at all.
	active atomic.Bool
	// panicked records that the single intentional panic has fired.
	panicked atomic.Bool
}

// enablePanic arms the wrapper: the next InTx call will panic.
func (d *panicOnInTxDB) enablePanic() { d.active.Store(true) }

// InTx panics exactly once after enablePanic, then delegates to the
// wrapped store. CompareAndSwap makes the "first call" claim atomic:
// with the previous separate Load+Store, two concurrent InTx callers
// could both observe panicked=false and both panic, breaking the
// cleanup path that relies on subsequent calls passing through.
func (d *panicOnInTxDB) InTx(f func(database.Store) error, opts *database.TxOptions) error {
	if d.active.Load() && d.panicked.CompareAndSwap(false, true) {
		panic("intentional test panic")
	}
	return d.Store.InTx(f, opts)
}
+2 -2
View File
@@ -127,7 +127,7 @@ func (r stepResult) toResponseMessages() []fantasy.Message {
switch c.GetType() {
case fantasy.ContentTypeText:
text, ok := fantasy.AsContentType[fantasy.TextContent](c)
if !ok || strings.TrimSpace(text.Text) == "" {
if !ok {
continue
}
assistantParts = append(assistantParts, fantasy.TextPart{
@@ -136,7 +136,7 @@ func (r stepResult) toResponseMessages() []fantasy.Message {
})
case fantasy.ContentTypeReasoning:
reasoning, ok := fantasy.AsContentType[fantasy.ReasoningContent](c)
if !ok || strings.TrimSpace(reasoning.Text) == "" {
if !ok {
continue
}
assistantParts = append(assistantParts, fantasy.ReasoningPart{
-78
View File
@@ -578,84 +578,6 @@ func TestToResponseMessages_ProviderExecutedToolResultInAssistantMessage(t *test
assert.False(t, localTR.ProviderExecuted)
}
// TestToResponseMessages_FiltersEmptyTextAndReasoningParts verifies that
// toResponseMessages drops empty and whitespace-only text/reasoning
// content while preserving non-empty parts verbatim (including their
// surrounding whitespace) and leaving tool calls/results untouched.
func TestToResponseMessages_FiltersEmptyTextAndReasoningParts(t *testing.T) {
	t.Parallel()
	sr := stepResult{
		content: []fantasy.Content{
			// Empty text — should be filtered.
			fantasy.TextContent{Text: ""},
			// Whitespace-only text — should be filtered.
			fantasy.TextContent{Text: " \t\n"},
			// Empty reasoning — should be filtered.
			fantasy.ReasoningContent{Text: ""},
			// Whitespace-only reasoning — should be filtered.
			fantasy.ReasoningContent{Text: " \n"},
			// Non-empty text — should pass through.
			fantasy.TextContent{Text: "hello world"},
			// Leading/trailing whitespace with content — kept
			// with the original value (not trimmed).
			fantasy.TextContent{Text: " hello "},
			// Non-empty reasoning — should pass through.
			fantasy.ReasoningContent{Text: "let me think"},
			// Tool call — should be unaffected by filtering.
			fantasy.ToolCallContent{
				ToolCallID: "tc-1",
				ToolName:   "read_file",
				Input:      `{"path":"main.go"}`,
			},
			// Local tool result — should be unaffected by filtering.
			fantasy.ToolResultContent{
				ToolCallID: "tc-1",
				ToolName:   "read_file",
				Result:     fantasy.ToolResultOutputContentText{Text: "file contents"},
			},
		},
	}
	msgs := sr.toResponseMessages()
	require.Len(t, msgs, 2, "expected assistant + tool messages")
	// First message: assistant role with non-empty text, reasoning,
	// and the tool call. The four empty/whitespace-only parts must
	// have been dropped.
	assistantMsg := msgs[0]
	assert.Equal(t, fantasy.MessageRoleAssistant, assistantMsg.Role)
	require.Len(t, assistantMsg.Content, 4,
		"assistant message should have 2x TextPart, ReasoningPart, and ToolCallPart")
	// Part 0: non-empty text.
	textPart, ok := fantasy.AsMessagePart[fantasy.TextPart](assistantMsg.Content[0])
	require.True(t, ok, "part 0 should be TextPart")
	assert.Equal(t, "hello world", textPart.Text)
	// Part 1: padded text — original whitespace preserved.
	paddedPart, ok := fantasy.AsMessagePart[fantasy.TextPart](assistantMsg.Content[1])
	require.True(t, ok, "part 1 should be TextPart")
	assert.Equal(t, " hello ", paddedPart.Text)
	// Part 2: non-empty reasoning.
	reasoningPart, ok := fantasy.AsMessagePart[fantasy.ReasoningPart](assistantMsg.Content[2])
	require.True(t, ok, "part 2 should be ReasoningPart")
	assert.Equal(t, "let me think", reasoningPart.Text)
	// Part 3: tool call (unaffected by text/reasoning filtering).
	toolCallPart, ok := fantasy.AsMessagePart[fantasy.ToolCallPart](assistantMsg.Content[3])
	require.True(t, ok, "part 3 should be ToolCallPart")
	assert.Equal(t, "tc-1", toolCallPart.ToolCallID)
	assert.Equal(t, "read_file", toolCallPart.ToolName)
	// Second message: tool role with the local tool result.
	toolMsg := msgs[1]
	assert.Equal(t, fantasy.MessageRoleTool, toolMsg.Role)
	require.Len(t, toolMsg.Content, 1,
		"tool message should have only the local ToolResultPart")
	toolResultPart, ok := fantasy.AsMessagePart[fantasy.ToolResultPart](toolMsg.Content[0])
	require.True(t, ok, "tool part should be ToolResultPart")
	assert.Equal(t, "tc-1", toolResultPart.ToolCallID)
}
func hasAnthropicEphemeralCacheControl(message fantasy.Message) bool {
if len(message.ProviderOptions) == 0 {
return false
+2 -25
View File
@@ -139,13 +139,9 @@ func ConvertMessagesWithFiles(
},
})
case codersdk.ChatMessageRoleUser:
userParts := partsToMessageParts(logger, pm.parts, resolved)
if len(userParts) == 0 {
continue
}
prompt = append(prompt, fantasy.Message{
Role: fantasy.MessageRoleUser,
Content: userParts,
Content: partsToMessageParts(logger, pm.parts, resolved),
})
case codersdk.ChatMessageRoleAssistant:
fantasyParts := normalizeAssistantToolCallInputs(
@@ -157,9 +153,6 @@ func ConvertMessagesWithFiles(
}
toolNameByCallID[sanitizeToolCallID(toolCall.ToolCallID)] = toolCall.ToolName
}
if len(fantasyParts) == 0 {
continue
}
prompt = append(prompt, fantasy.Message{
Role: fantasy.MessageRoleAssistant,
Content: fantasyParts,
@@ -173,13 +166,9 @@ func ConvertMessagesWithFiles(
}
}
}
toolParts := partsToMessageParts(logger, pm.parts, resolved)
if len(toolParts) == 0 {
continue
}
prompt = append(prompt, fantasy.Message{
Role: fantasy.MessageRoleTool,
Content: toolParts,
Content: partsToMessageParts(logger, pm.parts, resolved),
})
}
}
@@ -1186,23 +1175,11 @@ func partsToMessageParts(
for _, part := range parts {
switch part.Type {
case codersdk.ChatMessagePartTypeText:
// Anthropic rejects empty text content blocks with
// "text content blocks must be non-empty". Empty parts
// can arise when a stream sends TextStart/TextEnd with
// no delta in between. We filter them here rather than
// at persistence time to preserve the raw record.
if strings.TrimSpace(part.Text) == "" {
continue
}
result = append(result, fantasy.TextPart{
Text: part.Text,
ProviderOptions: providerMetadataToOptions(logger, part.ProviderMetadata),
})
case codersdk.ChatMessagePartTypeReasoning:
// Same guard as text parts above.
if strings.TrimSpace(part.Text) == "" {
continue
}
result = append(result, fantasy.ReasoningPart{
Text: part.Text,
ProviderOptions: providerMetadataToOptions(logger, part.ProviderMetadata),
-122
View File
@@ -1646,125 +1646,3 @@ func TestNulEscapeRoundTrip(t *testing.T) {
require.Equal(t, "has\x00nul", decoded[1].Text)
})
}
// TestConvertMessagesWithFiles_FiltersEmptyTextAndReasoningParts verifies
// that ConvertMessagesWithFiles drops empty/whitespace-only text and
// reasoning parts for every role, preserves non-empty parts (including
// padding whitespace), and drops a message entirely when all of its
// parts were filtered out.
func TestConvertMessagesWithFiles_FiltersEmptyTextAndReasoningParts(t *testing.T) {
	t.Parallel()
	// Helper to build a DB message from SDK parts.
	makeMsg := func(t *testing.T, role database.ChatMessageRole, parts []codersdk.ChatMessagePart) database.ChatMessage {
		t.Helper()
		encoded, err := chatprompt.MarshalParts(parts)
		require.NoError(t, err)
		return database.ChatMessage{
			Role:           role,
			Visibility:     database.ChatMessageVisibilityBoth,
			Content:        encoded,
			ContentVersion: chatprompt.CurrentContentVersion,
		}
	}
	t.Run("UserRole", func(t *testing.T) {
		t.Parallel()
		parts := []codersdk.ChatMessagePart{
			codersdk.ChatMessageText(""),     // empty — filtered
			codersdk.ChatMessageText(" \t\n "), // whitespace — filtered
			codersdk.ChatMessageReasoning(""),  // empty — filtered
			codersdk.ChatMessageReasoning(" \n"), // whitespace — filtered
			codersdk.ChatMessageText("hello"),    // kept
			codersdk.ChatMessageText(" hello "),  // kept with original whitespace
			codersdk.ChatMessageReasoning("thinking deeply"), // kept
			codersdk.ChatMessageToolCall("call-1", "my_tool", json.RawMessage(`{"x":1}`)),
			codersdk.ChatMessageToolResult("call-1", "my_tool", json.RawMessage(`{"ok":true}`), false),
		}
		prompt, err := chatprompt.ConvertMessagesWithFiles(
			context.Background(),
			[]database.ChatMessage{makeMsg(t, database.ChatMessageRoleUser, parts)},
			nil,
			slogtest.Make(t, nil),
		)
		require.NoError(t, err)
		require.Len(t, prompt, 1)
		resultParts := prompt[0].Content
		require.Len(t, resultParts, 5, "expected 5 parts after filtering empty text/reasoning")
		textPart, ok := fantasy.AsMessagePart[fantasy.TextPart](resultParts[0])
		require.True(t, ok, "expected TextPart at index 0")
		require.Equal(t, "hello", textPart.Text)
		// Leading/trailing whitespace is preserved — only
		// all-whitespace parts are dropped.
		paddedPart, ok := fantasy.AsMessagePart[fantasy.TextPart](resultParts[1])
		require.True(t, ok, "expected TextPart at index 1")
		require.Equal(t, " hello ", paddedPart.Text)
		reasoningPart, ok := fantasy.AsMessagePart[fantasy.ReasoningPart](resultParts[2])
		require.True(t, ok, "expected ReasoningPart at index 2")
		require.Equal(t, "thinking deeply", reasoningPart.Text)
		toolCallPart, ok := fantasy.AsMessagePart[fantasy.ToolCallPart](resultParts[3])
		require.True(t, ok, "expected ToolCallPart at index 3")
		require.Equal(t, "call-1", toolCallPart.ToolCallID)
		toolResultPart, ok := fantasy.AsMessagePart[fantasy.ToolResultPart](resultParts[4])
		require.True(t, ok, "expected ToolResultPart at index 4")
		require.Equal(t, "call-1", toolResultPart.ToolCallID)
	})
	t.Run("AssistantRole", func(t *testing.T) {
		t.Parallel()
		parts := []codersdk.ChatMessagePart{
			codersdk.ChatMessageText(""),  // empty — filtered
			codersdk.ChatMessageText(" "), // whitespace — filtered
			codersdk.ChatMessageReasoning(""), // empty — filtered
			codersdk.ChatMessageText(" reply "), // kept with whitespace
			codersdk.ChatMessageToolCall("tc-1", "read_file", json.RawMessage(`{"path":"x"}`)),
		}
		prompt, err := chatprompt.ConvertMessagesWithFiles(
			context.Background(),
			[]database.ChatMessage{makeMsg(t, database.ChatMessageRoleAssistant, parts)},
			nil,
			slogtest.Make(t, nil),
		)
		require.NoError(t, err)
		// 2 messages: assistant + synthetic tool result injected
		// by injectMissingToolResults for the unmatched tool call.
		require.Len(t, prompt, 2)
		resultParts := prompt[0].Content
		require.Len(t, resultParts, 2, "expected text + tool-call after filtering")
		textPart, ok := fantasy.AsMessagePart[fantasy.TextPart](resultParts[0])
		require.True(t, ok, "expected TextPart")
		require.Equal(t, " reply ", textPart.Text)
		tcPart, ok := fantasy.AsMessagePart[fantasy.ToolCallPart](resultParts[1])
		require.True(t, ok, "expected ToolCallPart")
		require.Equal(t, "tc-1", tcPart.ToolCallID)
	})
	t.Run("AllEmptyDropsMessage", func(t *testing.T) {
		t.Parallel()
		// When every part is filtered, the message itself should
		// be dropped rather than appending an empty-content message.
		parts := []codersdk.ChatMessagePart{
			codersdk.ChatMessageText(""),
			codersdk.ChatMessageText(" "),
			codersdk.ChatMessageReasoning(""),
		}
		prompt, err := chatprompt.ConvertMessagesWithFiles(
			context.Background(),
			[]database.ChatMessage{makeMsg(t, database.ChatMessageRoleAssistant, parts)},
			nil,
			slogtest.Make(t, nil),
		)
		require.NoError(t, err)
		require.Empty(t, prompt, "all-empty message should be dropped entirely")
	})
}
+1 -9
View File
@@ -1083,7 +1083,6 @@ func openAIProviderOptionsFromChatConfig(
SafetyIdentifier: normalizedStringPointer(options.SafetyIdentifier),
ServiceTier: openAIServiceTierFromChat(options.ServiceTier),
StrictJSONSchema: options.StrictJSONSchema,
Store: boolPtrOrDefault(options.Store, true),
TextVerbosity: OpenAITextVerbosityFromChat(options.TextVerbosity),
User: normalizedStringPointer(options.User),
}
@@ -1100,7 +1099,7 @@ func openAIProviderOptionsFromChatConfig(
MaxCompletionTokens: options.MaxCompletionTokens,
TextVerbosity: normalizedStringPointer(options.TextVerbosity),
Prediction: options.Prediction,
Store: boolPtrOrDefault(options.Store, true),
Store: options.Store,
Metadata: options.Metadata,
PromptCacheKey: normalizedStringPointer(options.PromptCacheKey),
SafetyIdentifier: normalizedStringPointer(options.SafetyIdentifier),
@@ -1281,13 +1280,6 @@ func useOpenAIResponsesOptions(model fantasy.LanguageModel) bool {
}
}
// boolPtrOrDefault returns value when it is non-nil; otherwise it
// returns a pointer to a copy of def, so callers always receive a
// usable *bool without a nil check.
func boolPtrOrDefault(value *bool, def bool) *bool {
	if value == nil {
		return &def
	}
	return value
}
func normalizedStringPointer(value *string) *string {
if value == nil {
return nil
-150
View File
@@ -272,156 +272,6 @@ func logMessages(t *testing.T, msgs []codersdk.ChatMessage) {
}
}
// TestOpenAIReasoningRoundTrip is an integration test that verifies
// reasoning items from OpenAI's Responses API survive the full
// persist → reconstruct → re-send cycle when Store: true. It sends
// a query to a reasoning model, waits for completion, then sends a
// follow-up message. If reasoning items are sent back without their
// required following output item, the API rejects the second request:
//
// Item 'rs_xxx' of type 'reasoning' was provided without its
// required following item.
//
// The test requires OPENAI_API_KEY to be set.
func TestOpenAIReasoningRoundTrip(t *testing.T) {
	t.Parallel()
	apiKey := os.Getenv("OPENAI_API_KEY")
	if apiKey == "" {
		t.Skip("OPENAI_API_KEY not set; skipping OpenAI integration test")
	}
	// Optional override, useful for proxies or compatible endpoints.
	baseURL := os.Getenv("OPENAI_BASE_URL")
	ctx := testutil.Context(t, testutil.WaitSuperLong)
	// Stand up a full coderd with the agents experiment.
	deploymentValues := coderdtest.DeploymentValues(t)
	deploymentValues.Experiments = []string{string(codersdk.ExperimentAgents)}
	client := coderdtest.New(t, &coderdtest.Options{
		DeploymentValues: deploymentValues,
	})
	_ = coderdtest.CreateFirstUser(t, client)
	// Configure an OpenAI provider with the real API key.
	_, err := client.CreateChatProvider(ctx, codersdk.CreateChatProviderConfigRequest{
		Provider: "openai",
		APIKey:   apiKey,
		BaseURL:  baseURL,
	})
	require.NoError(t, err)
	// Create a model config for a reasoning model with Store: true
	// (the default). Using o4-mini because it always produces
	// reasoning items.
	contextLimit := int64(200000)
	isDefault := true
	reasoningSummary := "auto"
	_, err = client.CreateChatModelConfig(ctx, codersdk.CreateChatModelConfigRequest{
		Provider:     "openai",
		Model:        "o4-mini",
		ContextLimit: &contextLimit,
		IsDefault:    &isDefault,
		ModelConfig: &codersdk.ChatModelCallConfig{
			ProviderOptions: &codersdk.ChatModelProviderOptions{
				OpenAI: &codersdk.ChatModelOpenAIProviderOptions{
					Store:            ptr.Ref(true),
					ReasoningSummary: &reasoningSummary,
				},
			},
		},
	})
	require.NoError(t, err)
	// --- Step 1: Send a message that triggers reasoning ---
	t.Log("Creating chat with reasoning query...")
	chat, err := client.CreateChat(ctx, codersdk.CreateChatRequest{
		Content: []codersdk.ChatInputPart{
			{
				Type: codersdk.ChatInputPartTypeText,
				Text: "What is 2+2? Be brief.",
			},
		},
	})
	require.NoError(t, err)
	t.Logf("Chat created: %s (status=%s)", chat.ID, chat.Status)
	// Stream events until the chat reaches a terminal status.
	events, closer, err := client.StreamChat(ctx, chat.ID, nil)
	require.NoError(t, err)
	defer closer.Close()
	waitForChatDone(ctx, t, events, "step 1")
	// Verify the chat completed and messages were persisted.
	chatData, err := client.GetChat(ctx, chat.ID)
	require.NoError(t, err)
	chatMsgs, err := client.GetChatMessages(ctx, chat.ID, nil)
	require.NoError(t, err)
	t.Logf("Chat status after step 1: %s, messages: %d",
		chatData.Status, len(chatMsgs.Messages))
	logMessages(t, chatMsgs.Messages)
	require.Equal(t, codersdk.ChatStatusWaiting, chatData.Status,
		"chat should be in waiting status after step 1")
	// Verify the assistant message has reasoning content.
	assistantMsg := findAssistantWithText(t, chatMsgs.Messages)
	require.NotNil(t, assistantMsg,
		"expected an assistant message with text content after step 1")
	partTypes := partTypeSet(assistantMsg.Content)
	require.Contains(t, partTypes, codersdk.ChatMessagePartTypeReasoning,
		"assistant message should contain reasoning parts from o4-mini")
	require.Contains(t, partTypes, codersdk.ChatMessagePartTypeText,
		"assistant message should contain a text part")
	// --- Step 2: Send a follow-up message ---
	// This is the critical test: if reasoning items are sent back
	// without their required following item, the API will reject
	// the request with:
	//   Item 'rs_xxx' of type 'reasoning' was provided without its
	//   required following item.
	t.Log("Sending follow-up message...")
	_, err = client.CreateChatMessage(ctx, chat.ID,
		codersdk.CreateChatMessageRequest{
			Content: []codersdk.ChatInputPart{
				{
					Type: codersdk.ChatInputPartTypeText,
					Text: "And what is 3+3? Be brief.",
				},
			},
		})
	require.NoError(t, err)
	// Stream the follow-up response.
	events2, closer2, err := client.StreamChat(ctx, chat.ID, nil)
	require.NoError(t, err)
	defer closer2.Close()
	waitForChatDone(ctx, t, events2, "step 2")
	// Verify the follow-up completed and produced content.
	chatData2, err := client.GetChat(ctx, chat.ID)
	require.NoError(t, err)
	chatMsgs2, err := client.GetChatMessages(ctx, chat.ID, nil)
	require.NoError(t, err)
	t.Logf("Chat status after step 2: %s, messages: %d",
		chatData2.Status, len(chatMsgs2.Messages))
	logMessages(t, chatMsgs2.Messages)
	require.Equal(t, codersdk.ChatStatusWaiting, chatData2.Status,
		"chat should be in waiting status after step 2")
	require.Greater(t, len(chatMsgs2.Messages), len(chatMsgs.Messages),
		"follow-up should have added more messages")
	// The last assistant message should have text.
	lastAssistant := findLastAssistantWithText(t, chatMsgs2.Messages)
	require.NotNil(t, lastAssistant,
		"expected an assistant message with text in the follow-up")
	t.Log("OpenAI reasoning round-trip test passed.")
}
// partTypeSet returns the set of part types present in a message.
func partTypeSet(parts []codersdk.ChatMessagePart) map[codersdk.ChatMessagePartType]struct{} {
set := make(map[codersdk.ChatMessagePartType]struct{}, len(parts))
-136
View File
@@ -332,139 +332,3 @@ func TestSpawnComputerUseAgent_UsesComputerUseModelNotParent(t *testing.T) {
assert.Equal(t, "anthropic", chattool.ComputerUseModelProvider)
assert.NotEmpty(t, chattool.ComputerUseModelName)
}
// TestIsSubagentDescendant verifies the ancestor/descendant check over
// the chat parent-chain: direct children and grandchildren are
// descendants; a chat is not its own descendant; unrelated chains,
// reversed direction, and unknown IDs are not descendants.
func TestIsSubagentDescendant(t *testing.T) {
	t.Parallel()
	db, ps := dbtestutil.NewDB(t)
	server := newInternalTestServer(t, db, ps, chatprovider.ProviderAPIKeys{})
	ctx := chatdTestContext(t)
	user, model := seedInternalChatDeps(ctx, t, db)
	// Build a chain: root -> child -> grandchild.
	root, err := server.CreateChat(ctx, CreateOptions{
		OwnerID:            user.ID,
		Title:              "root",
		ModelConfigID:      model.ID,
		InitialUserContent: []codersdk.ChatMessagePart{codersdk.ChatMessageText("root")},
	})
	require.NoError(t, err)
	child, err := server.CreateChat(ctx, CreateOptions{
		OwnerID: user.ID,
		ParentChatID: uuid.NullUUID{
			UUID:  root.ID,
			Valid: true,
		},
		RootChatID: uuid.NullUUID{
			UUID:  root.ID,
			Valid: true,
		},
		Title:              "child",
		ModelConfigID:      model.ID,
		InitialUserContent: []codersdk.ChatMessagePart{codersdk.ChatMessageText("child")},
	})
	require.NoError(t, err)
	grandchild, err := server.CreateChat(ctx, CreateOptions{
		OwnerID: user.ID,
		ParentChatID: uuid.NullUUID{
			UUID:  child.ID,
			Valid: true,
		},
		RootChatID: uuid.NullUUID{
			UUID:  root.ID,
			Valid: true,
		},
		Title:              "grandchild",
		ModelConfigID:      model.ID,
		InitialUserContent: []codersdk.ChatMessagePart{codersdk.ChatMessageText("grandchild")},
	})
	require.NoError(t, err)
	// Build a separate, unrelated chain.
	unrelated, err := server.CreateChat(ctx, CreateOptions{
		OwnerID:            user.ID,
		Title:              "unrelated-root",
		ModelConfigID:      model.ID,
		InitialUserContent: []codersdk.ChatMessagePart{codersdk.ChatMessageText("unrelated")},
	})
	require.NoError(t, err)
	unrelatedChild, err := server.CreateChat(ctx, CreateOptions{
		OwnerID: user.ID,
		ParentChatID: uuid.NullUUID{
			UUID:  unrelated.ID,
			Valid: true,
		},
		RootChatID: uuid.NullUUID{
			UUID:  unrelated.ID,
			Valid: true,
		},
		Title:              "unrelated-child",
		ModelConfigID:      model.ID,
		InitialUserContent: []codersdk.ChatMessagePart{codersdk.ChatMessageText("unrelated-child")},
	})
	require.NoError(t, err)
	tests := []struct {
		name     string
		ancestor uuid.UUID
		target   uuid.UUID
		want     bool
	}{
		{
			name:     "SameID",
			ancestor: root.ID,
			target:   root.ID,
			want:     false,
		},
		{
			name:     "DirectChild",
			ancestor: root.ID,
			target:   child.ID,
			want:     true,
		},
		{
			name:     "GrandChild",
			ancestor: root.ID,
			target:   grandchild.ID,
			want:     true,
		},
		{
			name:     "Unrelated",
			ancestor: root.ID,
			target:   unrelatedChild.ID,
			want:     false,
		},
		{
			name:     "RootChat",
			ancestor: child.ID,
			target:   root.ID,
			want:     false,
		},
		{
			name:     "BrokenChain",
			ancestor: root.ID,
			target:   uuid.New(),
			want:     false,
		},
		{
			name:     "NotDescendant",
			ancestor: unrelated.ID,
			target:   child.ID,
			want:     false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			// Each parallel subtest gets its own context so one
			// subtest's deadline cannot affect the others.
			ctx := chatdTestContext(t)
			got, err := isSubagentDescendant(ctx, db, tt.ancestor, tt.target)
			require.NoError(t, err)
			assert.Equal(t, tt.want, got)
		})
	}
}
-72
View File
@@ -284,41 +284,6 @@ func (api *API) postChats(rw http.ResponseWriter, r *http.Request) {
return
}
// Validate MCP server IDs exist.
if len(req.MCPServerIDs) > 0 {
//nolint:gocritic // Need to validate MCP server IDs exist.
existingConfigs, err := api.Database.GetMCPServerConfigsByIDs(dbauthz.AsSystemRestricted(ctx), req.MCPServerIDs)
if err != nil {
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
Message: "Failed to validate MCP server IDs.",
Detail: err.Error(),
})
return
}
if len(existingConfigs) != len(req.MCPServerIDs) {
found := make(map[uuid.UUID]struct{}, len(existingConfigs))
for _, c := range existingConfigs {
found[c.ID] = struct{}{}
}
var missing []string
for _, id := range req.MCPServerIDs {
if _, ok := found[id]; !ok {
missing = append(missing, id.String())
}
}
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
Message: "One or more MCP server IDs are invalid.",
Detail: fmt.Sprintf("Invalid IDs: %s", strings.Join(missing, ", ")),
})
return
}
}
mcpServerIDs := req.MCPServerIDs
if mcpServerIDs == nil {
mcpServerIDs = []uuid.UUID{}
}
chat, err := api.chatDaemon.CreateChat(ctx, chatd.CreateOptions{
OwnerID: apiKey.UserID,
WorkspaceID: workspaceSelection.WorkspaceID,
@@ -326,7 +291,6 @@ func (api *API) postChats(rw http.ResponseWriter, r *http.Request) {
ModelConfigID: modelConfigID,
SystemPrompt: api.resolvedChatSystemPrompt(ctx),
InitialUserContent: contentBlocks,
MCPServerIDs: mcpServerIDs,
})
if err != nil {
if maybeWriteLimitErr(ctx, rw, err) {
@@ -1492,36 +1456,6 @@ func (api *API) postChatMessages(rw http.ResponseWriter, r *http.Request) {
return
}
// Validate MCP server IDs exist.
if req.MCPServerIDs != nil && len(*req.MCPServerIDs) > 0 {
//nolint:gocritic // Need to validate MCP server IDs exist.
existingConfigs, err := api.Database.GetMCPServerConfigsByIDs(dbauthz.AsSystemRestricted(ctx), *req.MCPServerIDs)
if err != nil {
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
Message: "Failed to validate MCP server IDs.",
Detail: err.Error(),
})
return
}
if len(existingConfigs) != len(*req.MCPServerIDs) {
found := make(map[uuid.UUID]struct{}, len(existingConfigs))
for _, c := range existingConfigs {
found[c.ID] = struct{}{}
}
var missing []string
for _, id := range *req.MCPServerIDs {
if _, ok := found[id]; !ok {
missing = append(missing, id.String())
}
}
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
Message: "One or more MCP server IDs are invalid.",
Detail: fmt.Sprintf("Invalid IDs: %s", strings.Join(missing, ", ")),
})
return
}
}
sendResult, sendErr := api.chatDaemon.SendMessage(
ctx,
chatd.SendMessageOptions{
@@ -1530,7 +1464,6 @@ func (api *API) postChatMessages(rw http.ResponseWriter, r *http.Request) {
Content: contentBlocks,
ModelConfigID: req.ModelConfigID,
BusyBehavior: chatd.SendMessageBusyBehaviorQueue,
MCPServerIDs: req.MCPServerIDs,
},
)
if sendErr != nil {
@@ -3046,10 +2979,6 @@ func truncateRunes(value string, maxLen int) string {
}
func convertChat(c database.Chat, diffStatus *database.ChatDiffStatus) codersdk.Chat {
mcpServerIDs := c.MCPServerIDs
if mcpServerIDs == nil {
mcpServerIDs = []uuid.UUID{}
}
chat := codersdk.Chat{
ID: c.ID,
OwnerID: c.OwnerID,
@@ -3059,7 +2988,6 @@ func convertChat(c database.Chat, diffStatus *database.ChatDiffStatus) codersdk.
Archived: c.Archived,
CreatedAt: c.CreatedAt,
UpdatedAt: c.UpdatedAt,
MCPServerIDs: mcpServerIDs,
}
if c.LastError.Valid {
chat.LastError = &c.LastError.String
+2 -23
View File
@@ -1044,12 +1044,10 @@ func New(options *Options) *API {
// OAuth2 metadata endpoint for RFC 8414 discovery
r.Route("/.well-known/oauth-authorization-server", func(r chi.Router) {
r.Use(httpmw.RequireExperimentWithDevBypass(api.Experiments, codersdk.ExperimentOAuth2))
r.Get("/*", api.oauth2AuthorizationServerMetadata())
})
// OAuth2 protected resource metadata endpoint for RFC 9728 discovery
r.Route("/.well-known/oauth-protected-resource", func(r chi.Router) {
r.Use(httpmw.RequireExperimentWithDevBypass(api.Experiments, codersdk.ExperimentOAuth2))
r.Get("/*", api.oauth2ProtectedResourceMetadata())
})
@@ -1233,27 +1231,10 @@ func New(options *Options) *API {
r.Route("/mcp", func(r chi.Router) {
r.Use(
apiKeyMiddleware,
httpmw.RequireExperimentWithDevBypass(api.Experiments, codersdk.ExperimentOAuth2, codersdk.ExperimentMCPServerHTTP),
)
// MCP server configuration endpoints.
r.Route("/servers", func(r chi.Router) {
r.Use(httpmw.RequireExperimentWithDevBypass(api.Experiments, codersdk.ExperimentAgents))
r.Get("/", api.listMCPServerConfigs)
r.Post("/", api.createMCPServerConfig)
r.Route("/{mcpServer}", func(r chi.Router) {
r.Get("/", api.getMCPServerConfig)
r.Patch("/", api.updateMCPServerConfig)
r.Delete("/", api.deleteMCPServerConfig)
// OAuth2 user flow
r.Get("/oauth2/connect", api.mcpServerOAuth2Connect)
r.Get("/oauth2/callback", api.mcpServerOAuth2Callback)
r.Delete("/oauth2/disconnect", api.mcpServerOAuth2Disconnect)
})
})
// MCP HTTP transport endpoint with mandatory authentication
r.Route("/http", func(r chi.Router) {
r.Use(httpmw.RequireExperimentWithDevBypass(api.Experiments, codersdk.ExperimentOAuth2, codersdk.ExperimentMCPServerHTTP))
r.Mount("/", api.mcpHTTPHandler())
})
r.Mount("/http", api.mcpHTTPHandler())
})
r.Route("/watch-all-workspacebuilds", func(r chi.Router) {
r.Use(
@@ -1515,8 +1496,6 @@ func New(options *Options) *API {
r.Post("/", api.postUser)
r.Get("/", api.users)
r.Post("/logout", api.postLogout)
r.Post("/me/session/token-to-cookie", api.postSessionTokenCookie)
r.Get("/oidc-claims", api.userOIDCClaims)
// These routes query information about site wide roles.
r.Route("/roles", func(r chi.Router) {
r.Get("/", api.AssignableSiteRoles)
-3
View File
@@ -20,9 +20,6 @@ const (
CheckUsersEmailNotEmpty CheckConstraint = "users_email_not_empty" // users
CheckUsersServiceAccountLoginType CheckConstraint = "users_service_account_login_type" // users
CheckUsersUsernameMinLength CheckConstraint = "users_username_min_length" // users
CheckMcpServerConfigsAuthTypeCheck CheckConstraint = "mcp_server_configs_auth_type_check" // mcp_server_configs
CheckMcpServerConfigsAvailabilityCheck CheckConstraint = "mcp_server_configs_availability_check" // mcp_server_configs
CheckMcpServerConfigsTransportCheck CheckConstraint = "mcp_server_configs_transport_check" // mcp_server_configs
CheckMaxProvisionerLogsLength CheckConstraint = "max_provisioner_logs_length" // provisioner_jobs
CheckMaxLogsLength CheckConstraint = "max_logs_length" // workspace_agents
CheckSubsystemsNotNone CheckConstraint = "subsystems_not_none" // workspace_agents
-109
View File
@@ -1691,13 +1691,6 @@ func (q *querier) CleanTailnetTunnels(ctx context.Context) error {
return q.db.CleanTailnetTunnels(ctx)
}
func (q *querier) CleanupDeletedMCPServerIDsFromChats(ctx context.Context) error {
if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceChat); err != nil {
return err
}
return q.db.CleanupDeletedMCPServerIDsFromChats(ctx)
}
func (q *querier) CountAIBridgeInterceptions(ctx context.Context, arg database.CountAIBridgeInterceptionsParams) (int64, error) {
prep, err := prepareSQLFilter(ctx, q.auth, policy.ActionRead, rbac.ResourceAibridgeInterception.Type)
if err != nil {
@@ -1927,20 +1920,6 @@ func (q *querier) DeleteLicense(ctx context.Context, id int32) (int32, error) {
return id, nil
}
func (q *querier) DeleteMCPServerConfigByID(ctx context.Context, id uuid.UUID) error {
if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceDeploymentConfig); err != nil {
return err
}
return q.db.DeleteMCPServerConfigByID(ctx, id)
}
func (q *querier) DeleteMCPServerUserToken(ctx context.Context, arg database.DeleteMCPServerUserTokenParams) error {
if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceDeploymentConfig); err != nil {
return err
}
return q.db.DeleteMCPServerUserToken(ctx, arg)
}
func (q *querier) DeleteOAuth2ProviderAppByClientID(ctx context.Context, id uuid.UUID) error {
if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceOauth2App); err != nil {
return err
@@ -2784,13 +2763,6 @@ func (q *querier) GetEnabledChatProviders(ctx context.Context) ([]database.ChatP
return q.db.GetEnabledChatProviders(ctx)
}
func (q *querier) GetEnabledMCPServerConfigs(ctx context.Context) ([]database.MCPServerConfig, error) {
if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceDeploymentConfig); err != nil {
return nil, err
}
return q.db.GetEnabledMCPServerConfigs(ctx)
}
func (q *querier) GetExternalAuthLink(ctx context.Context, arg database.GetExternalAuthLinkParams) (database.ExternalAuthLink, error) {
return fetchWithAction(q.log, q.auth, policy.ActionReadPersonal, q.db.GetExternalAuthLink)(ctx, arg)
}
@@ -2849,13 +2821,6 @@ func (q *querier) GetFilteredInboxNotificationsByUserID(ctx context.Context, arg
return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetFilteredInboxNotificationsByUserID)(ctx, arg)
}
func (q *querier) GetForcedMCPServerConfigs(ctx context.Context) ([]database.MCPServerConfig, error) {
if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceDeploymentConfig); err != nil {
return nil, err
}
return q.db.GetForcedMCPServerConfigs(ctx)
}
func (q *querier) GetGitSSHKey(ctx context.Context, userID uuid.UUID) (database.GitSSHKey, error) {
return fetchWithAction(q.log, q.auth, policy.ActionReadPersonal, q.db.GetGitSSHKey)(ctx, userID)
}
@@ -2996,48 +2961,6 @@ func (q *querier) GetLogoURL(ctx context.Context) (string, error) {
return q.db.GetLogoURL(ctx)
}
func (q *querier) GetMCPServerConfigByID(ctx context.Context, id uuid.UUID) (database.MCPServerConfig, error) {
if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceDeploymentConfig); err != nil {
return database.MCPServerConfig{}, err
}
return q.db.GetMCPServerConfigByID(ctx, id)
}
func (q *querier) GetMCPServerConfigBySlug(ctx context.Context, slug string) (database.MCPServerConfig, error) {
if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceDeploymentConfig); err != nil {
return database.MCPServerConfig{}, err
}
return q.db.GetMCPServerConfigBySlug(ctx, slug)
}
func (q *querier) GetMCPServerConfigs(ctx context.Context) ([]database.MCPServerConfig, error) {
if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceDeploymentConfig); err != nil {
return nil, err
}
return q.db.GetMCPServerConfigs(ctx)
}
func (q *querier) GetMCPServerConfigsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.MCPServerConfig, error) {
if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceDeploymentConfig); err != nil {
return nil, err
}
return q.db.GetMCPServerConfigsByIDs(ctx, ids)
}
func (q *querier) GetMCPServerUserToken(ctx context.Context, arg database.GetMCPServerUserTokenParams) (database.MCPServerUserToken, error) {
if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceDeploymentConfig); err != nil {
return database.MCPServerUserToken{}, err
}
return q.db.GetMCPServerUserToken(ctx, arg)
}
func (q *querier) GetMCPServerUserTokensByUserID(ctx context.Context, userID uuid.UUID) ([]database.MCPServerUserToken, error) {
if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceDeploymentConfig); err != nil {
return nil, err
}
return q.db.GetMCPServerUserTokensByUserID(ctx, userID)
}
func (q *querier) GetNotificationMessagesByStatus(ctx context.Context, arg database.GetNotificationMessagesByStatusParams) ([]database.NotificationMessage, error) {
if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceNotificationMessage); err != nil {
return nil, err
@@ -4804,13 +4727,6 @@ func (q *querier) InsertLicense(ctx context.Context, arg database.InsertLicenseP
return q.db.InsertLicense(ctx, arg)
}
func (q *querier) InsertMCPServerConfig(ctx context.Context, arg database.InsertMCPServerConfigParams) (database.MCPServerConfig, error) {
if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceDeploymentConfig); err != nil {
return database.MCPServerConfig{}, err
}
return q.db.InsertMCPServerConfig(ctx, arg)
}
func (q *querier) InsertMemoryResourceMonitor(ctx context.Context, arg database.InsertMemoryResourceMonitorParams) (database.WorkspaceAgentMemoryResourceMonitor, error) {
if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceWorkspaceAgentResourceMonitor); err != nil {
return database.WorkspaceAgentMemoryResourceMonitor{}, err
@@ -5570,17 +5486,6 @@ func (q *querier) UpdateChatHeartbeat(ctx context.Context, arg database.UpdateCh
return q.db.UpdateChatHeartbeat(ctx, arg)
}
func (q *querier) UpdateChatMCPServerIDs(ctx context.Context, arg database.UpdateChatMCPServerIDsParams) (database.Chat, error) {
chat, err := q.db.GetChatByID(ctx, arg.ID)
if err != nil {
return database.Chat{}, err
}
if err := q.authorizeContext(ctx, policy.ActionUpdate, chat); err != nil {
return database.Chat{}, err
}
return q.db.UpdateChatMCPServerIDs(ctx, arg)
}
func (q *querier) UpdateChatMessageByID(ctx context.Context, arg database.UpdateChatMessageByIDParams) (database.ChatMessage, error) {
// Authorize update on the parent chat of the edited message.
msg, err := q.db.GetChatMessageByID(ctx, arg.ID)
@@ -5745,13 +5650,6 @@ func (q *querier) UpdateInboxNotificationReadStatus(ctx context.Context, args da
return update(q.log, q.auth, fetchFunc, q.db.UpdateInboxNotificationReadStatus)(ctx, args)
}
func (q *querier) UpdateMCPServerConfig(ctx context.Context, arg database.UpdateMCPServerConfigParams) (database.MCPServerConfig, error) {
if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceDeploymentConfig); err != nil {
return database.MCPServerConfig{}, err
}
return q.db.UpdateMCPServerConfig(ctx, arg)
}
func (q *querier) UpdateMemberRoles(ctx context.Context, arg database.UpdateMemberRolesParams) (database.OrganizationMember, error) {
// Authorized fetch will check that the actor has read access to the org member since the org member is returned.
member, err := database.ExpectOne(q.OrganizationMembers(ctx, database.OrganizationMembersParams{
@@ -6797,13 +6695,6 @@ func (q *querier) UpsertLogoURL(ctx context.Context, value string) error {
return q.db.UpsertLogoURL(ctx, value)
}
func (q *querier) UpsertMCPServerUserToken(ctx context.Context, arg database.UpsertMCPServerUserTokenParams) (database.MCPServerUserToken, error) {
if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceDeploymentConfig); err != nil {
return database.MCPServerUserToken{}, err
}
return q.db.UpsertMCPServerUserToken(ctx, arg)
}
func (q *querier) UpsertNotificationReportGeneratorLog(ctx context.Context, arg database.UpsertNotificationReportGeneratorLogParams) error {
if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil {
return err
+2 -110
View File
@@ -1005,114 +1005,6 @@ func (s *MethodTestSuite) TestChats() {
dbm.EXPECT().DeleteChatUsageLimitUserOverride(gomock.Any(), userID).Return(nil).AnyTimes()
check.Args(userID).Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate)
}))
s.Run("CleanupDeletedMCPServerIDsFromChats", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) {
dbm.EXPECT().CleanupDeletedMCPServerIDsFromChats(gomock.Any()).Return(nil).AnyTimes()
check.Args().Asserts(rbac.ResourceChat, policy.ActionUpdate)
}))
s.Run("DeleteMCPServerConfigByID", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) {
id := uuid.New()
dbm.EXPECT().DeleteMCPServerConfigByID(gomock.Any(), id).Return(nil).AnyTimes()
check.Args(id).Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate)
}))
s.Run("DeleteMCPServerUserToken", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) {
arg := database.DeleteMCPServerUserTokenParams{
MCPServerConfigID: uuid.New(),
UserID: uuid.New(),
}
dbm.EXPECT().DeleteMCPServerUserToken(gomock.Any(), arg).Return(nil).AnyTimes()
check.Args(arg).Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate)
}))
s.Run("GetEnabledMCPServerConfigs", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
configA := testutil.Fake(s.T(), faker, database.MCPServerConfig{})
configB := testutil.Fake(s.T(), faker, database.MCPServerConfig{})
dbm.EXPECT().GetEnabledMCPServerConfigs(gomock.Any()).Return([]database.MCPServerConfig{configA, configB}, nil).AnyTimes()
check.Args().Asserts(rbac.ResourceDeploymentConfig, policy.ActionRead).Returns([]database.MCPServerConfig{configA, configB})
}))
s.Run("GetForcedMCPServerConfigs", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
configA := testutil.Fake(s.T(), faker, database.MCPServerConfig{})
configB := testutil.Fake(s.T(), faker, database.MCPServerConfig{})
dbm.EXPECT().GetForcedMCPServerConfigs(gomock.Any()).Return([]database.MCPServerConfig{configA, configB}, nil).AnyTimes()
check.Args().Asserts(rbac.ResourceDeploymentConfig, policy.ActionRead).Returns([]database.MCPServerConfig{configA, configB})
}))
s.Run("GetMCPServerConfigByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
config := testutil.Fake(s.T(), faker, database.MCPServerConfig{})
dbm.EXPECT().GetMCPServerConfigByID(gomock.Any(), config.ID).Return(config, nil).AnyTimes()
check.Args(config.ID).Asserts(rbac.ResourceDeploymentConfig, policy.ActionRead).Returns(config)
}))
s.Run("GetMCPServerConfigBySlug", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
slug := "test-mcp-server"
config := testutil.Fake(s.T(), faker, database.MCPServerConfig{Slug: slug})
dbm.EXPECT().GetMCPServerConfigBySlug(gomock.Any(), slug).Return(config, nil).AnyTimes()
check.Args(slug).Asserts(rbac.ResourceDeploymentConfig, policy.ActionRead).Returns(config)
}))
s.Run("GetMCPServerConfigs", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
configA := testutil.Fake(s.T(), faker, database.MCPServerConfig{})
configB := testutil.Fake(s.T(), faker, database.MCPServerConfig{})
dbm.EXPECT().GetMCPServerConfigs(gomock.Any()).Return([]database.MCPServerConfig{configA, configB}, nil).AnyTimes()
check.Args().Asserts(rbac.ResourceDeploymentConfig, policy.ActionRead).Returns([]database.MCPServerConfig{configA, configB})
}))
s.Run("GetMCPServerConfigsByIDs", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
configA := testutil.Fake(s.T(), faker, database.MCPServerConfig{})
configB := testutil.Fake(s.T(), faker, database.MCPServerConfig{})
ids := []uuid.UUID{configA.ID, configB.ID}
dbm.EXPECT().GetMCPServerConfigsByIDs(gomock.Any(), ids).Return([]database.MCPServerConfig{configA, configB}, nil).AnyTimes()
check.Args(ids).Asserts(rbac.ResourceDeploymentConfig, policy.ActionRead).Returns([]database.MCPServerConfig{configA, configB})
}))
s.Run("GetMCPServerUserToken", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
arg := database.GetMCPServerUserTokenParams{
MCPServerConfigID: uuid.New(),
UserID: uuid.New(),
}
token := testutil.Fake(s.T(), faker, database.MCPServerUserToken{MCPServerConfigID: arg.MCPServerConfigID, UserID: arg.UserID})
dbm.EXPECT().GetMCPServerUserToken(gomock.Any(), arg).Return(token, nil).AnyTimes()
check.Args(arg).Asserts(rbac.ResourceDeploymentConfig, policy.ActionRead).Returns(token)
}))
s.Run("GetMCPServerUserTokensByUserID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
userID := uuid.New()
tokens := []database.MCPServerUserToken{testutil.Fake(s.T(), faker, database.MCPServerUserToken{UserID: userID})}
dbm.EXPECT().GetMCPServerUserTokensByUserID(gomock.Any(), userID).Return(tokens, nil).AnyTimes()
check.Args(userID).Asserts(rbac.ResourceDeploymentConfig, policy.ActionRead).Returns(tokens)
}))
s.Run("InsertMCPServerConfig", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
arg := database.InsertMCPServerConfigParams{
DisplayName: "Test MCP Server",
Slug: "test-mcp-server",
}
config := testutil.Fake(s.T(), faker, database.MCPServerConfig{DisplayName: arg.DisplayName, Slug: arg.Slug})
dbm.EXPECT().InsertMCPServerConfig(gomock.Any(), arg).Return(config, nil).AnyTimes()
check.Args(arg).Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate).Returns(config)
}))
s.Run("UpdateChatMCPServerIDs", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
chat := testutil.Fake(s.T(), faker, database.Chat{})
arg := database.UpdateChatMCPServerIDsParams{
ID: chat.ID,
MCPServerIDs: []uuid.UUID{uuid.New()},
}
dbm.EXPECT().GetChatByID(gomock.Any(), chat.ID).Return(chat, nil).AnyTimes()
dbm.EXPECT().UpdateChatMCPServerIDs(gomock.Any(), arg).Return(chat, nil).AnyTimes()
check.Args(arg).Asserts(chat, policy.ActionUpdate).Returns(chat)
}))
s.Run("UpdateMCPServerConfig", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
config := testutil.Fake(s.T(), faker, database.MCPServerConfig{})
arg := database.UpdateMCPServerConfigParams{
ID: config.ID,
DisplayName: "Updated MCP Server",
Slug: "updated-mcp-server",
}
dbm.EXPECT().UpdateMCPServerConfig(gomock.Any(), arg).Return(config, nil).AnyTimes()
check.Args(arg).Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate).Returns(config)
}))
s.Run("UpsertMCPServerUserToken", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) {
arg := database.UpsertMCPServerUserTokenParams{
MCPServerConfigID: uuid.New(),
UserID: uuid.New(),
AccessToken: "test-access-token",
TokenType: "bearer",
}
token := testutil.Fake(s.T(), faker, database.MCPServerUserToken{MCPServerConfigID: arg.MCPServerConfigID, UserID: arg.UserID})
dbm.EXPECT().UpsertMCPServerUserToken(gomock.Any(), arg).Return(token, nil).AnyTimes()
check.Args(arg).Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate).Returns(token)
}))
}
func (s *MethodTestSuite) TestFile() {
@@ -1489,8 +1381,8 @@ func (s *MethodTestSuite) TestLicense() {
check.Args().Asserts().Returns("value")
}))
s.Run("GetDefaultProxyConfig", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) {
dbm.EXPECT().GetDefaultProxyConfig(gomock.Any()).Return(database.GetDefaultProxyConfigRow{DisplayName: "Default", IconURL: "/emojis/1f3e1.png"}, nil).AnyTimes()
check.Args().Asserts().Returns(database.GetDefaultProxyConfigRow{DisplayName: "Default", IconURL: "/emojis/1f3e1.png"})
dbm.EXPECT().GetDefaultProxyConfig(gomock.Any()).Return(database.GetDefaultProxyConfigRow{DisplayName: "Default", IconUrl: "/emojis/1f3e1.png"}, nil).AnyTimes()
check.Args().Asserts().Returns(database.GetDefaultProxyConfigRow{DisplayName: "Default", IconUrl: "/emojis/1f3e1.png"})
}))
s.Run("GetLogoURL", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) {
dbm.EXPECT().GetLogoURL(gomock.Any()).Return("value", nil).AnyTimes()
-120
View File
@@ -264,14 +264,6 @@ func (m queryMetricsStore) CleanTailnetTunnels(ctx context.Context) error {
return r0
}
func (m queryMetricsStore) CleanupDeletedMCPServerIDsFromChats(ctx context.Context) error {
start := time.Now()
r0 := m.s.CleanupDeletedMCPServerIDsFromChats(ctx)
m.queryLatencies.WithLabelValues("CleanupDeletedMCPServerIDsFromChats").Observe(time.Since(start).Seconds())
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "CleanupDeletedMCPServerIDsFromChats").Inc()
return r0
}
func (m queryMetricsStore) CountAIBridgeInterceptions(ctx context.Context, arg database.CountAIBridgeInterceptionsParams) (int64, error) {
start := time.Now()
r0, r1 := m.s.CountAIBridgeInterceptions(ctx, arg)
@@ -488,22 +480,6 @@ func (m queryMetricsStore) DeleteLicense(ctx context.Context, id int32) (int32,
return r0, r1
}
func (m queryMetricsStore) DeleteMCPServerConfigByID(ctx context.Context, id uuid.UUID) error {
start := time.Now()
r0 := m.s.DeleteMCPServerConfigByID(ctx, id)
m.queryLatencies.WithLabelValues("DeleteMCPServerConfigByID").Observe(time.Since(start).Seconds())
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "DeleteMCPServerConfigByID").Inc()
return r0
}
func (m queryMetricsStore) DeleteMCPServerUserToken(ctx context.Context, arg database.DeleteMCPServerUserTokenParams) error {
start := time.Now()
r0 := m.s.DeleteMCPServerUserToken(ctx, arg)
m.queryLatencies.WithLabelValues("DeleteMCPServerUserToken").Observe(time.Since(start).Seconds())
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "DeleteMCPServerUserToken").Inc()
return r0
}
func (m queryMetricsStore) DeleteOAuth2ProviderAppByClientID(ctx context.Context, id uuid.UUID) error {
start := time.Now()
r0 := m.s.DeleteOAuth2ProviderAppByClientID(ctx, id)
@@ -1352,14 +1328,6 @@ func (m queryMetricsStore) GetEnabledChatProviders(ctx context.Context) ([]datab
return r0, r1
}
func (m queryMetricsStore) GetEnabledMCPServerConfigs(ctx context.Context) ([]database.MCPServerConfig, error) {
start := time.Now()
r0, r1 := m.s.GetEnabledMCPServerConfigs(ctx)
m.queryLatencies.WithLabelValues("GetEnabledMCPServerConfigs").Observe(time.Since(start).Seconds())
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "GetEnabledMCPServerConfigs").Inc()
return r0, r1
}
func (m queryMetricsStore) GetExternalAuthLink(ctx context.Context, arg database.GetExternalAuthLinkParams) (database.ExternalAuthLink, error) {
start := time.Now()
r0, r1 := m.s.GetExternalAuthLink(ctx, arg)
@@ -1416,14 +1384,6 @@ func (m queryMetricsStore) GetFilteredInboxNotificationsByUserID(ctx context.Con
return r0, r1
}
func (m queryMetricsStore) GetForcedMCPServerConfigs(ctx context.Context) ([]database.MCPServerConfig, error) {
start := time.Now()
r0, r1 := m.s.GetForcedMCPServerConfigs(ctx)
m.queryLatencies.WithLabelValues("GetForcedMCPServerConfigs").Observe(time.Since(start).Seconds())
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "GetForcedMCPServerConfigs").Inc()
return r0, r1
}
func (m queryMetricsStore) GetGitSSHKey(ctx context.Context, userID uuid.UUID) (database.GitSSHKey, error) {
start := time.Now()
r0, r1 := m.s.GetGitSSHKey(ctx, userID)
@@ -1584,54 +1544,6 @@ func (m queryMetricsStore) GetLogoURL(ctx context.Context) (string, error) {
return r0, r1
}
func (m queryMetricsStore) GetMCPServerConfigByID(ctx context.Context, id uuid.UUID) (database.MCPServerConfig, error) {
start := time.Now()
r0, r1 := m.s.GetMCPServerConfigByID(ctx, id)
m.queryLatencies.WithLabelValues("GetMCPServerConfigByID").Observe(time.Since(start).Seconds())
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "GetMCPServerConfigByID").Inc()
return r0, r1
}
func (m queryMetricsStore) GetMCPServerConfigBySlug(ctx context.Context, slug string) (database.MCPServerConfig, error) {
start := time.Now()
r0, r1 := m.s.GetMCPServerConfigBySlug(ctx, slug)
m.queryLatencies.WithLabelValues("GetMCPServerConfigBySlug").Observe(time.Since(start).Seconds())
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "GetMCPServerConfigBySlug").Inc()
return r0, r1
}
func (m queryMetricsStore) GetMCPServerConfigs(ctx context.Context) ([]database.MCPServerConfig, error) {
start := time.Now()
r0, r1 := m.s.GetMCPServerConfigs(ctx)
m.queryLatencies.WithLabelValues("GetMCPServerConfigs").Observe(time.Since(start).Seconds())
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "GetMCPServerConfigs").Inc()
return r0, r1
}
func (m queryMetricsStore) GetMCPServerConfigsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.MCPServerConfig, error) {
start := time.Now()
r0, r1 := m.s.GetMCPServerConfigsByIDs(ctx, ids)
m.queryLatencies.WithLabelValues("GetMCPServerConfigsByIDs").Observe(time.Since(start).Seconds())
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "GetMCPServerConfigsByIDs").Inc()
return r0, r1
}
func (m queryMetricsStore) GetMCPServerUserToken(ctx context.Context, arg database.GetMCPServerUserTokenParams) (database.MCPServerUserToken, error) {
start := time.Now()
r0, r1 := m.s.GetMCPServerUserToken(ctx, arg)
m.queryLatencies.WithLabelValues("GetMCPServerUserToken").Observe(time.Since(start).Seconds())
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "GetMCPServerUserToken").Inc()
return r0, r1
}
func (m queryMetricsStore) GetMCPServerUserTokensByUserID(ctx context.Context, userID uuid.UUID) ([]database.MCPServerUserToken, error) {
start := time.Now()
r0, r1 := m.s.GetMCPServerUserTokensByUserID(ctx, userID)
m.queryLatencies.WithLabelValues("GetMCPServerUserTokensByUserID").Observe(time.Since(start).Seconds())
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "GetMCPServerUserTokensByUserID").Inc()
return r0, r1
}
func (m queryMetricsStore) GetNotificationMessagesByStatus(ctx context.Context, arg database.GetNotificationMessagesByStatusParams) ([]database.NotificationMessage, error) {
start := time.Now()
r0, r1 := m.s.GetNotificationMessagesByStatus(ctx, arg)
@@ -3272,14 +3184,6 @@ func (m queryMetricsStore) InsertLicense(ctx context.Context, arg database.Inser
return r0, r1
}
func (m queryMetricsStore) InsertMCPServerConfig(ctx context.Context, arg database.InsertMCPServerConfigParams) (database.MCPServerConfig, error) {
start := time.Now()
r0, r1 := m.s.InsertMCPServerConfig(ctx, arg)
m.queryLatencies.WithLabelValues("InsertMCPServerConfig").Observe(time.Since(start).Seconds())
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "InsertMCPServerConfig").Inc()
return r0, r1
}
func (m queryMetricsStore) InsertMemoryResourceMonitor(ctx context.Context, arg database.InsertMemoryResourceMonitorParams) (database.WorkspaceAgentMemoryResourceMonitor, error) {
start := time.Now()
r0, r1 := m.s.InsertMemoryResourceMonitor(ctx, arg)
@@ -3952,14 +3856,6 @@ func (m queryMetricsStore) UpdateChatHeartbeat(ctx context.Context, arg database
return r0, r1
}
func (m queryMetricsStore) UpdateChatMCPServerIDs(ctx context.Context, arg database.UpdateChatMCPServerIDsParams) (database.Chat, error) {
start := time.Now()
r0, r1 := m.s.UpdateChatMCPServerIDs(ctx, arg)
m.queryLatencies.WithLabelValues("UpdateChatMCPServerIDs").Observe(time.Since(start).Seconds())
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "UpdateChatMCPServerIDs").Inc()
return r0, r1
}
func (m queryMetricsStore) UpdateChatMessageByID(ctx context.Context, arg database.UpdateChatMessageByIDParams) (database.ChatMessage, error) {
start := time.Now()
r0, r1 := m.s.UpdateChatMessageByID(ctx, arg)
@@ -4064,14 +3960,6 @@ func (m queryMetricsStore) UpdateInboxNotificationReadStatus(ctx context.Context
return r0
}
func (m queryMetricsStore) UpdateMCPServerConfig(ctx context.Context, arg database.UpdateMCPServerConfigParams) (database.MCPServerConfig, error) {
start := time.Now()
r0, r1 := m.s.UpdateMCPServerConfig(ctx, arg)
m.queryLatencies.WithLabelValues("UpdateMCPServerConfig").Observe(time.Since(start).Seconds())
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "UpdateMCPServerConfig").Inc()
return r0, r1
}
func (m queryMetricsStore) UpdateMemberRoles(ctx context.Context, arg database.UpdateMemberRolesParams) (database.OrganizationMember, error) {
start := time.Now()
r0, r1 := m.s.UpdateMemberRoles(ctx, arg)
@@ -4808,14 +4696,6 @@ func (m queryMetricsStore) UpsertLogoURL(ctx context.Context, value string) erro
return r0
}
func (m queryMetricsStore) UpsertMCPServerUserToken(ctx context.Context, arg database.UpsertMCPServerUserTokenParams) (database.MCPServerUserToken, error) {
start := time.Now()
r0, r1 := m.s.UpsertMCPServerUserToken(ctx, arg)
m.queryLatencies.WithLabelValues("UpsertMCPServerUserToken").Observe(time.Since(start).Seconds())
m.queryCounts.WithLabelValues(httpmw.ExtractHTTPRoute(ctx), httpmw.ExtractHTTPMethod(ctx), "UpsertMCPServerUserToken").Inc()
return r0, r1
}
func (m queryMetricsStore) UpsertNotificationReportGeneratorLog(ctx context.Context, arg database.UpsertNotificationReportGeneratorLogParams) error {
start := time.Now()
r0 := m.s.UpsertNotificationReportGeneratorLog(ctx, arg)
-222
View File
@@ -334,20 +334,6 @@ func (mr *MockStoreMockRecorder) CleanTailnetTunnels(ctx any) *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CleanTailnetTunnels", reflect.TypeOf((*MockStore)(nil).CleanTailnetTunnels), ctx)
}
// CleanupDeletedMCPServerIDsFromChats mocks base method.
func (m *MockStore) CleanupDeletedMCPServerIDsFromChats(ctx context.Context) error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "CleanupDeletedMCPServerIDsFromChats", ctx)
ret0, _ := ret[0].(error)
return ret0
}
// CleanupDeletedMCPServerIDsFromChats indicates an expected call of CleanupDeletedMCPServerIDsFromChats.
func (mr *MockStoreMockRecorder) CleanupDeletedMCPServerIDsFromChats(ctx any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CleanupDeletedMCPServerIDsFromChats", reflect.TypeOf((*MockStore)(nil).CleanupDeletedMCPServerIDsFromChats), ctx)
}
// CountAIBridgeInterceptions mocks base method.
func (m *MockStore) CountAIBridgeInterceptions(ctx context.Context, arg database.CountAIBridgeInterceptionsParams) (int64, error) {
m.ctrl.T.Helper()
@@ -783,34 +769,6 @@ func (mr *MockStoreMockRecorder) DeleteLicense(ctx, id any) *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteLicense", reflect.TypeOf((*MockStore)(nil).DeleteLicense), ctx, id)
}
// DeleteMCPServerConfigByID mocks base method.
func (m *MockStore) DeleteMCPServerConfigByID(ctx context.Context, id uuid.UUID) error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "DeleteMCPServerConfigByID", ctx, id)
ret0, _ := ret[0].(error)
return ret0
}
// DeleteMCPServerConfigByID indicates an expected call of DeleteMCPServerConfigByID.
func (mr *MockStoreMockRecorder) DeleteMCPServerConfigByID(ctx, id any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteMCPServerConfigByID", reflect.TypeOf((*MockStore)(nil).DeleteMCPServerConfigByID), ctx, id)
}
// DeleteMCPServerUserToken mocks base method.
func (m *MockStore) DeleteMCPServerUserToken(ctx context.Context, arg database.DeleteMCPServerUserTokenParams) error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "DeleteMCPServerUserToken", ctx, arg)
ret0, _ := ret[0].(error)
return ret0
}
// DeleteMCPServerUserToken indicates an expected call of DeleteMCPServerUserToken.
func (mr *MockStoreMockRecorder) DeleteMCPServerUserToken(ctx, arg any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteMCPServerUserToken", reflect.TypeOf((*MockStore)(nil).DeleteMCPServerUserToken), ctx, arg)
}
// DeleteOAuth2ProviderAppByClientID mocks base method.
func (m *MockStore) DeleteOAuth2ProviderAppByClientID(ctx context.Context, id uuid.UUID) error {
m.ctrl.T.Helper()
@@ -2479,21 +2437,6 @@ func (mr *MockStoreMockRecorder) GetEnabledChatProviders(ctx any) *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetEnabledChatProviders", reflect.TypeOf((*MockStore)(nil).GetEnabledChatProviders), ctx)
}
// GetEnabledMCPServerConfigs mocks base method.
func (m *MockStore) GetEnabledMCPServerConfigs(ctx context.Context) ([]database.MCPServerConfig, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetEnabledMCPServerConfigs", ctx)
ret0, _ := ret[0].([]database.MCPServerConfig)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetEnabledMCPServerConfigs indicates an expected call of GetEnabledMCPServerConfigs.
func (mr *MockStoreMockRecorder) GetEnabledMCPServerConfigs(ctx any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetEnabledMCPServerConfigs", reflect.TypeOf((*MockStore)(nil).GetEnabledMCPServerConfigs), ctx)
}
// GetExternalAuthLink mocks base method.
func (m *MockStore) GetExternalAuthLink(ctx context.Context, arg database.GetExternalAuthLinkParams) (database.ExternalAuthLink, error) {
m.ctrl.T.Helper()
@@ -2599,21 +2542,6 @@ func (mr *MockStoreMockRecorder) GetFilteredInboxNotificationsByUserID(ctx, arg
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetFilteredInboxNotificationsByUserID", reflect.TypeOf((*MockStore)(nil).GetFilteredInboxNotificationsByUserID), ctx, arg)
}
// GetForcedMCPServerConfigs mocks base method.
func (m *MockStore) GetForcedMCPServerConfigs(ctx context.Context) ([]database.MCPServerConfig, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetForcedMCPServerConfigs", ctx)
ret0, _ := ret[0].([]database.MCPServerConfig)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetForcedMCPServerConfigs indicates an expected call of GetForcedMCPServerConfigs.
func (mr *MockStoreMockRecorder) GetForcedMCPServerConfigs(ctx any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetForcedMCPServerConfigs", reflect.TypeOf((*MockStore)(nil).GetForcedMCPServerConfigs), ctx)
}
// GetGitSSHKey mocks base method.
func (m *MockStore) GetGitSSHKey(ctx context.Context, userID uuid.UUID) (database.GitSSHKey, error) {
m.ctrl.T.Helper()
@@ -2914,96 +2842,6 @@ func (mr *MockStoreMockRecorder) GetLogoURL(ctx any) *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLogoURL", reflect.TypeOf((*MockStore)(nil).GetLogoURL), ctx)
}
// GetMCPServerConfigByID mocks base method.
func (m *MockStore) GetMCPServerConfigByID(ctx context.Context, id uuid.UUID) (database.MCPServerConfig, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetMCPServerConfigByID", ctx, id)
ret0, _ := ret[0].(database.MCPServerConfig)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetMCPServerConfigByID indicates an expected call of GetMCPServerConfigByID.
func (mr *MockStoreMockRecorder) GetMCPServerConfigByID(ctx, id any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMCPServerConfigByID", reflect.TypeOf((*MockStore)(nil).GetMCPServerConfigByID), ctx, id)
}
// GetMCPServerConfigBySlug mocks base method.
func (m *MockStore) GetMCPServerConfigBySlug(ctx context.Context, slug string) (database.MCPServerConfig, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetMCPServerConfigBySlug", ctx, slug)
ret0, _ := ret[0].(database.MCPServerConfig)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetMCPServerConfigBySlug indicates an expected call of GetMCPServerConfigBySlug.
func (mr *MockStoreMockRecorder) GetMCPServerConfigBySlug(ctx, slug any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMCPServerConfigBySlug", reflect.TypeOf((*MockStore)(nil).GetMCPServerConfigBySlug), ctx, slug)
}
// GetMCPServerConfigs mocks base method.
func (m *MockStore) GetMCPServerConfigs(ctx context.Context) ([]database.MCPServerConfig, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetMCPServerConfigs", ctx)
ret0, _ := ret[0].([]database.MCPServerConfig)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetMCPServerConfigs indicates an expected call of GetMCPServerConfigs.
func (mr *MockStoreMockRecorder) GetMCPServerConfigs(ctx any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMCPServerConfigs", reflect.TypeOf((*MockStore)(nil).GetMCPServerConfigs), ctx)
}
// GetMCPServerConfigsByIDs mocks base method.
func (m *MockStore) GetMCPServerConfigsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.MCPServerConfig, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetMCPServerConfigsByIDs", ctx, ids)
ret0, _ := ret[0].([]database.MCPServerConfig)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetMCPServerConfigsByIDs indicates an expected call of GetMCPServerConfigsByIDs.
func (mr *MockStoreMockRecorder) GetMCPServerConfigsByIDs(ctx, ids any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMCPServerConfigsByIDs", reflect.TypeOf((*MockStore)(nil).GetMCPServerConfigsByIDs), ctx, ids)
}
// GetMCPServerUserToken mocks base method.
func (m *MockStore) GetMCPServerUserToken(ctx context.Context, arg database.GetMCPServerUserTokenParams) (database.MCPServerUserToken, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetMCPServerUserToken", ctx, arg)
ret0, _ := ret[0].(database.MCPServerUserToken)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetMCPServerUserToken indicates an expected call of GetMCPServerUserToken.
func (mr *MockStoreMockRecorder) GetMCPServerUserToken(ctx, arg any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMCPServerUserToken", reflect.TypeOf((*MockStore)(nil).GetMCPServerUserToken), ctx, arg)
}
// GetMCPServerUserTokensByUserID mocks base method.
func (m *MockStore) GetMCPServerUserTokensByUserID(ctx context.Context, userID uuid.UUID) ([]database.MCPServerUserToken, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetMCPServerUserTokensByUserID", ctx, userID)
ret0, _ := ret[0].([]database.MCPServerUserToken)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetMCPServerUserTokensByUserID indicates an expected call of GetMCPServerUserTokensByUserID.
func (mr *MockStoreMockRecorder) GetMCPServerUserTokensByUserID(ctx, userID any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMCPServerUserTokensByUserID", reflect.TypeOf((*MockStore)(nil).GetMCPServerUserTokensByUserID), ctx, userID)
}
// GetNotificationMessagesByStatus mocks base method.
func (m *MockStore) GetNotificationMessagesByStatus(ctx context.Context, arg database.GetNotificationMessagesByStatusParams) ([]database.NotificationMessage, error) {
m.ctrl.T.Helper()
@@ -6119,21 +5957,6 @@ func (mr *MockStoreMockRecorder) InsertLicense(ctx, arg any) *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertLicense", reflect.TypeOf((*MockStore)(nil).InsertLicense), ctx, arg)
}
// InsertMCPServerConfig mocks base method.
func (m *MockStore) InsertMCPServerConfig(ctx context.Context, arg database.InsertMCPServerConfigParams) (database.MCPServerConfig, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "InsertMCPServerConfig", ctx, arg)
ret0, _ := ret[0].(database.MCPServerConfig)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// InsertMCPServerConfig indicates an expected call of InsertMCPServerConfig.
func (mr *MockStoreMockRecorder) InsertMCPServerConfig(ctx, arg any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertMCPServerConfig", reflect.TypeOf((*MockStore)(nil).InsertMCPServerConfig), ctx, arg)
}
// InsertMemoryResourceMonitor mocks base method.
func (m *MockStore) InsertMemoryResourceMonitor(ctx context.Context, arg database.InsertMemoryResourceMonitorParams) (database.WorkspaceAgentMemoryResourceMonitor, error) {
m.ctrl.T.Helper()
@@ -7433,21 +7256,6 @@ func (mr *MockStoreMockRecorder) UpdateChatHeartbeat(ctx, arg any) *gomock.Call
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateChatHeartbeat", reflect.TypeOf((*MockStore)(nil).UpdateChatHeartbeat), ctx, arg)
}
// UpdateChatMCPServerIDs mocks base method.
func (m *MockStore) UpdateChatMCPServerIDs(ctx context.Context, arg database.UpdateChatMCPServerIDsParams) (database.Chat, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "UpdateChatMCPServerIDs", ctx, arg)
ret0, _ := ret[0].(database.Chat)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// UpdateChatMCPServerIDs indicates an expected call of UpdateChatMCPServerIDs.
func (mr *MockStoreMockRecorder) UpdateChatMCPServerIDs(ctx, arg any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateChatMCPServerIDs", reflect.TypeOf((*MockStore)(nil).UpdateChatMCPServerIDs), ctx, arg)
}
// UpdateChatMessageByID mocks base method.
func (m *MockStore) UpdateChatMessageByID(ctx context.Context, arg database.UpdateChatMessageByIDParams) (database.ChatMessage, error) {
m.ctrl.T.Helper()
@@ -7641,21 +7449,6 @@ func (mr *MockStoreMockRecorder) UpdateInboxNotificationReadStatus(ctx, arg any)
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateInboxNotificationReadStatus", reflect.TypeOf((*MockStore)(nil).UpdateInboxNotificationReadStatus), ctx, arg)
}
// UpdateMCPServerConfig mocks base method.
func (m *MockStore) UpdateMCPServerConfig(ctx context.Context, arg database.UpdateMCPServerConfigParams) (database.MCPServerConfig, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "UpdateMCPServerConfig", ctx, arg)
ret0, _ := ret[0].(database.MCPServerConfig)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// UpdateMCPServerConfig indicates an expected call of UpdateMCPServerConfig.
func (mr *MockStoreMockRecorder) UpdateMCPServerConfig(ctx, arg any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateMCPServerConfig", reflect.TypeOf((*MockStore)(nil).UpdateMCPServerConfig), ctx, arg)
}
// UpdateMemberRoles mocks base method.
func (m *MockStore) UpdateMemberRoles(ctx context.Context, arg database.UpdateMemberRolesParams) (database.OrganizationMember, error) {
m.ctrl.T.Helper()
@@ -8980,21 +8773,6 @@ func (mr *MockStoreMockRecorder) UpsertLogoURL(ctx, value any) *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertLogoURL", reflect.TypeOf((*MockStore)(nil).UpsertLogoURL), ctx, value)
}
// UpsertMCPServerUserToken mocks base method.
func (m *MockStore) UpsertMCPServerUserToken(ctx context.Context, arg database.UpsertMCPServerUserTokenParams) (database.MCPServerUserToken, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "UpsertMCPServerUserToken", ctx, arg)
ret0, _ := ret[0].(database.MCPServerUserToken)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// UpsertMCPServerUserToken indicates an expected call of UpsertMCPServerUserToken.
func (mr *MockStoreMockRecorder) UpsertMCPServerUserToken(ctx, arg any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertMCPServerUserToken", reflect.TypeOf((*MockStore)(nil).UpsertMCPServerUserToken), ctx, arg)
}
// UpsertNotificationReportGeneratorLog mocks base method.
func (m *MockStore) UpsertNotificationReportGeneratorLog(ctx context.Context, arg database.UpsertNotificationReportGeneratorLogParams) error {
m.ctrl.T.Helper()
+1 -94
View File
@@ -1393,8 +1393,7 @@ CREATE TABLE chats (
last_model_config_id uuid NOT NULL,
archived boolean DEFAULT false NOT NULL,
last_error text,
mode chat_mode,
mcp_server_ids uuid[] DEFAULT '{}'::uuid[] NOT NULL
mode chat_mode
);
CREATE TABLE connection_logs (
@@ -1671,53 +1670,6 @@ CREATE SEQUENCE licenses_id_seq
ALTER SEQUENCE licenses_id_seq OWNED BY licenses.id;
CREATE TABLE mcp_server_configs (
id uuid DEFAULT gen_random_uuid() NOT NULL,
display_name text NOT NULL,
slug text NOT NULL,
description text DEFAULT ''::text NOT NULL,
icon_url text DEFAULT ''::text NOT NULL,
transport text DEFAULT 'streamable_http'::text NOT NULL,
url text NOT NULL,
auth_type text DEFAULT 'none'::text NOT NULL,
oauth2_client_id text DEFAULT ''::text NOT NULL,
oauth2_client_secret text DEFAULT ''::text NOT NULL,
oauth2_client_secret_key_id text,
oauth2_auth_url text DEFAULT ''::text NOT NULL,
oauth2_token_url text DEFAULT ''::text NOT NULL,
oauth2_scopes text DEFAULT ''::text NOT NULL,
api_key_header text DEFAULT 'Authorization'::text NOT NULL,
api_key_value text DEFAULT ''::text NOT NULL,
api_key_value_key_id text,
custom_headers text DEFAULT '{}'::text NOT NULL,
custom_headers_key_id text,
tool_allow_list text[] DEFAULT '{}'::text[] NOT NULL,
tool_deny_list text[] DEFAULT '{}'::text[] NOT NULL,
availability text DEFAULT 'default_off'::text NOT NULL,
enabled boolean DEFAULT false NOT NULL,
created_by uuid,
updated_by uuid,
created_at timestamp with time zone DEFAULT now() NOT NULL,
updated_at timestamp with time zone DEFAULT now() NOT NULL,
CONSTRAINT mcp_server_configs_auth_type_check CHECK ((auth_type = ANY (ARRAY['none'::text, 'oauth2'::text, 'api_key'::text, 'custom_headers'::text]))),
CONSTRAINT mcp_server_configs_availability_check CHECK ((availability = ANY (ARRAY['force_on'::text, 'default_on'::text, 'default_off'::text]))),
CONSTRAINT mcp_server_configs_transport_check CHECK ((transport = ANY (ARRAY['streamable_http'::text, 'sse'::text])))
);
CREATE TABLE mcp_server_user_tokens (
id uuid DEFAULT gen_random_uuid() NOT NULL,
mcp_server_config_id uuid NOT NULL,
user_id uuid NOT NULL,
access_token text NOT NULL,
access_token_key_id text,
refresh_token text DEFAULT ''::text NOT NULL,
refresh_token_key_id text,
token_type text DEFAULT 'Bearer'::text NOT NULL,
expiry timestamp with time zone,
created_at timestamp with time zone DEFAULT now() NOT NULL,
updated_at timestamp with time zone DEFAULT now() NOT NULL
);
CREATE TABLE notification_messages (
id uuid NOT NULL,
notification_template_id uuid NOT NULL,
@@ -3391,18 +3343,6 @@ ALTER TABLE ONLY licenses
ALTER TABLE ONLY licenses
ADD CONSTRAINT licenses_pkey PRIMARY KEY (id);
ALTER TABLE ONLY mcp_server_configs
ADD CONSTRAINT mcp_server_configs_pkey PRIMARY KEY (id);
ALTER TABLE ONLY mcp_server_configs
ADD CONSTRAINT mcp_server_configs_slug_key UNIQUE (slug);
ALTER TABLE ONLY mcp_server_user_tokens
ADD CONSTRAINT mcp_server_user_tokens_mcp_server_config_id_user_id_key UNIQUE (mcp_server_config_id, user_id);
ALTER TABLE ONLY mcp_server_user_tokens
ADD CONSTRAINT mcp_server_user_tokens_pkey PRIMARY KEY (id);
ALTER TABLE ONLY notification_messages
ADD CONSTRAINT notification_messages_pkey PRIMARY KEY (id);
@@ -3751,12 +3691,6 @@ CREATE INDEX idx_inbox_notifications_user_id_read_at ON inbox_notifications USIN
CREATE INDEX idx_inbox_notifications_user_id_template_id_targets ON inbox_notifications USING btree (user_id, template_id, targets);
CREATE INDEX idx_mcp_server_configs_enabled ON mcp_server_configs USING btree (enabled) WHERE (enabled = true);
CREATE INDEX idx_mcp_server_configs_forced ON mcp_server_configs USING btree (enabled, availability) WHERE ((enabled = true) AND (availability = 'force_on'::text));
CREATE INDEX idx_mcp_server_user_tokens_user_id ON mcp_server_user_tokens USING btree (user_id);
CREATE INDEX idx_notification_messages_status ON notification_messages USING btree (status);
CREATE INDEX idx_organization_member_organization_id_uuid ON organization_members USING btree (organization_id);
@@ -4081,33 +4015,6 @@ ALTER TABLE ONLY jfrog_xray_scans
ALTER TABLE ONLY jfrog_xray_scans
ADD CONSTRAINT jfrog_xray_scans_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspaces(id) ON DELETE CASCADE;
ALTER TABLE ONLY mcp_server_configs
ADD CONSTRAINT mcp_server_configs_api_key_value_key_id_fkey FOREIGN KEY (api_key_value_key_id) REFERENCES dbcrypt_keys(active_key_digest);
ALTER TABLE ONLY mcp_server_configs
ADD CONSTRAINT mcp_server_configs_created_by_fkey FOREIGN KEY (created_by) REFERENCES users(id) ON DELETE SET NULL;
ALTER TABLE ONLY mcp_server_configs
ADD CONSTRAINT mcp_server_configs_custom_headers_key_id_fkey FOREIGN KEY (custom_headers_key_id) REFERENCES dbcrypt_keys(active_key_digest);
ALTER TABLE ONLY mcp_server_configs
ADD CONSTRAINT mcp_server_configs_oauth2_client_secret_key_id_fkey FOREIGN KEY (oauth2_client_secret_key_id) REFERENCES dbcrypt_keys(active_key_digest);
ALTER TABLE ONLY mcp_server_configs
ADD CONSTRAINT mcp_server_configs_updated_by_fkey FOREIGN KEY (updated_by) REFERENCES users(id) ON DELETE SET NULL;
ALTER TABLE ONLY mcp_server_user_tokens
ADD CONSTRAINT mcp_server_user_tokens_access_token_key_id_fkey FOREIGN KEY (access_token_key_id) REFERENCES dbcrypt_keys(active_key_digest);
ALTER TABLE ONLY mcp_server_user_tokens
ADD CONSTRAINT mcp_server_user_tokens_mcp_server_config_id_fkey FOREIGN KEY (mcp_server_config_id) REFERENCES mcp_server_configs(id) ON DELETE CASCADE;
ALTER TABLE ONLY mcp_server_user_tokens
ADD CONSTRAINT mcp_server_user_tokens_refresh_token_key_id_fkey FOREIGN KEY (refresh_token_key_id) REFERENCES dbcrypt_keys(active_key_digest);
ALTER TABLE ONLY mcp_server_user_tokens
ADD CONSTRAINT mcp_server_user_tokens_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ALTER TABLE ONLY notification_messages
ADD CONSTRAINT notification_messages_notification_template_id_fkey FOREIGN KEY (notification_template_id) REFERENCES notification_templates(id) ON DELETE CASCADE;
@@ -40,15 +40,6 @@ const (
ForeignKeyInboxNotificationsUserID ForeignKeyConstraint = "inbox_notifications_user_id_fkey" // ALTER TABLE ONLY inbox_notifications ADD CONSTRAINT inbox_notifications_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ForeignKeyJfrogXrayScansAgentID ForeignKeyConstraint = "jfrog_xray_scans_agent_id_fkey" // ALTER TABLE ONLY jfrog_xray_scans ADD CONSTRAINT jfrog_xray_scans_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE;
ForeignKeyJfrogXrayScansWorkspaceID ForeignKeyConstraint = "jfrog_xray_scans_workspace_id_fkey" // ALTER TABLE ONLY jfrog_xray_scans ADD CONSTRAINT jfrog_xray_scans_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspaces(id) ON DELETE CASCADE;
ForeignKeyMcpServerConfigsAPIKeyValueKeyID ForeignKeyConstraint = "mcp_server_configs_api_key_value_key_id_fkey" // ALTER TABLE ONLY mcp_server_configs ADD CONSTRAINT mcp_server_configs_api_key_value_key_id_fkey FOREIGN KEY (api_key_value_key_id) REFERENCES dbcrypt_keys(active_key_digest);
ForeignKeyMcpServerConfigsCreatedBy ForeignKeyConstraint = "mcp_server_configs_created_by_fkey" // ALTER TABLE ONLY mcp_server_configs ADD CONSTRAINT mcp_server_configs_created_by_fkey FOREIGN KEY (created_by) REFERENCES users(id) ON DELETE SET NULL;
ForeignKeyMcpServerConfigsCustomHeadersKeyID ForeignKeyConstraint = "mcp_server_configs_custom_headers_key_id_fkey" // ALTER TABLE ONLY mcp_server_configs ADD CONSTRAINT mcp_server_configs_custom_headers_key_id_fkey FOREIGN KEY (custom_headers_key_id) REFERENCES dbcrypt_keys(active_key_digest);
ForeignKeyMcpServerConfigsOauth2ClientSecretKeyID ForeignKeyConstraint = "mcp_server_configs_oauth2_client_secret_key_id_fkey" // ALTER TABLE ONLY mcp_server_configs ADD CONSTRAINT mcp_server_configs_oauth2_client_secret_key_id_fkey FOREIGN KEY (oauth2_client_secret_key_id) REFERENCES dbcrypt_keys(active_key_digest);
ForeignKeyMcpServerConfigsUpdatedBy ForeignKeyConstraint = "mcp_server_configs_updated_by_fkey" // ALTER TABLE ONLY mcp_server_configs ADD CONSTRAINT mcp_server_configs_updated_by_fkey FOREIGN KEY (updated_by) REFERENCES users(id) ON DELETE SET NULL;
ForeignKeyMcpServerUserTokensAccessTokenKeyID ForeignKeyConstraint = "mcp_server_user_tokens_access_token_key_id_fkey" // ALTER TABLE ONLY mcp_server_user_tokens ADD CONSTRAINT mcp_server_user_tokens_access_token_key_id_fkey FOREIGN KEY (access_token_key_id) REFERENCES dbcrypt_keys(active_key_digest);
ForeignKeyMcpServerUserTokensMcpServerConfigID ForeignKeyConstraint = "mcp_server_user_tokens_mcp_server_config_id_fkey" // ALTER TABLE ONLY mcp_server_user_tokens ADD CONSTRAINT mcp_server_user_tokens_mcp_server_config_id_fkey FOREIGN KEY (mcp_server_config_id) REFERENCES mcp_server_configs(id) ON DELETE CASCADE;
ForeignKeyMcpServerUserTokensRefreshTokenKeyID ForeignKeyConstraint = "mcp_server_user_tokens_refresh_token_key_id_fkey" // ALTER TABLE ONLY mcp_server_user_tokens ADD CONSTRAINT mcp_server_user_tokens_refresh_token_key_id_fkey FOREIGN KEY (refresh_token_key_id) REFERENCES dbcrypt_keys(active_key_digest);
ForeignKeyMcpServerUserTokensUserID ForeignKeyConstraint = "mcp_server_user_tokens_user_id_fkey" // ALTER TABLE ONLY mcp_server_user_tokens ADD CONSTRAINT mcp_server_user_tokens_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ForeignKeyNotificationMessagesNotificationTemplateID ForeignKeyConstraint = "notification_messages_notification_template_id_fkey" // ALTER TABLE ONLY notification_messages ADD CONSTRAINT notification_messages_notification_template_id_fkey FOREIGN KEY (notification_template_id) REFERENCES notification_templates(id) ON DELETE CASCADE;
ForeignKeyNotificationMessagesUserID ForeignKeyConstraint = "notification_messages_user_id_fkey" // ALTER TABLE ONLY notification_messages ADD CONSTRAINT notification_messages_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ForeignKeyNotificationPreferencesNotificationTemplateID ForeignKeyConstraint = "notification_preferences_notification_template_id_fkey" // ALTER TABLE ONLY notification_preferences ADD CONSTRAINT notification_preferences_notification_template_id_fkey FOREIGN KEY (notification_template_id) REFERENCES notification_templates(id) ON DELETE CASCADE;
@@ -1,6 +0,0 @@
ALTER TABLE chats DROP COLUMN IF EXISTS mcp_server_ids;
DROP INDEX IF EXISTS idx_mcp_server_configs_enabled;
DROP INDEX IF EXISTS idx_mcp_server_configs_forced;
DROP INDEX IF EXISTS idx_mcp_server_user_tokens_user_id;
DROP TABLE IF EXISTS mcp_server_user_tokens;
DROP TABLE IF EXISTS mcp_server_configs;
@@ -1,75 +0,0 @@
CREATE TABLE mcp_server_configs (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-- Display
display_name TEXT NOT NULL,
slug TEXT NOT NULL UNIQUE,
description TEXT NOT NULL DEFAULT '',
icon_url TEXT NOT NULL DEFAULT '',
-- Connection
transport TEXT NOT NULL DEFAULT 'streamable_http'
CHECK (transport IN ('streamable_http', 'sse')),
url TEXT NOT NULL,
-- Authentication
auth_type TEXT NOT NULL DEFAULT 'none'
CHECK (auth_type IN ('none', 'oauth2', 'api_key', 'custom_headers')),
-- OAuth2 config (when auth_type = 'oauth2')
oauth2_client_id TEXT NOT NULL DEFAULT '',
oauth2_client_secret TEXT NOT NULL DEFAULT '',
oauth2_client_secret_key_id TEXT REFERENCES dbcrypt_keys(active_key_digest),
oauth2_auth_url TEXT NOT NULL DEFAULT '',
oauth2_token_url TEXT NOT NULL DEFAULT '',
oauth2_scopes TEXT NOT NULL DEFAULT '',
-- API key config (when auth_type = 'api_key')
api_key_header TEXT NOT NULL DEFAULT 'Authorization',
api_key_value TEXT NOT NULL DEFAULT '',
api_key_value_key_id TEXT REFERENCES dbcrypt_keys(active_key_digest),
-- Custom headers (when auth_type = 'custom_headers')
custom_headers TEXT NOT NULL DEFAULT '{}',
custom_headers_key_id TEXT REFERENCES dbcrypt_keys(active_key_digest),
-- Tool governance
tool_allow_list TEXT[] NOT NULL DEFAULT '{}',
tool_deny_list TEXT[] NOT NULL DEFAULT '{}',
-- Availability policy
availability TEXT NOT NULL DEFAULT 'default_off'
CHECK (availability IN ('force_on', 'default_on', 'default_off')),
-- Lifecycle
enabled BOOLEAN NOT NULL DEFAULT false,
created_by UUID REFERENCES users(id) ON DELETE SET NULL,
updated_by UUID REFERENCES users(id) ON DELETE SET NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
CREATE TABLE mcp_server_user_tokens (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
mcp_server_config_id UUID NOT NULL REFERENCES mcp_server_configs(id) ON DELETE CASCADE,
user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE,
access_token TEXT NOT NULL,
access_token_key_id TEXT REFERENCES dbcrypt_keys(active_key_digest),
refresh_token TEXT NOT NULL DEFAULT '',
refresh_token_key_id TEXT REFERENCES dbcrypt_keys(active_key_digest),
token_type TEXT NOT NULL DEFAULT 'Bearer',
expiry TIMESTAMPTZ,
created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
UNIQUE (mcp_server_config_id, user_id)
);
-- Add MCP server selection to chats (per-chat, like model_config_id)
ALTER TABLE chats ADD COLUMN mcp_server_ids UUID[] NOT NULL DEFAULT '{}';
CREATE INDEX idx_mcp_server_configs_enabled ON mcp_server_configs(enabled) WHERE enabled = TRUE;
CREATE INDEX idx_mcp_server_configs_forced ON mcp_server_configs(enabled, availability) WHERE enabled = TRUE AND availability = 'force_on';
CREATE INDEX idx_mcp_server_user_tokens_user_id ON mcp_server_user_tokens(user_id);
@@ -1,48 +0,0 @@
INSERT INTO mcp_server_configs (
id,
display_name,
slug,
url,
transport,
auth_type,
availability,
enabled,
created_by,
updated_by,
created_at,
updated_at
) VALUES (
'a1b2c3d4-e5f6-7890-abcd-ef1234567890',
'Fixture MCP Server',
'fixture-mcp-server',
'https://mcp.example.com/sse',
'sse',
'none',
'default_on',
TRUE,
'30095c71-380b-457a-8995-97b8ee6e5307', -- admin@coder.com
'30095c71-380b-457a-8995-97b8ee6e5307', -- admin@coder.com
'2024-01-01 00:00:00+00',
'2024-01-01 00:00:00+00'
);
INSERT INTO mcp_server_user_tokens (
id,
mcp_server_config_id,
user_id,
access_token,
token_type,
created_at,
updated_at
)
SELECT
'b2c3d4e5-f6a7-8901-bcde-f12345678901',
'a1b2c3d4-e5f6-7890-abcd-ef1234567890',
id,
'fixture-access-token',
'Bearer',
'2024-01-01 00:00:00+00',
'2024-01-01 00:00:00+00'
FROM users
ORDER BY created_at, id
LIMIT 1;
-1
View File
@@ -787,7 +787,6 @@ func (q *sqlQuerier) GetAuthorizedChats(ctx context.Context, arg GetChatsParams,
&i.Archived,
&i.LastError,
&i.Mode,
pq.Array(&i.MCPServerIDs),
); err != nil {
return nil, err
}
-45
View File
@@ -4167,7 +4167,6 @@ type Chat struct {
Archived bool `db:"archived" json:"archived"`
LastError sql.NullString `db:"last_error" json:"last_error"`
Mode NullChatMode `db:"mode" json:"mode"`
MCPServerIDs []uuid.UUID `db:"mcp_server_ids" json:"mcp_server_ids"`
}
type ChatDiffStatus struct {
@@ -4453,50 +4452,6 @@ type License struct {
UUID uuid.UUID `db:"uuid" json:"uuid"`
}
type MCPServerConfig struct {
ID uuid.UUID `db:"id" json:"id"`
DisplayName string `db:"display_name" json:"display_name"`
Slug string `db:"slug" json:"slug"`
Description string `db:"description" json:"description"`
IconURL string `db:"icon_url" json:"icon_url"`
Transport string `db:"transport" json:"transport"`
Url string `db:"url" json:"url"`
AuthType string `db:"auth_type" json:"auth_type"`
OAuth2ClientID string `db:"oauth2_client_id" json:"oauth2_client_id"`
OAuth2ClientSecret string `db:"oauth2_client_secret" json:"oauth2_client_secret"`
OAuth2ClientSecretKeyID sql.NullString `db:"oauth2_client_secret_key_id" json:"oauth2_client_secret_key_id"`
OAuth2AuthURL string `db:"oauth2_auth_url" json:"oauth2_auth_url"`
OAuth2TokenURL string `db:"oauth2_token_url" json:"oauth2_token_url"`
OAuth2Scopes string `db:"oauth2_scopes" json:"oauth2_scopes"`
APIKeyHeader string `db:"api_key_header" json:"api_key_header"`
APIKeyValue string `db:"api_key_value" json:"api_key_value"`
APIKeyValueKeyID sql.NullString `db:"api_key_value_key_id" json:"api_key_value_key_id"`
CustomHeaders string `db:"custom_headers" json:"custom_headers"`
CustomHeadersKeyID sql.NullString `db:"custom_headers_key_id" json:"custom_headers_key_id"`
ToolAllowList []string `db:"tool_allow_list" json:"tool_allow_list"`
ToolDenyList []string `db:"tool_deny_list" json:"tool_deny_list"`
Availability string `db:"availability" json:"availability"`
Enabled bool `db:"enabled" json:"enabled"`
CreatedBy uuid.NullUUID `db:"created_by" json:"created_by"`
UpdatedBy uuid.NullUUID `db:"updated_by" json:"updated_by"`
CreatedAt time.Time `db:"created_at" json:"created_at"`
UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
}
type MCPServerUserToken struct {
ID uuid.UUID `db:"id" json:"id"`
MCPServerConfigID uuid.UUID `db:"mcp_server_config_id" json:"mcp_server_config_id"`
UserID uuid.UUID `db:"user_id" json:"user_id"`
AccessToken string `db:"access_token" json:"access_token"`
AccessTokenKeyID sql.NullString `db:"access_token_key_id" json:"access_token_key_id"`
RefreshToken string `db:"refresh_token" json:"refresh_token"`
RefreshTokenKeyID sql.NullString `db:"refresh_token_key_id" json:"refresh_token_key_id"`
TokenType string `db:"token_type" json:"token_type"`
Expiry sql.NullTime `db:"expiry" json:"expiry"`
CreatedAt time.Time `db:"created_at" json:"created_at"`
UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
}
type NotificationMessage struct {
ID uuid.UUID `db:"id" json:"id"`
NotificationTemplateID uuid.UUID `db:"notification_template_id" json:"notification_template_id"`
-15
View File
@@ -74,7 +74,6 @@ type sqlcQuerier interface {
CleanTailnetCoordinators(ctx context.Context) error
CleanTailnetLostPeers(ctx context.Context) error
CleanTailnetTunnels(ctx context.Context) error
CleanupDeletedMCPServerIDsFromChats(ctx context.Context) error
CountAIBridgeInterceptions(ctx context.Context, arg CountAIBridgeInterceptionsParams) (int64, error)
CountAuditLogs(ctx context.Context, arg CountAuditLogsParams) (int64, error)
CountConnectionLogs(ctx context.Context, arg CountConnectionLogsParams) (int64, error)
@@ -111,8 +110,6 @@ type sqlcQuerier interface {
DeleteGroupByID(ctx context.Context, id uuid.UUID) error
DeleteGroupMemberFromGroup(ctx context.Context, arg DeleteGroupMemberFromGroupParams) error
DeleteLicense(ctx context.Context, id int32) (int32, error)
DeleteMCPServerConfigByID(ctx context.Context, id uuid.UUID) error
DeleteMCPServerUserToken(ctx context.Context, arg DeleteMCPServerUserTokenParams) error
DeleteOAuth2ProviderAppByClientID(ctx context.Context, id uuid.UUID) error
DeleteOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) error
DeleteOAuth2ProviderAppCodeByID(ctx context.Context, id uuid.UUID) error
@@ -272,7 +269,6 @@ type sqlcQuerier interface {
GetEligibleProvisionerDaemonsByProvisionerJobIDs(ctx context.Context, provisionerJobIds []uuid.UUID) ([]GetEligibleProvisionerDaemonsByProvisionerJobIDsRow, error)
GetEnabledChatModelConfigs(ctx context.Context) ([]ChatModelConfig, error)
GetEnabledChatProviders(ctx context.Context) ([]ChatProvider, error)
GetEnabledMCPServerConfigs(ctx context.Context) ([]MCPServerConfig, error)
GetExternalAuthLink(ctx context.Context, arg GetExternalAuthLinkParams) (ExternalAuthLink, error)
GetExternalAuthLinksByUserID(ctx context.Context, userID uuid.UUID) ([]ExternalAuthLink, error)
GetFailedWorkspaceBuildsByTemplateID(ctx context.Context, arg GetFailedWorkspaceBuildsByTemplateIDParams) ([]GetFailedWorkspaceBuildsByTemplateIDRow, error)
@@ -288,7 +284,6 @@ type sqlcQuerier interface {
// param created_at_opt: The created_at timestamp to filter by. This parameter is usd for pagination - it fetches notifications created before the specified timestamp if it is not the zero value
// param limit_opt: The limit of notifications to fetch. If the limit is not specified, it defaults to 25
GetFilteredInboxNotificationsByUserID(ctx context.Context, arg GetFilteredInboxNotificationsByUserIDParams) ([]InboxNotification, error)
GetForcedMCPServerConfigs(ctx context.Context) ([]MCPServerConfig, error)
GetGitSSHKey(ctx context.Context, userID uuid.UUID) (GitSSHKey, error)
GetGroupByID(ctx context.Context, id uuid.UUID) (Group, error)
GetGroupByOrgAndName(ctx context.Context, arg GetGroupByOrgAndNameParams) (Group, error)
@@ -317,12 +312,6 @@ type sqlcQuerier interface {
GetLicenseByID(ctx context.Context, id int32) (License, error)
GetLicenses(ctx context.Context) ([]License, error)
GetLogoURL(ctx context.Context) (string, error)
GetMCPServerConfigByID(ctx context.Context, id uuid.UUID) (MCPServerConfig, error)
GetMCPServerConfigBySlug(ctx context.Context, slug string) (MCPServerConfig, error)
GetMCPServerConfigs(ctx context.Context) ([]MCPServerConfig, error)
GetMCPServerConfigsByIDs(ctx context.Context, ids []uuid.UUID) ([]MCPServerConfig, error)
GetMCPServerUserToken(ctx context.Context, arg GetMCPServerUserTokenParams) (MCPServerUserToken, error)
GetMCPServerUserTokensByUserID(ctx context.Context, userID uuid.UUID) ([]MCPServerUserToken, error)
GetNotificationMessagesByStatus(ctx context.Context, arg GetNotificationMessagesByStatusParams) ([]NotificationMessage, error)
// Fetch the notification report generator log indicating recent activity.
GetNotificationReportGeneratorLogByTemplate(ctx context.Context, templateID uuid.UUID) (NotificationReportGeneratorLog, error)
@@ -686,7 +675,6 @@ type sqlcQuerier interface {
InsertGroupMember(ctx context.Context, arg InsertGroupMemberParams) error
InsertInboxNotification(ctx context.Context, arg InsertInboxNotificationParams) (InboxNotification, error)
InsertLicense(ctx context.Context, arg InsertLicenseParams) (License, error)
InsertMCPServerConfig(ctx context.Context, arg InsertMCPServerConfigParams) (MCPServerConfig, error)
InsertMemoryResourceMonitor(ctx context.Context, arg InsertMemoryResourceMonitorParams) (WorkspaceAgentMemoryResourceMonitor, error)
// Inserts any group by name that does not exist. All new groups are given
// a random uuid, are inserted into the same organization. They have the default
@@ -808,7 +796,6 @@ type sqlcQuerier interface {
// Bumps the heartbeat timestamp for a running chat so that other
// replicas know the worker is still alive.
UpdateChatHeartbeat(ctx context.Context, arg UpdateChatHeartbeatParams) (int64, error)
UpdateChatMCPServerIDs(ctx context.Context, arg UpdateChatMCPServerIDsParams) (Chat, error)
UpdateChatMessageByID(ctx context.Context, arg UpdateChatMessageByIDParams) (ChatMessage, error)
UpdateChatModelConfig(ctx context.Context, arg UpdateChatModelConfigParams) (ChatModelConfig, error)
UpdateChatProvider(ctx context.Context, arg UpdateChatProviderParams) (ChatProvider, error)
@@ -826,7 +813,6 @@ type sqlcQuerier interface {
UpdateGroupByID(ctx context.Context, arg UpdateGroupByIDParams) (Group, error)
UpdateInactiveUsersToDormant(ctx context.Context, arg UpdateInactiveUsersToDormantParams) ([]UpdateInactiveUsersToDormantRow, error)
UpdateInboxNotificationReadStatus(ctx context.Context, arg UpdateInboxNotificationReadStatusParams) error
UpdateMCPServerConfig(ctx context.Context, arg UpdateMCPServerConfigParams) (MCPServerConfig, error)
UpdateMemberRoles(ctx context.Context, arg UpdateMemberRolesParams) (OrganizationMember, error)
UpdateMemoryResourceMonitor(ctx context.Context, arg UpdateMemoryResourceMonitorParams) error
UpdateNotificationTemplateMethodByID(ctx context.Context, arg UpdateNotificationTemplateMethodByIDParams) (NotificationTemplate, error)
@@ -931,7 +917,6 @@ type sqlcQuerier interface {
UpsertHealthSettings(ctx context.Context, value string) error
UpsertLastUpdateCheck(ctx context.Context, value string) error
UpsertLogoURL(ctx context.Context, value string) error
UpsertMCPServerUserToken(ctx context.Context, arg UpsertMCPServerUserTokenParams) (MCPServerUserToken, error)
// Insert or update notification report generator logs with recent activity.
UpsertNotificationReportGeneratorLog(ctx context.Context, arg UpsertNotificationReportGeneratorLogParams) error
UpsertNotificationsSettings(ctx context.Context, value string) error
+5 -5
View File
@@ -1653,12 +1653,12 @@ func TestDefaultProxy(t *testing.T) {
require.NoError(t, err, "get def proxy")
require.Equal(t, defProxy.DisplayName, "Default")
require.Equal(t, defProxy.IconURL, "/emojis/1f3e1.png")
require.Equal(t, defProxy.IconUrl, "/emojis/1f3e1.png")
// Set the proxy values
args := database.UpsertDefaultProxyParams{
DisplayName: "displayname",
IconURL: "/icon.png",
IconUrl: "/icon.png",
}
err = db.UpsertDefaultProxy(ctx, args)
require.NoError(t, err, "insert def proxy")
@@ -1666,12 +1666,12 @@ func TestDefaultProxy(t *testing.T) {
defProxy, err = db.GetDefaultProxyConfig(ctx)
require.NoError(t, err, "get def proxy")
require.Equal(t, defProxy.DisplayName, args.DisplayName)
require.Equal(t, defProxy.IconURL, args.IconURL)
require.Equal(t, defProxy.IconUrl, args.IconUrl)
// Upsert values
args = database.UpsertDefaultProxyParams{
DisplayName: "newdisplayname",
IconURL: "/newicon.png",
IconUrl: "/newicon.png",
}
err = db.UpsertDefaultProxy(ctx, args)
require.NoError(t, err, "upsert def proxy")
@@ -1679,7 +1679,7 @@ func TestDefaultProxy(t *testing.T) {
defProxy, err = db.GetDefaultProxyConfig(ctx)
require.NoError(t, err, "get def proxy")
require.Equal(t, defProxy.DisplayName, args.DisplayName)
require.Equal(t, defProxy.IconURL, args.IconURL)
require.Equal(t, defProxy.IconUrl, args.IconUrl)
// Ensure other site configs are the same
found, err := db.GetDeploymentID(ctx)
File diff suppressed because it is too large Load Diff
+2 -15
View File
@@ -180,8 +180,7 @@ INSERT INTO chats (
root_chat_id,
last_model_config_id,
title,
mode,
mcp_server_ids
mode
) VALUES (
@owner_id::uuid,
sqlc.narg('workspace_id')::uuid,
@@ -189,8 +188,7 @@ INSERT INTO chats (
sqlc.narg('root_chat_id')::uuid,
@last_model_config_id::uuid,
@title::text,
sqlc.narg('mode')::chat_mode,
COALESCE(@mcp_server_ids::uuid[], '{}'::uuid[])
sqlc.narg('mode')::chat_mode
)
RETURNING
*;
@@ -297,17 +295,6 @@ WHERE
RETURNING
*;
-- name: UpdateChatMCPServerIDs :one
UPDATE
chats
SET
mcp_server_ids = @mcp_server_ids::uuid[],
updated_at = NOW()
WHERE
id = @id::uuid
RETURNING
*;
-- name: AcquireChats :many
-- Acquires up to @num_chats pending chats for processing. Uses SKIP LOCKED
-- to prevent multiple replicas from acquiring the same chat.
@@ -1,213 +0,0 @@
-- Queries for MCP (Model Context Protocol) server configuration and
-- per-user OAuth2 token storage. All queries are sqlc-generated; the
-- `-- name:` directives define the generated Go method names.

-- name: GetMCPServerConfigByID :one
SELECT
    *
FROM
    mcp_server_configs
WHERE
    id = @id::uuid;

-- name: GetMCPServerConfigBySlug :one
SELECT
    *
FROM
    mcp_server_configs
WHERE
    slug = @slug::text;

-- Lists every config regardless of enabled state (admin view).
-- name: GetMCPServerConfigs :many
SELECT
    *
FROM
    mcp_server_configs
ORDER BY
    display_name ASC;

-- Lists only enabled configs (non-admin / chat-feature view).
-- name: GetEnabledMCPServerConfigs :many
SELECT
    *
FROM
    mcp_server_configs
WHERE
    enabled = TRUE
ORDER BY
    display_name ASC;

-- name: GetMCPServerConfigsByIDs :many
SELECT
    *
FROM
    mcp_server_configs
WHERE
    id = ANY(@ids::uuid[])
ORDER BY
    display_name ASC;

-- Configs that are both enabled and administratively forced on for
-- every chat ('force_on' availability).
-- name: GetForcedMCPServerConfigs :many
SELECT
    *
FROM
    mcp_server_configs
WHERE
    enabled = TRUE
    AND availability = 'force_on'
ORDER BY
    display_name ASC;

-- Inserts a new MCP server config. The *_key_id columns are nullable
-- (sqlc.narg) — presumably they reference the encryption key used for
-- the adjacent secret column; NULL appears to mean "stored unencrypted
-- / pending encryption" — verify against the dbcrypt layer.
-- name: InsertMCPServerConfig :one
INSERT INTO mcp_server_configs (
    display_name,
    slug,
    description,
    icon_url,
    transport,
    url,
    auth_type,
    oauth2_client_id,
    oauth2_client_secret,
    oauth2_client_secret_key_id,
    oauth2_auth_url,
    oauth2_token_url,
    oauth2_scopes,
    api_key_header,
    api_key_value,
    api_key_value_key_id,
    custom_headers,
    custom_headers_key_id,
    tool_allow_list,
    tool_deny_list,
    availability,
    enabled,
    created_by,
    updated_by
) VALUES (
    @display_name::text,
    @slug::text,
    @description::text,
    @icon_url::text,
    @transport::text,
    @url::text,
    @auth_type::text,
    @oauth2_client_id::text,
    @oauth2_client_secret::text,
    sqlc.narg('oauth2_client_secret_key_id')::text,
    @oauth2_auth_url::text,
    @oauth2_token_url::text,
    @oauth2_scopes::text,
    @api_key_header::text,
    @api_key_value::text,
    sqlc.narg('api_key_value_key_id')::text,
    @custom_headers::text,
    sqlc.narg('custom_headers_key_id')::text,
    @tool_allow_list::text[],
    @tool_deny_list::text[],
    @availability::text,
    @enabled::boolean,
    @created_by::uuid,
    @updated_by::uuid
)
RETURNING
    *;

-- Full-row update; callers are expected to have merged unchanged
-- fields from the existing row before calling (see the coderd update
-- handler). Also bumps updated_at.
-- name: UpdateMCPServerConfig :one
UPDATE
    mcp_server_configs
SET
    display_name = @display_name::text,
    slug = @slug::text,
    description = @description::text,
    icon_url = @icon_url::text,
    transport = @transport::text,
    url = @url::text,
    auth_type = @auth_type::text,
    oauth2_client_id = @oauth2_client_id::text,
    oauth2_client_secret = @oauth2_client_secret::text,
    oauth2_client_secret_key_id = sqlc.narg('oauth2_client_secret_key_id')::text,
    oauth2_auth_url = @oauth2_auth_url::text,
    oauth2_token_url = @oauth2_token_url::text,
    oauth2_scopes = @oauth2_scopes::text,
    api_key_header = @api_key_header::text,
    api_key_value = @api_key_value::text,
    api_key_value_key_id = sqlc.narg('api_key_value_key_id')::text,
    custom_headers = @custom_headers::text,
    custom_headers_key_id = sqlc.narg('custom_headers_key_id')::text,
    tool_allow_list = @tool_allow_list::text[],
    tool_deny_list = @tool_deny_list::text[],
    availability = @availability::text,
    enabled = @enabled::boolean,
    updated_by = @updated_by::uuid,
    updated_at = NOW()
WHERE
    id = @id::uuid
RETURNING
    *;

-- name: DeleteMCPServerConfigByID :exec
DELETE FROM
    mcp_server_configs
WHERE
    id = @id::uuid;

-- Per-user OAuth2 token for a given MCP server config. Uniqueness on
-- (mcp_server_config_id, user_id) is relied on by the upsert below.
-- name: GetMCPServerUserToken :one
SELECT
    *
FROM
    mcp_server_user_tokens
WHERE
    mcp_server_config_id = @mcp_server_config_id::uuid
    AND user_id = @user_id::uuid;

-- name: GetMCPServerUserTokensByUserID :many
SELECT
    *
FROM
    mcp_server_user_tokens
WHERE
    user_id = @user_id::uuid;

-- Insert-or-replace of a user's token for one server; the conflict
-- target matches the (config, user) unique constraint. expiry is
-- nullable for tokens that do not expire.
-- name: UpsertMCPServerUserToken :one
INSERT INTO mcp_server_user_tokens (
    mcp_server_config_id,
    user_id,
    access_token,
    access_token_key_id,
    refresh_token,
    refresh_token_key_id,
    token_type,
    expiry
) VALUES (
    @mcp_server_config_id::uuid,
    @user_id::uuid,
    @access_token::text,
    sqlc.narg('access_token_key_id')::text,
    @refresh_token::text,
    sqlc.narg('refresh_token_key_id')::text,
    @token_type::text,
    sqlc.narg('expiry')::timestamptz
)
ON CONFLICT (mcp_server_config_id, user_id) DO UPDATE SET
    access_token = @access_token::text,
    access_token_key_id = sqlc.narg('access_token_key_id')::text,
    refresh_token = @refresh_token::text,
    refresh_token_key_id = sqlc.narg('refresh_token_key_id')::text,
    token_type = @token_type::text,
    expiry = sqlc.narg('expiry')::timestamptz,
    updated_at = NOW()
RETURNING
    *;

-- name: DeleteMCPServerUserToken :exec
DELETE FROM
    mcp_server_user_tokens
WHERE
    mcp_server_config_id = @mcp_server_config_id::uuid
    AND user_id = @user_id::uuid;

-- Removes references to deleted MCP server configs from every chat's
-- mcp_server_ids array. The WHERE clause limits the rewrite to rows
-- that actually contain at least one dangling id (the array is
-- non-empty and is NOT a subset of the surviving config ids); the SET
-- subquery then rebuilds the array keeping only ids that still exist.
-- name: CleanupDeletedMCPServerIDsFromChats :exec
UPDATE chats
SET mcp_server_ids = (
    SELECT COALESCE(array_agg(sid), '{}')
    FROM unnest(chats.mcp_server_ids) AS sid
    WHERE sid IN (SELECT id FROM mcp_server_configs)
)
WHERE mcp_server_ids != '{}'
    AND NOT (mcp_server_ids <@ COALESCE((SELECT array_agg(id) FROM mcp_server_configs), '{}'));
-22
View File
@@ -236,28 +236,6 @@ sql:
aibridge_token_usage: AIBridgeTokenUsage
aibridge_user_prompt: AIBridgeUserPrompt
aibridge_model_thought: AIBridgeModelThought
mcp_server_config: MCPServerConfig
mcp_server_configs: MCPServerConfigs
mcp_server_user_token: MCPServerUserToken
mcp_server_user_tokens: MCPServerUserTokens
mcp_server_tool_snapshot: MCPServerToolSnapshot
mcp_server_tool_snapshots: MCPServerToolSnapshots
mcp_server_config_id: MCPServerConfigID
mcp_server_ids: MCPServerIDs
icon_url: IconURL
oauth2_client_id: OAuth2ClientID
oauth2_client_secret: OAuth2ClientSecret
oauth2_client_secret_key_id: OAuth2ClientSecretKeyID
oauth2_auth_url: OAuth2AuthURL
oauth2_token_url: OAuth2TokenURL
oauth2_scopes: OAuth2Scopes
api_key_header: APIKeyHeader
api_key_value: APIKeyValue
api_key_value_key_id: APIKeyValueKeyID
custom_headers_key_id: CustomHeadersKeyID
tools_json: ToolsJSON
access_token_key_id: AccessTokenKeyID
refresh_token_key_id: RefreshTokenKeyID
rules:
- name: do-not-use-public-schema-in-queries
message: "do not use public schema in queries"
-4
View File
@@ -42,10 +42,6 @@ const (
UniqueJfrogXrayScansPkey UniqueConstraint = "jfrog_xray_scans_pkey" // ALTER TABLE ONLY jfrog_xray_scans ADD CONSTRAINT jfrog_xray_scans_pkey PRIMARY KEY (agent_id, workspace_id);
UniqueLicensesJWTKey UniqueConstraint = "licenses_jwt_key" // ALTER TABLE ONLY licenses ADD CONSTRAINT licenses_jwt_key UNIQUE (jwt);
UniqueLicensesPkey UniqueConstraint = "licenses_pkey" // ALTER TABLE ONLY licenses ADD CONSTRAINT licenses_pkey PRIMARY KEY (id);
UniqueMcpServerConfigsPkey UniqueConstraint = "mcp_server_configs_pkey" // ALTER TABLE ONLY mcp_server_configs ADD CONSTRAINT mcp_server_configs_pkey PRIMARY KEY (id);
UniqueMcpServerConfigsSlugKey UniqueConstraint = "mcp_server_configs_slug_key" // ALTER TABLE ONLY mcp_server_configs ADD CONSTRAINT mcp_server_configs_slug_key UNIQUE (slug);
UniqueMcpServerUserTokensMcpServerConfigIDUserIDKey UniqueConstraint = "mcp_server_user_tokens_mcp_server_config_id_user_id_key" // ALTER TABLE ONLY mcp_server_user_tokens ADD CONSTRAINT mcp_server_user_tokens_mcp_server_config_id_user_id_key UNIQUE (mcp_server_config_id, user_id);
UniqueMcpServerUserTokensPkey UniqueConstraint = "mcp_server_user_tokens_pkey" // ALTER TABLE ONLY mcp_server_user_tokens ADD CONSTRAINT mcp_server_user_tokens_pkey PRIMARY KEY (id);
UniqueNotificationMessagesPkey UniqueConstraint = "notification_messages_pkey" // ALTER TABLE ONLY notification_messages ADD CONSTRAINT notification_messages_pkey PRIMARY KEY (id);
UniqueNotificationPreferencesPkey UniqueConstraint = "notification_preferences_pkey" // ALTER TABLE ONLY notification_preferences ADD CONSTRAINT notification_preferences_pkey PRIMARY KEY (user_id, notification_template_id);
UniqueNotificationReportGeneratorLogsPkey UniqueConstraint = "notification_report_generator_logs_pkey" // ALTER TABLE ONLY notification_report_generator_logs ADD CONSTRAINT notification_report_generator_logs_pkey PRIMARY KEY (notification_template_id);
-36
View File
@@ -10,7 +10,6 @@ import (
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/util/slice"
"github.com/coder/coder/v2/codersdk"
"github.com/coder/terraform-provider-coder/v2/provider"
)
type parameterValueSource int
@@ -110,7 +109,6 @@ func ResolveParameters(
for _, parameter := range output.Parameters {
parameterNames[parameter.Name] = struct{}{}
// Validate mutability constraints.
if !firstBuild && !parameter.Mutable {
// previousValuesMap should be used over the first render output
// for the previous state of parameters. The previous build
@@ -144,40 +142,6 @@ func ResolveParameters(
}
}
// Validate monotonic constraints. Monotonic parameters
// require the value to only increase or only decrease
// relative to the previous build.
if !firstBuild {
prevStr, hasPrev := previousValuesMap[parameter.Name]
// Only validate on currently valid parameters. Do not load extra diagnostics if
// the parameter is already invalid.
if hasPrev && parameter.Value.Valid() {
MonotonicValidationLoop:
for _, v := range parameter.Validations {
if v.Monotonic == nil || *v.Monotonic == "" {
continue
}
validation := &provider.Validation{
Monotonic: *v.Monotonic,
MinDisabled: true,
MaxDisabled: true,
}
prev := prevStr
if err := validation.Valid(provider.OptionType(parameter.Type), parameter.Value.AsString(), &prev); err != nil {
parameterError.Extend(parameter.Name, hcl.Diagnostics{
&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: fmt.Sprintf("Parameter %q monotonicity", parameter.Name),
Detail: err.Error(),
},
})
break MonotonicValidationLoop
}
}
}
}
// TODO: Fix the `hcl.Diagnostics(...)` type casting. It should not be needed.
if hcl.Diagnostics(parameter.Diagnostics).HasErrors() {
// All validation errors are raised here for each parameter.
-83
View File
@@ -11,7 +11,6 @@ import (
"github.com/coder/coder/v2/coderd/dynamicparameters"
"github.com/coder/coder/v2/coderd/dynamicparameters/rendermock"
"github.com/coder/coder/v2/coderd/httpapi/httperror"
"github.com/coder/coder/v2/coderd/util/ptr"
"github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/testutil"
"github.com/coder/preview"
@@ -123,86 +122,4 @@ func TestResolveParameters(t *testing.T) {
require.Len(t, respErr.Validations, 1)
require.Contains(t, respErr.Validations[0].Error(), "is not mutable")
})
t.Run("Monotonic", func(t *testing.T) {
t.Parallel()
tests := []struct {
name string
monotonic string
prev string // empty means no previous value
cur string
firstBuild bool
expectErr string // empty means no error expected
}{
// Increasing
{name: "increasing/increase allowed", monotonic: "increasing", prev: "5", cur: "10"},
{name: "increasing/same allowed", monotonic: "increasing", prev: "5", cur: "5"},
{name: "increasing/decrease rejected", monotonic: "increasing", prev: "10", cur: "5", expectErr: "must be equal or greater than previous value"},
// Decreasing
{name: "decreasing/decrease allowed", monotonic: "decreasing", prev: "10", cur: "5"},
{name: "decreasing/same allowed", monotonic: "decreasing", prev: "5", cur: "5"},
{name: "decreasing/increase rejected", monotonic: "decreasing", prev: "5", cur: "10", expectErr: "must be equal or lower than previous value"},
// First build — not enforced
{name: "increasing/first build", monotonic: "increasing", cur: "1", firstBuild: true},
// No previous value — not enforced
{name: "increasing/no previous", monotonic: "increasing", cur: "5"},
}
for _, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
ctrl := gomock.NewController(t)
render := rendermock.NewMockRenderer(ctrl)
render.EXPECT().
Render(gomock.Any(), gomock.Any(), gomock.Any()).
AnyTimes().
Return(&preview.Output{
Parameters: []previewtypes.Parameter{
{
ParameterData: previewtypes.ParameterData{
Name: "param",
Type: previewtypes.ParameterTypeNumber,
FormType: provider.ParameterFormTypeInput,
Mutable: true,
Validations: []*previewtypes.ParameterValidation{
{Monotonic: ptr.Ref(tc.monotonic)},
},
},
Value: previewtypes.StringLiteral(tc.cur),
Diagnostics: nil,
},
},
}, nil)
var previousValues []database.WorkspaceBuildParameter
if tc.prev != "" {
previousValues = []database.WorkspaceBuildParameter{
{Name: "param", Value: tc.prev},
}
}
ctx := testutil.Context(t, testutil.WaitShort)
_, err := dynamicparameters.ResolveParameters(ctx, uuid.New(), render, tc.firstBuild,
previousValues,
[]codersdk.WorkspaceBuildParameter{
{Name: "param", Value: tc.cur},
},
[]database.TemplateVersionPresetParameter{},
)
if tc.expectErr != "" {
require.Error(t, err)
resp, ok := httperror.IsResponder(err)
require.True(t, ok)
_, respErr := resp.Response()
require.Len(t, respErr.Validations, 1)
require.Contains(t, respErr.Validations[0].Error(), tc.expectErr)
} else {
require.NoError(t, err)
}
})
}
})
}
-921
View File
@@ -1,921 +0,0 @@
package coderd
import (
"context"
"database/sql"
"encoding/json"
"fmt"
"net/http"
"strings"
"github.com/go-chi/chi/v5"
"github.com/google/uuid"
"golang.org/x/oauth2"
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/database/dbauthz"
"github.com/coder/coder/v2/coderd/httpapi"
"github.com/coder/coder/v2/coderd/httpmw"
"github.com/coder/coder/v2/coderd/rbac"
"github.com/coder/coder/v2/coderd/rbac/policy"
"github.com/coder/coder/v2/codersdk"
)
// @Summary List MCP server configs
// @x-apidocgen {"skip": true}
// EXPERIMENTAL: this endpoint is experimental and is subject to change.
//
// listMCPServerConfigs returns every MCP server config visible to the
// caller. Admins get all configs with secrets intact; everyone else
// gets only enabled configs with secrets redacted. Each entry's
// AuthConnected reflects whether the calling user has an OAuth2 token
// stored for that server (non-OAuth2 servers are always "connected").
//
//nolint:revive // HTTP handler writes to ResponseWriter.
func (api *API) listMCPServerConfigs(rw http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	apiKey := httpmw.APIKey(r)
	// Admin users can see all MCP server configs (including disabled
	// ones) for management purposes. Non-admin users see only enabled
	// configs, which is sufficient for using the chat feature.
	isAdmin := api.Authorize(r, policy.ActionRead, rbac.ResourceDeploymentConfig)
	var configs []database.MCPServerConfig
	var err error
	if isAdmin {
		configs, err = api.Database.GetMCPServerConfigs(ctx)
	} else {
		//nolint:gocritic // All authenticated users need to read enabled MCP server configs to use the chat feature.
		configs, err = api.Database.GetEnabledMCPServerConfigs(dbauthz.AsSystemRestricted(ctx))
	}
	if err != nil {
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Failed to list MCP server configs.",
			Detail:  err.Error(),
		})
		return
	}
	// Look up the calling user's OAuth2 tokens so we can populate
	// auth_connected per server.
	//nolint:gocritic // Need to check user tokens across all servers.
	userTokens, err := api.Database.GetMCPServerUserTokensByUserID(dbauthz.AsSystemRestricted(ctx), apiKey.UserID)
	if err != nil {
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Failed to get user tokens.",
			Detail:  err.Error(),
		})
		return
	}
	// Set of server config IDs the user has a stored token for.
	tokenMap := make(map[uuid.UUID]bool, len(userTokens))
	for _, t := range userTokens {
		tokenMap[t.MCPServerConfigID] = true
	}
	resp := make([]codersdk.MCPServerConfig, 0, len(configs))
	for _, config := range configs {
		var sdkConfig codersdk.MCPServerConfig
		if isAdmin {
			sdkConfig = convertMCPServerConfig(config)
		} else {
			// Non-admins must not see stored secrets (client secret,
			// API key, custom headers) — use the redacted converter.
			sdkConfig = convertMCPServerConfigRedacted(config)
		}
		if config.AuthType == "oauth2" {
			sdkConfig.AuthConnected = tokenMap[config.ID]
		} else {
			// Non-OAuth2 auth types need no per-user connection step.
			sdkConfig.AuthConnected = true
		}
		resp = append(resp, sdkConfig)
	}
	httpapi.Write(ctx, rw, http.StatusOK, resp)
}
// @Summary Create MCP server config
// @x-apidocgen {"skip": true}
// EXPERIMENTAL: this endpoint is experimental and is subject to change.
//
// createMCPServerConfig creates a new MCP server config. Admin-only
// (requires update on deployment config). Validates that the fields
// required by the chosen auth_type are present, trims all string
// inputs, and maps unique/check constraint violations to 409/400.
//
//nolint:revive // HTTP handler writes to ResponseWriter.
func (api *API) createMCPServerConfig(rw http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	apiKey := httpmw.APIKey(r)
	if !api.Authorize(r, policy.ActionUpdate, rbac.ResourceDeploymentConfig) {
		httpapi.Forbidden(rw)
		return
	}
	var req codersdk.CreateMCPServerConfigRequest
	if !httpapi.Read(ctx, rw, r, &req) {
		return
	}
	// Validate auth-type-dependent fields.
	// NOTE: "none" and any unrecognized auth_type values pass through
	// here without field validation; presumably the DB check
	// constraint rejects invalid types — verify.
	switch req.AuthType {
	case "oauth2":
		if req.OAuth2ClientID == "" || req.OAuth2AuthURL == "" || req.OAuth2TokenURL == "" {
			httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
				Message: "OAuth2 auth type requires oauth2_client_id, oauth2_auth_url, and oauth2_token_url.",
			})
			return
		}
	case "api_key":
		if req.APIKeyHeader == "" || req.APIKeyValue == "" {
			httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
				Message: "API key auth type requires api_key_header and api_key_value.",
			})
			return
		}
	case "custom_headers":
		if len(req.CustomHeaders) == 0 {
			httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
				Message: "Custom headers auth type requires at least one custom header.",
			})
			return
		}
	}
	// Serialize the header map; rejects invalid headers before we
	// touch the database.
	customHeadersJSON, err := marshalCustomHeaders(req.CustomHeaders)
	if err != nil {
		httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
			Message: "Invalid custom headers.",
			Detail:  err.Error(),
		})
		return
	}
	// The *KeyID fields are left empty here; presumably the dbcrypt
	// layer populates them when it encrypts the secret columns —
	// verify.
	inserted, err := api.Database.InsertMCPServerConfig(ctx, database.InsertMCPServerConfigParams{
		DisplayName:             strings.TrimSpace(req.DisplayName),
		Slug:                    strings.TrimSpace(req.Slug),
		Description:             strings.TrimSpace(req.Description),
		IconURL:                 strings.TrimSpace(req.IconURL),
		Transport:               strings.TrimSpace(req.Transport),
		Url:                     strings.TrimSpace(req.URL),
		AuthType:                strings.TrimSpace(req.AuthType),
		OAuth2ClientID:          strings.TrimSpace(req.OAuth2ClientID),
		OAuth2ClientSecret:      strings.TrimSpace(req.OAuth2ClientSecret),
		OAuth2ClientSecretKeyID: sql.NullString{},
		OAuth2AuthURL:           strings.TrimSpace(req.OAuth2AuthURL),
		OAuth2TokenURL:          strings.TrimSpace(req.OAuth2TokenURL),
		OAuth2Scopes:            strings.TrimSpace(req.OAuth2Scopes),
		APIKeyHeader:            strings.TrimSpace(req.APIKeyHeader),
		APIKeyValue:             strings.TrimSpace(req.APIKeyValue),
		APIKeyValueKeyID:        sql.NullString{},
		CustomHeaders:           customHeadersJSON,
		CustomHeadersKeyID:      sql.NullString{},
		ToolAllowList:           coalesceStringSlice(trimStringSlice(req.ToolAllowList)),
		ToolDenyList:            coalesceStringSlice(trimStringSlice(req.ToolDenyList)),
		Availability:            strings.TrimSpace(req.Availability),
		Enabled:                 req.Enabled,
		CreatedBy:               apiKey.UserID,
		UpdatedBy:               apiKey.UserID,
	})
	if err != nil {
		switch {
		case database.IsUniqueViolation(err):
			// Duplicate slug (unique constraint) → 409.
			httpapi.Write(ctx, rw, http.StatusConflict, codersdk.Response{
				Message: "MCP server config already exists.",
				Detail:  err.Error(),
			})
			return
		case database.IsCheckViolation(err):
			httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
				Message: "Invalid MCP server config.",
				Detail:  err.Error(),
			})
			return
		default:
			httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
				Message: "Failed to create MCP server config.",
				Detail:  err.Error(),
			})
			return
		}
	}
	// Creator is an admin, so return the unredacted representation.
	httpapi.Write(ctx, rw, http.StatusCreated, convertMCPServerConfig(inserted))
}
// @Summary Get MCP server config
// @x-apidocgen {"skip": true}
// EXPERIMENTAL: this endpoint is experimental and is subject to change.
//
// getMCPServerConfig returns one MCP server config by ID. Admins see
// the full config; other users see a redacted view and receive 404
// for disabled configs (so disabled servers are indistinguishable
// from nonexistent ones). AuthConnected is populated for the caller.
//
//nolint:revive // HTTP handler writes to ResponseWriter.
func (api *API) getMCPServerConfig(rw http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	apiKey := httpmw.APIKey(r)
	mcpServerID, ok := parseMCPServerConfigID(rw, r)
	if !ok {
		// parseMCPServerConfigID already wrote the error response.
		return
	}
	isAdmin := api.Authorize(r, policy.ActionRead, rbac.ResourceDeploymentConfig)
	var config database.MCPServerConfig
	var err error
	if isAdmin {
		config, err = api.Database.GetMCPServerConfigByID(ctx, mcpServerID)
	} else {
		//nolint:gocritic // All authenticated users can view enabled MCP server configs.
		config, err = api.Database.GetMCPServerConfigByID(dbauthz.AsSystemRestricted(ctx), mcpServerID)
		// Hide disabled configs from non-admins as if they don't exist.
		if err == nil && !config.Enabled {
			httpapi.ResourceNotFound(rw)
			return
		}
	}
	if err != nil {
		if httpapi.Is404Error(err) {
			httpapi.ResourceNotFound(rw)
			return
		}
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Failed to get MCP server config.",
			Detail:  err.Error(),
		})
		return
	}
	var sdkConfig codersdk.MCPServerConfig
	if isAdmin {
		sdkConfig = convertMCPServerConfig(config)
	} else {
		// Non-admins must not see stored secrets.
		sdkConfig = convertMCPServerConfigRedacted(config)
	}
	// Populate AuthConnected for the calling user.
	if config.AuthType == "oauth2" {
		//nolint:gocritic // Need to check user token for this server.
		userTokens, err := api.Database.GetMCPServerUserTokensByUserID(dbauthz.AsSystemRestricted(ctx), apiKey.UserID)
		if err != nil {
			httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
				Message: "Failed to get user tokens.",
				Detail:  err.Error(),
			})
			return
		}
		// Connected iff the user has any stored token for this server.
		for _, t := range userTokens {
			if t.MCPServerConfigID == config.ID {
				sdkConfig.AuthConnected = true
				break
			}
		}
	} else {
		// Non-OAuth2 auth types need no per-user connection step.
		sdkConfig.AuthConnected = true
	}
	httpapi.Write(ctx, rw, http.StatusOK, sdkConfig)
}
// @Summary Update MCP server config
// @x-apidocgen {"skip": true}
// EXPERIMENTAL: this endpoint is experimental and is subject to change.
//
// updateMCPServerConfig applies a PATCH-style update to an MCP server
// config. Admin-only. Every request field is a pointer: nil means
// "keep the existing value", non-nil replaces it (trimmed). The
// read-merge-write runs inside a transaction so concurrent updates
// don't clobber each other's unchanged fields. When auth_type changes,
// secrets belonging to the previous auth type are cleared.
//
//nolint:revive // HTTP handler writes to ResponseWriter.
func (api *API) updateMCPServerConfig(rw http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	apiKey := httpmw.APIKey(r)
	if !api.Authorize(r, policy.ActionUpdate, rbac.ResourceDeploymentConfig) {
		httpapi.Forbidden(rw)
		return
	}
	mcpServerID, ok := parseMCPServerConfigID(rw, r)
	if !ok {
		return
	}
	var req codersdk.UpdateMCPServerConfigRequest
	if !httpapi.Read(ctx, rw, r, &req) {
		return
	}
	// Pre-validate custom headers before entering the transaction.
	var customHeadersJSON string
	if req.CustomHeaders != nil {
		var chErr error
		customHeadersJSON, chErr = marshalCustomHeaders(*req.CustomHeaders)
		if chErr != nil {
			httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
				Message: "Invalid custom headers.",
				Detail:  chErr.Error(),
			})
			return
		}
	}
	var updated database.MCPServerConfig
	err := api.Database.InTx(func(tx database.Store) error {
		existing, err := tx.GetMCPServerConfigByID(ctx, mcpServerID)
		if err != nil {
			return err
		}
		// Merge: start from the existing value for every field and
		// overwrite only fields that were provided in the request.
		displayName := existing.DisplayName
		if req.DisplayName != nil {
			displayName = strings.TrimSpace(*req.DisplayName)
		}
		slug := existing.Slug
		if req.Slug != nil {
			slug = strings.TrimSpace(*req.Slug)
		}
		description := existing.Description
		if req.Description != nil {
			description = strings.TrimSpace(*req.Description)
		}
		iconURL := existing.IconURL
		if req.IconURL != nil {
			iconURL = strings.TrimSpace(*req.IconURL)
		}
		transport := existing.Transport
		if req.Transport != nil {
			transport = strings.TrimSpace(*req.Transport)
		}
		serverURL := existing.Url
		if req.URL != nil {
			serverURL = strings.TrimSpace(*req.URL)
		}
		authType := existing.AuthType
		if req.AuthType != nil {
			authType = strings.TrimSpace(*req.AuthType)
		}
		oauth2ClientID := existing.OAuth2ClientID
		if req.OAuth2ClientID != nil {
			oauth2ClientID = strings.TrimSpace(*req.OAuth2ClientID)
		}
		// Secrets carry a companion *KeyID column; presumably it is
		// the dbcrypt encryption key reference. Clearing it when the
		// plaintext is replaced lets the new value be (re-)encrypted
		// — verify against the dbcrypt layer.
		oauth2ClientSecret := existing.OAuth2ClientSecret
		oauth2ClientSecretKeyID := existing.OAuth2ClientSecretKeyID
		if req.OAuth2ClientSecret != nil {
			oauth2ClientSecret = strings.TrimSpace(*req.OAuth2ClientSecret)
			// Clear the key ID when the secret is explicitly updated.
			oauth2ClientSecretKeyID = sql.NullString{}
		}
		oauth2AuthURL := existing.OAuth2AuthURL
		if req.OAuth2AuthURL != nil {
			oauth2AuthURL = strings.TrimSpace(*req.OAuth2AuthURL)
		}
		oauth2TokenURL := existing.OAuth2TokenURL
		if req.OAuth2TokenURL != nil {
			oauth2TokenURL = strings.TrimSpace(*req.OAuth2TokenURL)
		}
		oauth2Scopes := existing.OAuth2Scopes
		if req.OAuth2Scopes != nil {
			oauth2Scopes = strings.TrimSpace(*req.OAuth2Scopes)
		}
		apiKeyHeader := existing.APIKeyHeader
		if req.APIKeyHeader != nil {
			apiKeyHeader = strings.TrimSpace(*req.APIKeyHeader)
		}
		apiKeyValue := existing.APIKeyValue
		apiKeyValueKeyID := existing.APIKeyValueKeyID
		if req.APIKeyValue != nil {
			apiKeyValue = strings.TrimSpace(*req.APIKeyValue)
			// Clear the key ID when the value is explicitly updated.
			apiKeyValueKeyID = sql.NullString{}
		}
		customHeaders := existing.CustomHeaders
		customHeadersKeyID := existing.CustomHeadersKeyID
		if req.CustomHeaders != nil {
			customHeaders = customHeadersJSON
			// Clear the key ID when headers are explicitly updated.
			customHeadersKeyID = sql.NullString{}
		}
		toolAllowList := existing.ToolAllowList
		if req.ToolAllowList != nil {
			toolAllowList = coalesceStringSlice(trimStringSlice(*req.ToolAllowList))
		}
		toolDenyList := existing.ToolDenyList
		if req.ToolDenyList != nil {
			toolDenyList = coalesceStringSlice(trimStringSlice(*req.ToolDenyList))
		}
		availability := existing.Availability
		if req.Availability != nil {
			availability = strings.TrimSpace(*req.Availability)
		}
		enabled := existing.Enabled
		if req.Enabled != nil {
			enabled = *req.Enabled
		}
		// When auth_type changes, clear fields belonging to the
		// previous auth type so stale secrets don't persist.
		// Each case keeps only the fields its own auth type uses.
		if authType != existing.AuthType {
			switch authType {
			case "none":
				oauth2ClientID = ""
				oauth2ClientSecret = ""
				oauth2ClientSecretKeyID = sql.NullString{}
				oauth2AuthURL = ""
				oauth2TokenURL = ""
				oauth2Scopes = ""
				apiKeyHeader = ""
				apiKeyValue = ""
				apiKeyValueKeyID = sql.NullString{}
				customHeaders = "{}"
				customHeadersKeyID = sql.NullString{}
			case "oauth2":
				apiKeyHeader = ""
				apiKeyValue = ""
				apiKeyValueKeyID = sql.NullString{}
				customHeaders = "{}"
				customHeadersKeyID = sql.NullString{}
			case "api_key":
				oauth2ClientID = ""
				oauth2ClientSecret = ""
				oauth2ClientSecretKeyID = sql.NullString{}
				oauth2AuthURL = ""
				oauth2TokenURL = ""
				oauth2Scopes = ""
				customHeaders = "{}"
				customHeadersKeyID = sql.NullString{}
			case "custom_headers":
				oauth2ClientID = ""
				oauth2ClientSecret = ""
				oauth2ClientSecretKeyID = sql.NullString{}
				oauth2AuthURL = ""
				oauth2TokenURL = ""
				oauth2Scopes = ""
				apiKeyHeader = ""
				apiKeyValue = ""
				apiKeyValueKeyID = sql.NullString{}
			}
		}
		updated, err = tx.UpdateMCPServerConfig(ctx, database.UpdateMCPServerConfigParams{
			DisplayName:             displayName,
			Slug:                    slug,
			Description:             description,
			IconURL:                 iconURL,
			Transport:               transport,
			Url:                     serverURL,
			AuthType:                authType,
			OAuth2ClientID:          oauth2ClientID,
			OAuth2ClientSecret:      oauth2ClientSecret,
			OAuth2ClientSecretKeyID: oauth2ClientSecretKeyID,
			OAuth2AuthURL:           oauth2AuthURL,
			OAuth2TokenURL:          oauth2TokenURL,
			OAuth2Scopes:            oauth2Scopes,
			APIKeyHeader:            apiKeyHeader,
			APIKeyValue:             apiKeyValue,
			APIKeyValueKeyID:        apiKeyValueKeyID,
			CustomHeaders:           customHeaders,
			CustomHeadersKeyID:      customHeadersKeyID,
			ToolAllowList:           toolAllowList,
			ToolDenyList:            toolDenyList,
			Availability:            availability,
			Enabled:                 enabled,
			UpdatedBy:               apiKey.UserID,
			ID:                      existing.ID,
		})
		return err
	}, nil)
	if err != nil {
		switch {
		case httpapi.Is404Error(err):
			httpapi.ResourceNotFound(rw)
			return
		case database.IsUniqueViolation(err):
			httpapi.Write(ctx, rw, http.StatusConflict, codersdk.Response{
				Message: "MCP server config slug already exists.",
				Detail:  err.Error(),
			})
			return
		case database.IsCheckViolation(err):
			httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
				Message: "Invalid MCP server config.",
				Detail:  err.Error(),
			})
			return
		default:
			httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
				Message: "Failed to update MCP server config.",
				Detail:  err.Error(),
			})
			return
		}
	}
	// Caller is an admin, so return the unredacted representation.
	httpapi.Write(ctx, rw, http.StatusOK, convertMCPServerConfig(updated))
}
// @Summary Delete MCP server config
// @x-apidocgen {"skip": true}
// EXPERIMENTAL: this endpoint is experimental and is subject to change.
//
// deleteMCPServerConfig deletes an MCP server config by ID. Admin-only.
// Responds 204 on success, 404 if the config does not exist.
func (api *API) deleteMCPServerConfig(rw http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	if !api.Authorize(r, policy.ActionUpdate, rbac.ResourceDeploymentConfig) {
		httpapi.Forbidden(rw)
		return
	}
	mcpServerID, ok := parseMCPServerConfigID(rw, r)
	if !ok {
		return
	}
	// Existence check so we can return a distinct 404 before deleting.
	// NOTE(review): the get+delete pair is not atomic — a concurrent
	// delete between the two calls makes the second delete a no-op,
	// which still responds 204; acceptable for an idempotent DELETE.
	if _, err := api.Database.GetMCPServerConfigByID(ctx, mcpServerID); err != nil {
		if httpapi.Is404Error(err) {
			httpapi.ResourceNotFound(rw)
			return
		}
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Failed to get MCP server config.",
			Detail:  err.Error(),
		})
		return
	}
	if err := api.Database.DeleteMCPServerConfigByID(ctx, mcpServerID); err != nil {
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Failed to delete MCP server config.",
			Detail:  err.Error(),
		})
		return
	}
	rw.WriteHeader(http.StatusNoContent)
}
// @Summary Initiate MCP server OAuth2 connect
// @x-apidocgen {"skip": true}
// EXPERIMENTAL: this endpoint is experimental and is subject to change.
// Redirects the user to the MCP server's OAuth2 authorization URL.
//
// mcpServerOAuth2Connect starts the OAuth2 authorization-code flow for
// an enabled, oauth2-type MCP server: it sets a short-lived state
// cookie (CSRF protection, validated by the callback handler) and
// redirects the browser to the server's authorization URL.
//
//nolint:revive // HTTP handler writes to ResponseWriter.
func (api *API) mcpServerOAuth2Connect(rw http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	mcpServerID, ok := parseMCPServerConfigID(rw, r)
	if !ok {
		return
	}
	//nolint:gocritic // Any authenticated user can initiate OAuth2 for an enabled MCP server.
	config, err := api.Database.GetMCPServerConfigByID(dbauthz.AsSystemRestricted(ctx), mcpServerID)
	if err != nil {
		if httpapi.Is404Error(err) {
			httpapi.ResourceNotFound(rw)
			return
		}
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Failed to get MCP server config.",
			Detail:  err.Error(),
		})
		return
	}
	// Guard clauses: the server must be enabled, use oauth2, and have
	// an authorization URL configured.
	if !config.Enabled {
		httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
			Message: "MCP server is not enabled.",
		})
		return
	}
	if config.AuthType != "oauth2" {
		httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
			Message: "MCP server does not use OAuth2 authentication.",
		})
		return
	}
	if config.OAuth2AuthURL == "" {
		httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
			Message: "MCP server OAuth2 authorization URL is not configured.",
		})
		return
	}
	// Build the authorization URL. The frontend opens this in a popup.
	// The callback URL is on our server; after the exchange we store
	// the token and close the popup.
	state := uuid.New().String()
	// Cookie name and path are scoped per server config so concurrent
	// flows against different servers don't collide.
	http.SetCookie(rw, api.DeploymentValues.HTTPCookies.Apply(&http.Cookie{
		Name:     "mcp_oauth2_state_" + config.ID.String(),
		Value:    state,
		Path:     fmt.Sprintf("/api/experimental/mcp/servers/%s/oauth2/callback", config.ID),
		MaxAge:   600, // 10 minutes
		HttpOnly: true,
		SameSite: http.SameSiteLaxMode,
	}))
	oauth2Config := &oauth2.Config{
		ClientID:     config.OAuth2ClientID,
		ClientSecret: config.OAuth2ClientSecret,
		Endpoint: oauth2.Endpoint{
			AuthURL:  config.OAuth2AuthURL,
			TokenURL: config.OAuth2TokenURL,
		},
		RedirectURL: fmt.Sprintf("%s/api/experimental/mcp/servers/%s/oauth2/callback", api.AccessURL.String(), config.ID),
	}
	// Scopes are stored as a single space-separated string; an empty
	// string means "request no explicit scopes".
	var scopes []string
	if config.OAuth2Scopes != "" {
		scopes = strings.Split(config.OAuth2Scopes, " ")
	}
	oauth2Config.Scopes = scopes
	authURL := oauth2Config.AuthCodeURL(state)
	http.Redirect(rw, r, authURL, http.StatusTemporaryRedirect)
}
// @Summary Handle MCP server OAuth2 callback
// @x-apidocgen {"skip": true}
// EXPERIMENTAL: this endpoint is experimental and is subject to change.
// Exchanges the authorization code for tokens and stores them.
//
// Flow: validate the server config, surface any provider error,
// verify the CSRF state cookie set by mcpServerOAuth2Connect, exchange
// the code for a token, persist it for the calling user, then return a
// small HTML page that notifies the opener window and closes the popup.
//
//nolint:revive // HTTP handler writes to ResponseWriter.
func (api *API) mcpServerOAuth2Callback(rw http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	apiKey := httpmw.APIKey(r)
	mcpServerID, ok := parseMCPServerConfigID(rw, r)
	if !ok {
		return
	}
	//nolint:gocritic // Any authenticated user can complete OAuth2 for an enabled MCP server.
	config, err := api.Database.GetMCPServerConfigByID(dbauthz.AsSystemRestricted(ctx), mcpServerID)
	if err != nil {
		if httpapi.Is404Error(err) {
			httpapi.ResourceNotFound(rw)
			return
		}
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Failed to get MCP server config.",
			Detail:  err.Error(),
		})
		return
	}
	// Same preconditions as the /connect endpoint: the config must
	// still be enabled and still be an OAuth2 config when the user
	// returns from the provider.
	if !config.Enabled {
		httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
			Message: "MCP server is not enabled.",
		})
		return
	}
	if config.AuthType != "oauth2" {
		httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
			Message: "MCP server does not use OAuth2 authentication.",
		})
		return
	}
	// Check if the OAuth2 provider returned an error (e.g., user
	// denied consent). error_description is preferred when present.
	if oauthError := r.URL.Query().Get("error"); oauthError != "" {
		desc := r.URL.Query().Get("error_description")
		if desc == "" {
			desc = oauthError
		}
		httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
			Message: "OAuth2 provider returned an error.",
			Detail:  desc,
		})
		return
	}
	code := r.URL.Query().Get("code")
	if code == "" {
		httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
			Message: "Missing authorization code.",
		})
		return
	}
	// Validate the state parameter for CSRF protection. The expected
	// value comes from the per-server cookie set during /connect; a
	// missing cookie (expired, or the callback hit directly) fails
	// closed because expectedState stays "".
	expectedState := ""
	if cookie, err := r.Cookie("mcp_oauth2_state_" + config.ID.String()); err == nil {
		expectedState = cookie.Value
	}
	actualState := r.URL.Query().Get("state")
	if expectedState == "" || actualState != expectedState {
		httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
			Message: "Invalid or missing OAuth2 state parameter.",
		})
		return
	}
	// Clear the state cookie. MaxAge -1 deletes it immediately so the
	// same state cannot be replayed.
	http.SetCookie(rw, api.DeploymentValues.HTTPCookies.Apply(&http.Cookie{
		Name:     "mcp_oauth2_state_" + config.ID.String(),
		Value:    "",
		Path:     fmt.Sprintf("/api/experimental/mcp/servers/%s/oauth2/callback", config.ID),
		MaxAge:   -1,
		HttpOnly: true,
		SameSite: http.SameSiteLaxMode,
	}))
	// Exchange the authorization code for tokens. This config must
	// mirror the one built in mcpServerOAuth2Connect (in particular
	// the redirect URL) or the provider will reject the exchange.
	oauth2Config := &oauth2.Config{
		ClientID:     config.OAuth2ClientID,
		ClientSecret: config.OAuth2ClientSecret,
		Endpoint: oauth2.Endpoint{
			AuthURL:  config.OAuth2AuthURL,
			TokenURL: config.OAuth2TokenURL,
		},
		RedirectURL: fmt.Sprintf("%s/api/experimental/mcp/servers/%s/oauth2/callback", api.AccessURL.String(), config.ID),
	}
	// NOTE(review): splitting on a single space yields empty scope
	// entries if OAuth2Scopes contains repeated or surrounding
	// whitespace; strings.Fields would be more robust — confirm no
	// provider depends on the current behavior before changing.
	var scopes []string
	if config.OAuth2Scopes != "" {
		scopes = strings.Split(config.OAuth2Scopes, " ")
	}
	oauth2Config.Scopes = scopes
	// Use the deployment's HTTP client for the token exchange to
	// respect proxy settings and avoid using http.DefaultClient.
	exchangeCtx := context.WithValue(ctx, oauth2.HTTPClient, api.HTTPClient)
	token, err := oauth2Config.Exchange(exchangeCtx, code)
	if err != nil {
		// Detail is a fixed string here — presumably to avoid echoing
		// upstream provider responses to the browser; confirm before
		// adding err.Error().
		httpapi.Write(ctx, rw, http.StatusBadGateway, codersdk.Response{
			Message: "Failed to exchange authorization code for token.",
			Detail:  "The OAuth2 token exchange with the upstream provider failed.",
		})
		return
	}
	// Store the token for the user.
	refreshToken := ""
	if token.RefreshToken != "" {
		refreshToken = token.RefreshToken
	}
	// A zero expiry means the provider issued a token without an
	// expiration; store NULL in that case.
	var expiry sql.NullTime
	if !token.Expiry.IsZero() {
		expiry = sql.NullTime{Time: token.Expiry, Valid: true}
	}
	//nolint:gocritic // Users store their own tokens.
	_, err = api.Database.UpsertMCPServerUserToken(dbauthz.AsSystemRestricted(ctx), database.UpsertMCPServerUserTokenParams{
		MCPServerConfigID: mcpServerID,
		UserID:            apiKey.UserID,
		AccessToken:       token.AccessToken,
		AccessTokenKeyID:  sql.NullString{},
		RefreshToken:      refreshToken,
		RefreshTokenKeyID: sql.NullString{},
		TokenType:         token.TokenType,
		Expiry:            expiry,
	})
	if err != nil {
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Failed to store OAuth2 token.",
			Detail:  err.Error(),
		})
		return
	}
	// Respond with a simple HTML page that closes the popup window.
	// The interpolated values are a server-generated UUID and the
	// deployment access URL, not user-controlled input. postMessage
	// is targeted at the access URL so other origins cannot observe
	// the completion event; the CSP permits only the inline script.
	rw.Header().Set("Content-Security-Policy", "default-src 'none'; script-src 'unsafe-inline'")
	rw.Header().Set("Content-Type", "text/html; charset=utf-8")
	rw.WriteHeader(http.StatusOK)
	_, _ = rw.Write([]byte(`<!DOCTYPE html><html><body><script>
if (window.opener) {
window.opener.postMessage({type: "mcp-oauth2-complete", serverID: "` + config.ID.String() + `"}, "` + api.AccessURL.String() + `");
window.close();
} else {
document.body.innerText = "Authentication successful. You may close this window.";
}
</script></body></html>`))
}
// @Summary Disconnect MCP server OAuth2 token
// @x-apidocgen {"skip": true}
// EXPERIMENTAL: this endpoint is experimental and is subject to change.
// Removes the user's stored OAuth2 token for an MCP server. The
// operation is idempotent from the caller's point of view: deleting a
// token that does not exist still yields 204.
func (api *API) mcpServerOAuth2Disconnect(rw http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	apiKey := httpmw.APIKey(r)
	mcpServerID, ok := parseMCPServerConfigID(rw, r)
	if !ok {
		return
	}
	//nolint:gocritic // Users manage their own tokens.
	if err := api.Database.DeleteMCPServerUserToken(dbauthz.AsSystemRestricted(ctx), database.DeleteMCPServerUserTokenParams{
		MCPServerConfigID: mcpServerID,
		UserID:            apiKey.UserID,
	}); err != nil {
		httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
			Message: "Failed to disconnect OAuth2 token.",
			Detail:  err.Error(),
		})
		return
	}
	rw.WriteHeader(http.StatusNoContent)
}
// parseMCPServerConfigID extracts the MCP server config UUID from the
// "mcpServer" path parameter. On failure it writes a 400 response and
// returns false, so callers should simply return when ok is false.
func parseMCPServerConfigID(rw http.ResponseWriter, r *http.Request) (uuid.UUID, bool) {
	id, err := uuid.Parse(chi.URLParam(r, "mcpServer"))
	if err == nil {
		return id, true
	}
	httpapi.Write(r.Context(), rw, http.StatusBadRequest, codersdk.Response{
		Message: "Invalid MCP server config ID.",
		Detail:  err.Error(),
	})
	return uuid.Nil, false
}
// convertMCPServerConfig converts a database MCP server config to the
// SDK type. Secrets are never returned; only has_* booleans are set.
// Admin-only fields (OAuth2 client ID, auth URLs, etc.) are included —
// use convertMCPServerConfigRedacted for non-admin callers.
func convertMCPServerConfig(config database.MCPServerConfig) codersdk.MCPServerConfig {
	return codersdk.MCPServerConfig{
		ID:          config.ID,
		DisplayName: config.DisplayName,
		Slug:        config.Slug,
		Description: config.Description,
		IconURL:     config.IconURL,
		Transport:   config.Transport,
		URL:         config.Url,
		AuthType:    config.AuthType,
		// Secret-bearing fields map to booleans only: the client
		// learns whether a secret exists, never its value.
		OAuth2ClientID:  config.OAuth2ClientID,
		HasOAuth2Secret: config.OAuth2ClientSecret != "",
		OAuth2AuthURL:   config.OAuth2AuthURL,
		OAuth2TokenURL:  config.OAuth2TokenURL,
		OAuth2Scopes:    config.OAuth2Scopes,
		APIKeyHeader:    config.APIKeyHeader,
		HasAPIKey:       config.APIKeyValue != "",
		// "{}" is how marshalCustomHeaders encodes an empty map, so
		// treat it the same as no headers at all.
		HasCustomHeaders: len(config.CustomHeaders) > 0 && config.CustomHeaders != "{}",
		// Coalesce to non-nil so the tool lists never serialize as
		// JSON null.
		ToolAllowList: coalesceStringSlice(config.ToolAllowList),
		ToolDenyList:  coalesceStringSlice(config.ToolDenyList),
		Availability:  config.Availability,
		Enabled:       config.Enabled,
		CreatedAt:     config.CreatedAt,
		UpdatedAt:     config.UpdatedAt,
	}
}
// convertMCPServerConfigRedacted is the same as convertMCPServerConfig
// but strips admin-only fields (OAuth2 details, API key header) for
// non-admin callers.
func convertMCPServerConfigRedacted(config database.MCPServerConfig) codersdk.MCPServerConfig {
	redacted := convertMCPServerConfig(config)
	// Connection details are admin-only.
	redacted.URL = ""
	redacted.Transport = ""
	// OAuth2 client configuration is admin-only.
	redacted.OAuth2ClientID = ""
	redacted.OAuth2AuthURL = ""
	redacted.OAuth2TokenURL = ""
	redacted.OAuth2Scopes = ""
	// The API key header name is admin-only as well.
	redacted.APIKeyHeader = ""
	return redacted
}
// marshalCustomHeaders encodes a map of custom headers to JSON for
// database storage. A nil map produces an empty JSON object.
func marshalCustomHeaders(headers map[string]string) (string, error) {
	// A nil map must still round-trip as a valid JSON object so the
	// column never receives an empty string.
	if headers == nil {
		return "{}", nil
	}
	raw, err := json.Marshal(headers)
	if err != nil {
		return "", err
	}
	return string(raw), nil
}
// trimStringSlice trims whitespace from each element and drops empty
// strings. A nil input stays nil; a non-nil input always yields a
// non-nil (possibly empty) slice.
func trimStringSlice(ss []string) []string {
	if ss == nil {
		return nil
	}
	trimmed := make([]string, 0, len(ss))
	for _, raw := range ss {
		s := strings.TrimSpace(raw)
		if s == "" {
			continue
		}
		trimmed = append(trimmed, s)
	}
	return trimmed
}
// coalesceStringSlice returns ss if non-nil, otherwise an empty
// non-nil slice. This prevents pq.Array from sending NULL for
// NOT NULL text[] columns.
func coalesceStringSlice(ss []string) []string {
	if ss != nil {
		return ss
	}
	return []string{}
}
-489
View File
@@ -1,489 +0,0 @@
package coderd_test
import (
"encoding/json"
"net/http"
"strings"
"testing"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/coder/coder/v2/coderd/coderdtest"
"github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/testutil"
)
// mcpDeploymentValues returns deployment values with the agents
// experiment enabled, which is required by the MCP server config
// endpoints.
func mcpDeploymentValues(t testing.TB) *codersdk.DeploymentValues {
	t.Helper()
	values := coderdtest.DeploymentValues(t)
	// The MCP server config endpoints are gated behind the agents
	// experiment, so every test server needs it switched on.
	values.Experiments = []string{string(codersdk.ExperimentAgents)}
	return values
}
// newMCPClient creates a test server with the agents experiment
// enabled and returns the admin client. Callers still need to create
// the first user before making authenticated requests.
func newMCPClient(t testing.TB) *codersdk.Client {
	t.Helper()
	return coderdtest.New(t, &coderdtest.Options{
		DeploymentValues: mcpDeploymentValues(t),
	})
}
// createMCPServerConfig is a helper that creates a minimal MCP server
// config with auth_type=none, deriving the display name and URL from
// slug. The enabled flag controls whether non-admin users can see it.
func createMCPServerConfig(t testing.TB, client *codersdk.Client, slug string, enabled bool) codersdk.MCPServerConfig {
	t.Helper()
	ctx := testutil.Context(t, testutil.WaitLong)
	config, err := client.CreateMCPServerConfig(ctx, codersdk.CreateMCPServerConfigRequest{
		DisplayName:  "Test Server " + slug,
		Slug:         slug,
		Description:  "A test MCP server.",
		IconURL:      "https://example.com/icon.png",
		Transport:    "streamable_http",
		URL:          "https://mcp.example.com/" + slug,
		AuthType:     "none",
		Availability: "default_on",
		Enabled:      enabled,
		// Explicit empty slices rather than nil for the list fields.
		ToolAllowList: []string{},
		ToolDenyList:  []string{},
	})
	require.NoError(t, err)
	return config
}
func TestMCPServerConfigsCRUD(t *testing.T) {
t.Parallel()
ctx := testutil.Context(t, testutil.WaitLong)
client := newMCPClient(t)
_ = coderdtest.CreateFirstUser(t, client)
// Create a config with all fields populated including OAuth2
// secrets so we can verify they are not leaked.
created, err := client.CreateMCPServerConfig(ctx, codersdk.CreateMCPServerConfigRequest{
DisplayName: "My MCP Server",
Slug: "my-mcp-server",
Description: "Integration test server.",
IconURL: "https://example.com/icon.png",
Transport: "streamable_http",
URL: "https://mcp.example.com/v1",
AuthType: "oauth2",
OAuth2ClientID: "client-id-123",
OAuth2ClientSecret: "super-secret-value",
OAuth2AuthURL: "https://auth.example.com/authorize",
OAuth2TokenURL: "https://auth.example.com/token",
OAuth2Scopes: "read write",
Availability: "default_on",
Enabled: true,
ToolAllowList: []string{},
ToolDenyList: []string{},
})
require.NoError(t, err)
require.NotEqual(t, uuid.Nil, created.ID)
require.Equal(t, "My MCP Server", created.DisplayName)
require.Equal(t, "my-mcp-server", created.Slug)
require.Equal(t, "Integration test server.", created.Description)
require.Equal(t, "streamable_http", created.Transport)
require.Equal(t, "https://mcp.example.com/v1", created.URL)
require.Equal(t, "oauth2", created.AuthType)
require.Equal(t, "client-id-123", created.OAuth2ClientID)
require.Equal(t, "default_on", created.Availability)
require.True(t, created.Enabled)
// Verify the secret is indicated but never returned.
require.True(t, created.HasOAuth2Secret)
// Verify the config appears in the list.
configs, err := client.MCPServerConfigs(ctx)
require.NoError(t, err)
require.Len(t, configs, 1)
require.Equal(t, created.ID, configs[0].ID)
require.True(t, configs[0].HasOAuth2Secret)
// Update display name and availability.
newName := "Renamed Server"
newAvail := "force_on"
updated, err := client.UpdateMCPServerConfig(ctx, created.ID, codersdk.UpdateMCPServerConfigRequest{
DisplayName: &newName,
Availability: &newAvail,
})
require.NoError(t, err)
require.Equal(t, "Renamed Server", updated.DisplayName)
require.Equal(t, "force_on", updated.Availability)
// Unchanged fields should remain the same.
require.Equal(t, "my-mcp-server", updated.Slug)
require.Equal(t, "oauth2", updated.AuthType)
// Verify the update took effect through the list.
configs, err = client.MCPServerConfigs(ctx)
require.NoError(t, err)
require.Len(t, configs, 1)
require.Equal(t, "Renamed Server", configs[0].DisplayName)
require.Equal(t, "force_on", configs[0].Availability)
// Delete it.
err = client.DeleteMCPServerConfig(ctx, created.ID)
require.NoError(t, err)
// Verify it's gone.
configs, err = client.MCPServerConfigs(ctx)
require.NoError(t, err)
require.Empty(t, configs)
}
func TestMCPServerConfigsNonAdmin(t *testing.T) {
t.Parallel()
ctx := testutil.Context(t, testutil.WaitLong)
adminClient := newMCPClient(t)
firstUser := coderdtest.CreateFirstUser(t, adminClient)
memberClient, _ := coderdtest.CreateAnotherUser(t, adminClient, firstUser.OrganizationID)
// Admin creates two configs: one enabled, one disabled.
_ = createMCPServerConfig(t, adminClient, "enabled-server", true)
_ = createMCPServerConfig(t, adminClient, "disabled-server", false)
// Admin sees both.
adminConfigs, err := adminClient.MCPServerConfigs(ctx)
require.NoError(t, err)
require.Len(t, adminConfigs, 2)
// Regular user sees only the enabled one.
memberConfigs, err := memberClient.MCPServerConfigs(ctx)
require.NoError(t, err)
require.Len(t, memberConfigs, 1)
require.Equal(t, "enabled-server", memberConfigs[0].Slug)
}
// TestMCPServerConfigsSecretsNeverLeaked is a load-bearing test that
// ensures secret fields (OAuth2 client secret, API key value, custom
// headers) are never present in API responses for any caller. If this
// test fails, it means a code change accidentally started exposing
// secrets. See: https://github.com/coder/coder/pull/23227#discussion_r2959461109
func TestMCPServerConfigsSecretsNeverLeaked(t *testing.T) {
t.Parallel()
ctx := testutil.Context(t, testutil.WaitLong)
adminClient := newMCPClient(t)
firstUser := coderdtest.CreateFirstUser(t, adminClient)
memberClient, _ := coderdtest.CreateAnotherUser(t, adminClient, firstUser.OrganizationID)
// Create a config with ALL secret fields populated.
created, err := adminClient.CreateMCPServerConfig(ctx, codersdk.CreateMCPServerConfigRequest{
DisplayName: "Secrets Test",
Slug: "secrets-test",
Transport: "streamable_http",
URL: "https://mcp.example.com/secrets",
AuthType: "oauth2",
OAuth2ClientID: "client-id-secret-test",
OAuth2ClientSecret: "THIS-IS-A-SECRET-VALUE",
OAuth2AuthURL: "https://auth.example.com/authorize",
OAuth2TokenURL: "https://auth.example.com/token",
OAuth2Scopes: "read write",
APIKeyHeader: "X-Api-Key",
APIKeyValue: "THIS-IS-A-SECRET-API-KEY",
CustomHeaders: map[string]string{"X-Custom": "THIS-IS-A-SECRET-HEADER"},
Availability: "default_on",
Enabled: true,
ToolAllowList: []string{},
ToolDenyList: []string{},
})
require.NoError(t, err)
// The sentinel values we must never see in any JSON response.
secrets := []string{
"THIS-IS-A-SECRET-VALUE",
"THIS-IS-A-SECRET-API-KEY",
"THIS-IS-A-SECRET-HEADER",
}
assertNoSecrets := func(t *testing.T, label string, v interface{}) {
t.Helper()
data, err := json.Marshal(v)
require.NoError(t, err)
jsonStr := string(data)
for _, secret := range secrets {
assert.False(t, strings.Contains(jsonStr, secret),
"%s: JSON response contains secret %q", label, secret)
}
}
// Verify the create response doesn't leak secrets.
assertNoSecrets(t, "admin create response", created)
// Verify boolean indicators are set correctly.
require.True(t, created.HasOAuth2Secret, "HasOAuth2Secret should be true")
require.True(t, created.HasAPIKey, "HasAPIKey should be true")
require.True(t, created.HasCustomHeaders, "HasCustomHeaders should be true")
// Admin list endpoint.
adminConfigs, err := adminClient.MCPServerConfigs(ctx)
require.NoError(t, err)
require.NotEmpty(t, adminConfigs)
for _, cfg := range adminConfigs {
assertNoSecrets(t, "admin list", cfg)
}
// Admin get-by-ID endpoint.
adminSingle, err := adminClient.MCPServerConfigByID(ctx, created.ID)
require.NoError(t, err)
assertNoSecrets(t, "admin get-by-id", adminSingle)
// Non-admin list endpoint.
memberConfigs, err := memberClient.MCPServerConfigs(ctx)
require.NoError(t, err)
require.NotEmpty(t, memberConfigs)
for _, cfg := range memberConfigs {
assertNoSecrets(t, "member list", cfg)
// Non-admin should also not see admin-only fields.
assert.Empty(t, cfg.OAuth2ClientID, "member should not see OAuth2ClientID")
assert.Empty(t, cfg.OAuth2AuthURL, "member should not see OAuth2AuthURL")
assert.Empty(t, cfg.OAuth2TokenURL, "member should not see OAuth2TokenURL")
assert.Empty(t, cfg.APIKeyHeader, "member should not see APIKeyHeader")
assert.Empty(t, cfg.OAuth2Scopes, "member should not see OAuth2Scopes")
assert.Empty(t, cfg.URL, "member should not see URL")
assert.Empty(t, cfg.Transport, "member should not see Transport")
}
// Non-admin get-by-ID endpoint.
memberSingle, err := memberClient.MCPServerConfigByID(ctx, created.ID)
require.NoError(t, err)
assertNoSecrets(t, "member get-by-id", memberSingle)
assert.Empty(t, memberSingle.OAuth2ClientID, "member should not see OAuth2ClientID")
assert.Empty(t, memberSingle.OAuth2AuthURL, "member should not see OAuth2AuthURL")
assert.Empty(t, memberSingle.OAuth2TokenURL, "member should not see OAuth2TokenURL")
assert.Empty(t, memberSingle.OAuth2Scopes, "member should not see OAuth2Scopes")
assert.Empty(t, memberSingle.APIKeyHeader, "member should not see APIKeyHeader")
assert.Empty(t, memberSingle.URL, "member should not see URL")
assert.Empty(t, memberSingle.Transport, "member should not see Transport")
}
func TestMCPServerConfigsAuthConnected(t *testing.T) {
t.Parallel()
ctx := testutil.Context(t, testutil.WaitLong)
adminClient := newMCPClient(t)
firstUser := coderdtest.CreateFirstUser(t, adminClient)
memberClient, _ := coderdtest.CreateAnotherUser(t, adminClient, firstUser.OrganizationID)
// Create an oauth2 server config (enabled).
created, err := adminClient.CreateMCPServerConfig(ctx, codersdk.CreateMCPServerConfigRequest{
DisplayName: "OAuth Server",
Slug: "oauth-server",
Transport: "streamable_http",
URL: "https://mcp.example.com/oauth",
AuthType: "oauth2",
OAuth2ClientID: "cid",
OAuth2AuthURL: "https://auth.example.com/authorize",
OAuth2TokenURL: "https://auth.example.com/token",
Availability: "default_on",
Enabled: true,
ToolAllowList: []string{},
ToolDenyList: []string{},
})
require.NoError(t, err)
// Regular user lists configs — auth_connected should be false
// because no token has been stored.
memberConfigs, err := memberClient.MCPServerConfigs(ctx)
require.NoError(t, err)
require.Len(t, memberConfigs, 1)
require.Equal(t, created.ID, memberConfigs[0].ID)
require.False(t, memberConfigs[0].AuthConnected)
// Also create a non-oauth server. It should report
// auth_connected=true because no auth is needed.
_ = createMCPServerConfig(t, adminClient, "no-auth-server", true)
memberConfigs, err = memberClient.MCPServerConfigs(ctx)
require.NoError(t, err)
require.Len(t, memberConfigs, 2)
for _, cfg := range memberConfigs {
if cfg.AuthType == "none" {
require.True(t, cfg.AuthConnected)
} else {
require.False(t, cfg.AuthConnected)
}
}
}
func TestMCPServerConfigsAvailability(t *testing.T) {
t.Parallel()
client := newMCPClient(t)
_ = coderdtest.CreateFirstUser(t, client)
validValues := []string{"force_on", "default_on", "default_off"}
for _, av := range validValues {
av := av
t.Run(av, func(t *testing.T) {
t.Parallel()
ctx := testutil.Context(t, testutil.WaitLong)
created, err := client.CreateMCPServerConfig(ctx, codersdk.CreateMCPServerConfigRequest{
DisplayName: "Server " + av,
Slug: "server-" + av,
Transport: "streamable_http",
URL: "https://mcp.example.com/" + av,
AuthType: "none",
Availability: av,
Enabled: true,
ToolAllowList: []string{},
ToolDenyList: []string{},
})
require.NoError(t, err)
require.Equal(t, av, created.Availability)
})
}
t.Run("InvalidAvailability", func(t *testing.T) {
t.Parallel()
ctx := testutil.Context(t, testutil.WaitLong)
_, err := client.CreateMCPServerConfig(ctx, codersdk.CreateMCPServerConfigRequest{
DisplayName: "Bad Availability",
Slug: "bad-avail",
Transport: "streamable_http",
URL: "https://mcp.example.com/bad",
AuthType: "none",
Availability: "always_on",
Enabled: true,
ToolAllowList: []string{},
ToolDenyList: []string{},
})
require.Error(t, err)
var sdkErr *codersdk.Error
require.ErrorAs(t, err, &sdkErr)
require.Equal(t, http.StatusBadRequest, sdkErr.StatusCode())
})
}
func TestMCPServerConfigsUniqueSlug(t *testing.T) {
t.Parallel()
ctx := testutil.Context(t, testutil.WaitLong)
client := newMCPClient(t)
_ = coderdtest.CreateFirstUser(t, client)
_, err := client.CreateMCPServerConfig(ctx, codersdk.CreateMCPServerConfigRequest{
DisplayName: "First",
Slug: "test-server",
Transport: "streamable_http",
URL: "https://mcp.example.com/first",
AuthType: "none",
Availability: "default_off",
Enabled: true,
ToolAllowList: []string{},
ToolDenyList: []string{},
})
require.NoError(t, err)
// Attempt to create another config with the same slug.
_, err = client.CreateMCPServerConfig(ctx, codersdk.CreateMCPServerConfigRequest{
DisplayName: "Second",
Slug: "test-server",
Transport: "streamable_http",
URL: "https://mcp.example.com/second",
AuthType: "none",
Availability: "default_off",
Enabled: true,
ToolAllowList: []string{},
ToolDenyList: []string{},
})
require.Error(t, err)
var sdkErr *codersdk.Error
require.ErrorAs(t, err, &sdkErr)
require.Equal(t, http.StatusConflict, sdkErr.StatusCode())
}
func TestMCPServerConfigsOAuth2Disconnect(t *testing.T) {
t.Parallel()
ctx := testutil.Context(t, testutil.WaitLong)
adminClient := newMCPClient(t)
firstUser := coderdtest.CreateFirstUser(t, adminClient)
memberClient, _ := coderdtest.CreateAnotherUser(t, adminClient, firstUser.OrganizationID)
created, err := adminClient.CreateMCPServerConfig(ctx, codersdk.CreateMCPServerConfigRequest{
DisplayName: "OAuth Disconnect Test",
Slug: "oauth-disconnect",
Transport: "streamable_http",
URL: "https://mcp.example.com/oauth-disc",
AuthType: "oauth2",
OAuth2ClientID: "cid",
OAuth2AuthURL: "https://auth.example.com/authorize",
OAuth2TokenURL: "https://auth.example.com/token",
Availability: "default_on",
Enabled: true,
ToolAllowList: []string{},
ToolDenyList: []string{},
})
require.NoError(t, err)
// Disconnect should succeed even when no token exists (idempotent).
err = memberClient.MCPServerOAuth2Disconnect(ctx, created.ID)
require.NoError(t, err)
}
func TestChatWithMCPServerIDs(t *testing.T) {
t.Parallel()
ctx := testutil.Context(t, testutil.WaitLong)
client := newMCPClient(t)
_ = coderdtest.CreateFirstUser(t, client)
// Create the chat model config required for creating a chat.
_ = createChatModelConfigForMCP(t, client)
// Create an enabled MCP server config.
mcpConfig := createMCPServerConfig(t, client, "chat-mcp-server", true)
// Create a chat referencing the MCP server.
chat, err := client.CreateChat(ctx, codersdk.CreateChatRequest{
Content: []codersdk.ChatInputPart{
{
Type: codersdk.ChatInputPartTypeText,
Text: "hello with mcp server",
},
},
MCPServerIDs: []uuid.UUID{mcpConfig.ID},
})
require.NoError(t, err)
require.NotEqual(t, uuid.Nil, chat.ID)
require.Contains(t, chat.MCPServerIDs, mcpConfig.ID)
// Fetch the chat and verify the MCP server IDs persist.
fetched, err := client.GetChat(ctx, chat.ID)
require.NoError(t, err)
require.Contains(t, fetched.MCPServerIDs, mcpConfig.ID)
}
// createChatModelConfigForMCP sets up a chat provider and model
// config so that CreateChat succeeds. This mirrors the helper in
// chats_test.go but is defined here to avoid coupling.
func createChatModelConfigForMCP(t testing.TB, client *codersdk.Client) codersdk.ChatModelConfig {
	t.Helper()
	ctx := testutil.Context(t, testutil.WaitLong)
	// A provider must exist before a model config can reference it.
	_, err := client.CreateChatProvider(ctx, codersdk.CreateChatProviderConfigRequest{
		Provider: "openai",
		APIKey:   "test-api-key",
	})
	require.NoError(t, err)
	contextLimit := int64(4096)
	isDefault := true
	// Mark the model as default so CreateChat can resolve a model
	// without an explicit model_config_id.
	modelConfig, err := client.CreateChatModelConfig(ctx, codersdk.CreateChatModelConfigRequest{
		Provider:     "openai",
		Model:        "gpt-4o-mini",
		ContextLimit: &contextLimit,
		IsDefault:    &isDefault,
	})
	require.NoError(t, err)
	return modelConfig
}
-37
View File
@@ -744,43 +744,6 @@ func (api *API) postLogout(rw http.ResponseWriter, r *http.Request) {
})
}
// @Summary Set session token cookie
// @Description Converts the current session token into a Set-Cookie response.
// @Description This is used by embedded iframes (e.g. VS Code chat) that
// @Description receive a session token out-of-band via postMessage but need
// @Description cookie-based auth for WebSocket connections.
// @ID set-session-token-cookie
// @Security CoderSessionToken
// @Tags Authorization
// @Success 204
// @Router /users/me/session/token-to-cookie [post]
// @x-apidocgen {"skip": true}
func (api *API) postSessionTokenCookie(rw http.ResponseWriter, r *http.Request) {
// Only accept the token from the Coder-Session-Token header.
// Other sources (query params, cookies) should not be allowed
// to bootstrap a new cookie.
token := r.Header.Get(codersdk.SessionTokenHeader)
if token == "" {
httpapi.Write(r.Context(), rw, http.StatusBadRequest, codersdk.Response{
Message: "Session token must be provided via the Coder-Session-Token header.",
})
return
}
apiKey := httpmw.APIKey(r)
cookie := api.DeploymentValues.HTTPCookies.Apply(&http.Cookie{
Name: codersdk.SessionTokenCookie,
Value: token,
Path: "/",
HttpOnly: true,
// Expire the cookie when the underlying API key expires.
Expires: apiKey.ExpiresAt,
})
http.SetCookie(rw, cookie)
rw.WriteHeader(http.StatusNoContent)
}
// GithubOAuth2Team represents a team scoped to an organization.
type GithubOAuth2Team struct {
Organization string
-58
View File
@@ -72,64 +72,6 @@ func (api *API) userDebugOIDC(rw http.ResponseWriter, r *http.Request) {
httpapi.Write(ctx, rw, http.StatusOK, link.Claims)
}
// Returns the merged OIDC claims for the authenticated user.
//
// @Summary Get OIDC claims for the authenticated user
// @ID get-oidc-claims-for-the-authenticated-user
// @Security CoderSessionToken
// @Produce json
// @Tags Users
// @Success 200 {object} codersdk.OIDCClaimsResponse
// @Router /users/oidc-claims [get]
func (api *API) userOIDCClaims(rw http.ResponseWriter, r *http.Request) {
var (
ctx = r.Context()
apiKey = httpmw.APIKey(r)
)
user, err := api.Database.GetUserByID(ctx, apiKey.UserID)
if err != nil {
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
Message: "Failed to get user.",
Detail: err.Error(),
})
return
}
if user.LoginType != database.LoginTypeOIDC {
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
Message: "User is not an OIDC user.",
})
return
}
//nolint:gocritic // GetUserLinkByUserIDLoginType requires reading
// rbac.ResourceSystem. The endpoint is scoped to the authenticated
// user's own identity via apiKey, so this is safe.
link, err := api.Database.GetUserLinkByUserIDLoginType(
dbauthz.AsSystemRestricted(ctx),
database.GetUserLinkByUserIDLoginTypeParams{
UserID: user.ID,
LoginType: database.LoginTypeOIDC,
},
)
if err != nil {
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
Message: "Failed to get user link.",
Detail: err.Error(),
})
return
}
claims := link.Claims.MergedClaims
if claims == nil {
claims = map[string]interface{}{}
}
httpapi.Write(ctx, rw, http.StatusOK, codersdk.OIDCClaimsResponse{
Claims: claims,
})
}
// Returns whether the initial user has been created or not.
//
// @Summary Check initial user created
+1 -1
View File
@@ -41,7 +41,7 @@ func (api *API) PrimaryRegion(ctx context.Context) (codersdk.Region, error) {
ID: deploymentID,
Name: "primary",
DisplayName: proxy.DisplayName,
IconURL: proxy.IconURL,
IconURL: proxy.IconUrl,
Healthy: true,
PathAppURL: api.AccessURL.String(),
WildcardHostname: appurl.SubdomainAppHost(api.AppHostname, api.AccessURL),
+3 -1
View File
@@ -4372,7 +4372,9 @@ func TestWorkspaceWithEphemeralRichParameters(t *testing.T) {
}},
})
coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID, func(request *codersdk.CreateTemplateRequest) {
request.UseClassicParameterFlow = ptr.Ref(true) // TODO: Remove this when dynamic parameters handles this case
})
// Create workspace with default values
workspace := coderdtest.CreateWorkspace(t, client, template.ID)
+1 -4
View File
@@ -49,7 +49,6 @@ type Chat struct {
CreatedAt time.Time `json:"created_at" format:"date-time"`
UpdatedAt time.Time `json:"updated_at" format:"date-time"`
Archived bool `json:"archived"`
MCPServerIDs []uuid.UUID `json:"mcp_server_ids" format:"uuid"`
}
// ChatMessage represents a single message in a chat.
@@ -268,7 +267,6 @@ type CreateChatRequest struct {
Content []ChatInputPart `json:"content"`
WorkspaceID *uuid.UUID `json:"workspace_id,omitempty" format:"uuid"`
ModelConfigID *uuid.UUID `json:"model_config_id,omitempty" format:"uuid"`
MCPServerIDs []uuid.UUID `json:"mcp_server_ids,omitempty" format:"uuid"`
}
// UpdateChatRequest is the request to update a chat.
@@ -281,7 +279,6 @@ type UpdateChatRequest struct {
type CreateChatMessageRequest struct {
Content []ChatInputPart `json:"content"`
ModelConfigID *uuid.UUID `json:"model_config_id,omitempty" format:"uuid"`
MCPServerIDs *[]uuid.UUID `json:"mcp_server_ids,omitempty" format:"uuid"`
}
// EditChatMessageRequest is the request to edit a user message in a chat.
@@ -441,7 +438,7 @@ type ChatModelOpenAIProviderOptions struct {
MaxCompletionTokens *int64 `json:"max_completion_tokens,omitempty" description:"Upper bound on tokens the model may generate"`
TextVerbosity *string `json:"text_verbosity,omitempty" description:"Controls the verbosity of the text response" enum:"low,medium,high"`
Prediction map[string]any `json:"prediction,omitempty" description:"Predicted output content to speed up responses" hidden:"true"`
Store *bool `json:"store,omitempty" description:"Whether to store the response on OpenAI for later retrieval via the API and dashboard logs"`
Store *bool `json:"store,omitempty" description:"Whether to store the output for model distillation or evals" hidden:"true"`
Metadata map[string]any `json:"metadata,omitempty" description:"Arbitrary metadata to attach to the request" hidden:"true"`
PromptCacheKey *string `json:"prompt_cache_key,omitempty" description:"Key for enabling cross-request prompt caching"`
SafetyIdentifier *string `json:"safety_identifier,omitempty" description:"Developer-specific safety identifier for the request" hidden:"true"`
-191
View File
@@ -1,191 +0,0 @@
package codersdk
import (
"context"
"encoding/json"
"fmt"
"net/http"
"time"
"github.com/google/uuid"
)
// MCPServerOAuth2ConnectURL returns the URL the user should visit to
// start the OAuth2 flow for an MCP server. The frontend opens this
// in a new window/popup.
func (c *Client) MCPServerOAuth2ConnectURL(id uuid.UUID) string {
return fmt.Sprintf("%s/api/experimental/mcp/servers/%s/oauth2/connect", c.URL.String(), id)
}
// MCPServerOAuth2Disconnect removes the user's OAuth2 token for an
// MCP server.
func (c *Client) MCPServerOAuth2Disconnect(ctx context.Context, id uuid.UUID) error {
res, err := c.Request(ctx, http.MethodDelete, fmt.Sprintf("/api/experimental/mcp/servers/%s/oauth2/disconnect", id), nil)
if err != nil {
return err
}
defer res.Body.Close()
if res.StatusCode != http.StatusNoContent {
return ReadBodyAsError(res)
}
return nil
}
// MCPServerConfig represents an admin-configured MCP server.
type MCPServerConfig struct {
ID uuid.UUID `json:"id" format:"uuid"`
DisplayName string `json:"display_name"`
Slug string `json:"slug"`
Description string `json:"description"`
IconURL string `json:"icon_url"`
Transport string `json:"transport"` // "streamable_http" or "sse"
URL string `json:"url"`
AuthType string `json:"auth_type"` // "none", "oauth2", "api_key", "custom_headers"
// OAuth2 fields (only populated for admins).
OAuth2ClientID string `json:"oauth2_client_id,omitempty"`
HasOAuth2Secret bool `json:"has_oauth2_secret"`
OAuth2AuthURL string `json:"oauth2_auth_url,omitempty"`
OAuth2TokenURL string `json:"oauth2_token_url,omitempty"`
OAuth2Scopes string `json:"oauth2_scopes,omitempty"`
// API key fields (only populated for admins).
APIKeyHeader string `json:"api_key_header,omitempty"`
HasAPIKey bool `json:"has_api_key"`
HasCustomHeaders bool `json:"has_custom_headers"`
// Tool governance.
ToolAllowList []string `json:"tool_allow_list"`
ToolDenyList []string `json:"tool_deny_list"`
// Availability policy set by admin.
Availability string `json:"availability"` // "force_on", "default_on", "default_off"
Enabled bool `json:"enabled"`
CreatedAt time.Time `json:"created_at" format:"date-time"`
UpdatedAt time.Time `json:"updated_at" format:"date-time"`
// Per-user state (populated for non-admin requests).
AuthConnected bool `json:"auth_connected"`
}
// CreateMCPServerConfigRequest is the request to create a new MCP server config.
type CreateMCPServerConfigRequest struct {
DisplayName string `json:"display_name" validate:"required"`
Slug string `json:"slug" validate:"required"`
Description string `json:"description"`
IconURL string `json:"icon_url"`
Transport string `json:"transport" validate:"required,oneof=streamable_http sse"`
URL string `json:"url" validate:"required,url"`
AuthType string `json:"auth_type" validate:"required,oneof=none oauth2 api_key custom_headers"`
OAuth2ClientID string `json:"oauth2_client_id,omitempty"`
OAuth2ClientSecret string `json:"oauth2_client_secret,omitempty"`
OAuth2AuthURL string `json:"oauth2_auth_url,omitempty" validate:"omitempty,url"`
OAuth2TokenURL string `json:"oauth2_token_url,omitempty" validate:"omitempty,url"`
OAuth2Scopes string `json:"oauth2_scopes,omitempty"`
APIKeyHeader string `json:"api_key_header,omitempty"`
APIKeyValue string `json:"api_key_value,omitempty"`
CustomHeaders map[string]string `json:"custom_headers,omitempty"`
ToolAllowList []string `json:"tool_allow_list,omitempty"`
ToolDenyList []string `json:"tool_deny_list,omitempty"`
Availability string `json:"availability" validate:"required,oneof=force_on default_on default_off"`
Enabled bool `json:"enabled"`
}
// UpdateMCPServerConfigRequest is the request to update an MCP server config.
type UpdateMCPServerConfigRequest struct {
DisplayName *string `json:"display_name,omitempty"`
Slug *string `json:"slug,omitempty"`
Description *string `json:"description,omitempty"`
IconURL *string `json:"icon_url,omitempty"`
Transport *string `json:"transport,omitempty" validate:"omitempty,oneof=streamable_http sse"`
URL *string `json:"url,omitempty" validate:"omitempty,url"`
AuthType *string `json:"auth_type,omitempty" validate:"omitempty,oneof=none oauth2 api_key custom_headers"`
OAuth2ClientID *string `json:"oauth2_client_id,omitempty"`
OAuth2ClientSecret *string `json:"oauth2_client_secret,omitempty"`
OAuth2AuthURL *string `json:"oauth2_auth_url,omitempty" validate:"omitempty,url"`
OAuth2TokenURL *string `json:"oauth2_token_url,omitempty" validate:"omitempty,url"`
OAuth2Scopes *string `json:"oauth2_scopes,omitempty"`
APIKeyHeader *string `json:"api_key_header,omitempty"`
APIKeyValue *string `json:"api_key_value,omitempty"`
CustomHeaders *map[string]string `json:"custom_headers,omitempty"`
ToolAllowList *[]string `json:"tool_allow_list,omitempty"`
ToolDenyList *[]string `json:"tool_deny_list,omitempty"`
Availability *string `json:"availability,omitempty" validate:"omitempty,oneof=force_on default_on default_off"`
Enabled *bool `json:"enabled,omitempty"`
}
func (c *Client) MCPServerConfigs(ctx context.Context) ([]MCPServerConfig, error) {
res, err := c.Request(ctx, http.MethodGet, "/api/experimental/mcp/servers", nil)
if err != nil {
return nil, err
}
defer res.Body.Close()
if res.StatusCode != http.StatusOK {
return nil, ReadBodyAsError(res)
}
var configs []MCPServerConfig
return configs, json.NewDecoder(res.Body).Decode(&configs)
}
func (c *Client) MCPServerConfigByID(ctx context.Context, id uuid.UUID) (MCPServerConfig, error) {
res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/experimental/mcp/servers/%s", id), nil)
if err != nil {
return MCPServerConfig{}, err
}
defer res.Body.Close()
if res.StatusCode != http.StatusOK {
return MCPServerConfig{}, ReadBodyAsError(res)
}
var config MCPServerConfig
return config, json.NewDecoder(res.Body).Decode(&config)
}
func (c *Client) CreateMCPServerConfig(ctx context.Context, req CreateMCPServerConfigRequest) (MCPServerConfig, error) {
res, err := c.Request(ctx, http.MethodPost, "/api/experimental/mcp/servers", req)
if err != nil {
return MCPServerConfig{}, err
}
defer res.Body.Close()
if res.StatusCode != http.StatusCreated {
return MCPServerConfig{}, ReadBodyAsError(res)
}
var config MCPServerConfig
return config, json.NewDecoder(res.Body).Decode(&config)
}
func (c *Client) UpdateMCPServerConfig(ctx context.Context, id uuid.UUID, req UpdateMCPServerConfigRequest) (MCPServerConfig, error) {
res, err := c.Request(ctx, http.MethodPatch, fmt.Sprintf("/api/experimental/mcp/servers/%s", id), req)
if err != nil {
return MCPServerConfig{}, err
}
defer res.Body.Close()
if res.StatusCode != http.StatusOK {
return MCPServerConfig{}, ReadBodyAsError(res)
}
var config MCPServerConfig
return config, json.NewDecoder(res.Body).Decode(&config)
}
func (c *Client) DeleteMCPServerConfig(ctx context.Context, id uuid.UUID) error {
res, err := c.Request(ctx, http.MethodDelete, fmt.Sprintf("/api/experimental/mcp/servers/%s", id), nil)
if err != nil {
return err
}
defer res.Body.Close()
if res.StatusCode != http.StatusNoContent {
return ReadBodyAsError(res)
}
return nil
}
-22
View File
@@ -339,14 +339,6 @@ type OIDCAuthMethod struct {
IconURL string `json:"iconUrl"`
}
// OIDCClaimsResponse represents the merged OIDC claims for a user.
type OIDCClaimsResponse struct {
// Claims are the merged claims from the OIDC provider. These
// are the union of the ID token claims and the userinfo claims,
// where userinfo claims take precedence on conflict.
Claims map[string]interface{} `json:"claims"`
}
type UserParameter struct {
Name string `json:"name"`
Value string `json:"value"`
@@ -731,20 +723,6 @@ func (c *Client) UserRoles(ctx context.Context, user string) (UserRoles, error)
return roles, json.NewDecoder(res.Body).Decode(&roles)
}
// UserOIDCClaims returns the merged OIDC claims for the authenticated user.
func (c *Client) UserOIDCClaims(ctx context.Context) (OIDCClaimsResponse, error) {
res, err := c.Request(ctx, http.MethodGet, "/api/v2/users/oidc-claims", nil)
if err != nil {
return OIDCClaimsResponse{}, err
}
defer res.Body.Close()
if res.StatusCode != http.StatusOK {
return OIDCClaimsResponse{}, ReadBodyAsError(res)
}
var resp OIDCClaimsResponse
return resp, json.NewDecoder(res.Body).Decode(&resp)
}
// LoginWithPassword creates a session token authenticating with an email and password.
// Call `SetSessionToken()` to apply the newly acquired token to the client.
func (c *Client) LoginWithPassword(ctx context.Context, req LoginWithPasswordRequest) (LoginWithPasswordResponse, error) {
Binary file not shown.

Before

Width:  |  Height:  |  Size: 107 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 360 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 134 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 148 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 139 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 156 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 112 KiB

-52
View File
@@ -1,52 +0,0 @@
# Amazon Web Services
This guide is designed to get you up and running with a Coder proof-of-concept
on AWS EKS using a [Coder-provided CloudFormation Template](https://codermktplc-assets.s3.us-east-1.amazonaws.com/community-edition/eks-cluster.yaml). The deployed AWS Coder Reference Architecture is below:
![Coder on AWS EKS](../../images/platforms/aws/aws-coder-refarch-v1.png)
If you are familiar with EC2 however, you can use our
[install script](../cli.md) to run Coder on any popular Linux distribution.
## Requirements
This guide assumes your AWS account has `AdministratorAccess` permissions given the number and types of AWS Services deployed. After deployment of Coder into a AWS POC or Sandbox account, it is recommended that the permissions be scaled back to only what your deployment requires.
## Launch Coder Community Edition from the from AWS Marketplace
We publish an Ubuntu 22.04 Container Image with Coder pre-installed and a supporting AWS Marketplace Launch guide. Search for `Coder Community Edition` in the AWS Marketplace or
[launch directly from the Coder listing](https://aws.amazon.com/marketplace/pp/prodview-34vmflqoi3zo4).
![Coder on AWS Marketplace](../../images/platforms/aws/marketplace-ce.png)
Use `View purchase options` to create a zero-cost subscription to Coder Community Edition and then use `Launch your software` to deploy to your current AWS Account.
![AWS Marketplace Subscription](../../images/platforms/aws/marketplace-sub.png)
Select `EKS` for the Launch setup, choose the desired/lastest version to deploy, and then review the **Launch** instructions for more detail explanation of what will be deployed. When you are ready to proceed, click the `CloudFormation Template` link under **Deployment templates**.
![AWS Marketplace Launch](../../images/platforms/aws/marketplace-launch.png)
You will then be taken to the AWS Management Console, CloudFormation `Create stack` in the currently selected AWS Region. Select `Next` to view the Coder Community Edition CloudFormation Stack parameters.
![AWS Marketplace Stack](../../images/platforms/aws/marketplace-stack.png)
The default parameters will support POCs and small team deployments of Coder using `t3.large` (2 cores and 8 GB memory) Nodes. While the deployment uses EKS Auto-mode and will scale using Karpenter, keep in mind this platforms is intended for proof-of-concept
deployments. You should adjust your infrastructure when preparing for
production use. See: [Scaling Coder](../../admin/infrastructure/index.md)
![AWS Marketplace Parameters](../../images/platforms/aws/marketplace-parm.png)
Select `Next` and follow the prompts to submit the CloudFormation Stack. Deployment of the Stack can take 10-20 minutes, and will create EKS related sub-stacks and a CodeBuild pipeline that automates the initial Helm deployment of Coder and final AWS network services integration. Once the Stack successfully creates, access the `Outputs` as shown below:
![AWS Marketplace Outputs](../../images/platforms/aws/marketplace-output.png)
Look for the `CoderURL` output link, and use to navigate to your newly deployed instance of Coder Community Edition.
That's all! Use the UI to create your first user, template, and workspace. We recommend starting with a Kubernetes template since Coder Community Edition is deployed to EKS.
### Next steps
- [IDEs with Coder](../../user-guides/workspace-access/index.md)
- [Writing custom templates for Coder](../../admin/templates/index.md)
- [Configure the Coder server](../../admin/setup/index.md)
- [Use your own domain + TLS](../../admin/setup/index.md#tls--reverse-proxy)
+90
View File
@@ -0,0 +1,90 @@
# Amazon Web Services
This guide is designed to get you up and running with a Coder proof-of-concept
VM on AWS EC2 using a [Coder-provided AMI](https://github.com/coder/packages).
If you are familiar with EC2 however, you can use our
[install script](../cli.md) to run Coder on any popular Linux distribution.
## Requirements
This guide assumes your AWS account has `AmazonEC2FullAccess` permissions.
## Launch a Coder instance from the from AWS Marketplace
We publish an Ubuntu 22.04 AMI with Coder and Docker pre-installed. Search for
`Coder` in the EC2 "Launch an Instance" screen or
[launch directly from the marketplace](https://aws.amazon.com/marketplace/pp/prodview-zaoq7tiogkxhc).
![Coder on AWS Marketplace](../../images/platforms/aws/marketplace.png)
Be sure to keep the default firewall (SecurityGroup) options checked so you can
connect over HTTP, HTTPS, and SSH.
![AWS Security Groups](../../images/platforms/aws/security-groups.png)
We recommend keeping the default instance type (`t2.xlarge`, 4 cores and 16 GB
memory) if you plan on provisioning Docker containers as workspaces on this EC2
instance. Keep in mind this platforms is intended for proof-of-concept
deployments and you should adjust your infrastructure when preparing for
production use. See: [Scaling Coder](../../admin/infrastructure/index.md)
Be sure to add a keypair so that you can connect over SSH to further
[configure Coder](../../admin/setup/index.md).
After launching the instance, wait 30 seconds and navigate to the public IPv4
address. You should be redirected to a public tunnel URL.
<video autoplay playsinline loop>
<source src="https://github.com/coder/coder/blob/main/docs/images/platforms/aws/launch.mp4?raw=true" type="video/mp4">
Your browser does not support the video tag.
</video>
That's all! Use the UI to create your first user, template, and workspace. We
recommend starting with a Docker template since the instance has Docker
pre-installed.
![Coder Workspace and IDE in AWS EC2](../../images/platforms/aws/workspace.png)
## Configuring Coder server
Coder is primarily configured by server-side flags and environment variables.
Given you created or added key-pairs when launching the instance, you can
[configure your Coder deployment](../../admin/setup/index.md) by logging in via
SSH or using the console:
<!-- TOOD(@kylecarbs): fix this weird formatting (https://imgur.com/a/LAUY3cT) -->
```sh
ssh ubuntu@<ec2-public-IPv4>
sudo vim /etc/coder.d/coder.env # edit config
sudo systemctl daemon-reload
sudo systemctl restart coder # restart Coder
```
## Give developers EC2 workspaces (optional)
Instead of running containers on the Coder instance, you can offer developers
full EC2 instances with the
[aws-linux](https://github.com/coder/coder/tree/main/examples/templates/aws-linux)
template.
Before you add the AWS template from the dashboard or CLI, you'll need to modify
the instance IAM role.
![Modify IAM role](../../images/platforms/aws/modify-iam.png)
You must create or select a role that has `EC2FullAccess` permissions or a
limited
[Coder-specific permissions policy](https://github.com/coder/coder/tree/main/examples/templates/aws-linux#required-permissions--policy).
From there, you can import the AWS starter template in the dashboard and begin
creating VM-based workspaces.
![Modify IAM role](../../images/platforms/aws/aws-linux.png)
### Next steps
- [IDEs with Coder](../../user-guides/workspace-access/index.md)
- [Writing custom templates for Coder](../../admin/templates/index.md)
- [Configure the Coder server](../../admin/setup/index.md)
- [Use your own domain + TLS](../../admin/setup/index.md#tls--reverse-proxy)
+3 -2
View File
@@ -7,9 +7,10 @@ cloud of choice.
## AWS
We publish Coder Community Edition on the AWS Marketplace. Follow the tutorial here:
We publish an EC2 image with Coder pre-installed. Follow the tutorial here:
- [Install Coder Community Edition from AWS Marketplace](./aws-marketplace.md)
- [Install Coder on AWS EC2](./ec2.md)
- [Install Coder on AWS EKS](../kubernetes.md#aws)
Alternatively, install the [CLI binary](../cli.md) on any Linux machine or
follow our [Kubernetes](../kubernetes.md) documentation to install Coder on an
+3 -3
View File
@@ -137,9 +137,9 @@
"icon_path": "./images/icons/cloud.svg",
"children": [
{
"title": "AWS Marketplace",
"description": "Install Coder via AWS Marketplace",
"path": "./install/cloud/aws-marketplace.md"
"title": "AWS EC2",
"description": "Install Coder on AWS EC2",
"path": "./install/cloud/ec2.md"
},
{
"title": "GCP Compute Engine",
-14
View File
@@ -5768,20 +5768,6 @@ Only certain features set these fields: - FeatureManagedAgentLimit|
| `iconUrl` | string | false | | |
| `signInText` | string | false | | |
## codersdk.OIDCClaimsResponse
```json
{
"claims": {}
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
|----------|--------|----------|--------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `claims` | object | false | | Claims are the merged claims from the OIDC provider. These are the union of the ID token claims and the userinfo claims, where userinfo claims take precedence on conflict. |
## codersdk.OIDCConfig
```json
-31
View File
@@ -376,37 +376,6 @@ curl -X GET http://coder-server:8080/api/v2/users/oauth2/github/device \
To perform this operation, you must be authenticated. [Learn more](authentication.md).
## Get OIDC claims for the authenticated user
### Code samples
```shell
# Example request using curl
curl -X GET http://coder-server:8080/api/v2/users/oidc-claims \
-H 'Accept: application/json' \
-H 'Coder-Session-Token: API_KEY'
```
`GET /users/oidc-claims`
### Example responses
> 200 Response
```json
{
"claims": {}
}
```
### Responses
| Status | Meaning | Description | Schema |
|--------|---------------------------------------------------------|-------------|----------------------------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.OIDCClaimsResponse](schemas.md#codersdkoidcclaimsresponse) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
## OpenID Connect Callback
### Code samples
+9 -10
View File
@@ -15,13 +15,12 @@ coder users [subcommand]
## Subcommands
| Name | Purpose |
|----------------------------------------------------|---------------------------------------------------------------------------------------|
| [<code>create</code>](./users_create.md) | Create a new user. |
| [<code>list</code>](./users_list.md) | Prints the list of users. |
| [<code>show</code>](./users_show.md) | Show a single user. Use 'me' to indicate the currently authenticated user. |
| [<code>delete</code>](./users_delete.md) | Delete a user by username or user_id. |
| [<code>edit-roles</code>](./users_edit-roles.md) | Edit a user's roles by username or id |
| [<code>oidc-claims</code>](./users_oidc-claims.md) | Display the OIDC claims for the authenticated user. |
| [<code>activate</code>](./users_activate.md) | Update a user's status to 'active'. Active users can fully interact with the platform |
| [<code>suspend</code>](./users_suspend.md) | Update a user's status to 'suspended'. A suspended user cannot log into the platform |
| Name | Purpose |
|--------------------------------------------------|---------------------------------------------------------------------------------------|
| [<code>create</code>](./users_create.md) | Create a new user. |
| [<code>list</code>](./users_list.md) | Prints the list of users. |
| [<code>show</code>](./users_show.md) | Show a single user. Use 'me' to indicate the currently authenticated user. |
| [<code>delete</code>](./users_delete.md) | Delete a user by username or user_id. |
| [<code>edit-roles</code>](./users_edit-roles.md) | Edit a user's roles by username or id |
| [<code>activate</code>](./users_activate.md) | Update a user's status to 'active'. Active users can fully interact with the platform |
| [<code>suspend</code>](./users_suspend.md) | Update a user's status to 'suspended'. A suspended user cannot log into the platform |
-42
View File
@@ -1,42 +0,0 @@
<!-- DO NOT EDIT | GENERATED CONTENT -->
# users oidc-claims
Display the OIDC claims for the authenticated user.
## Usage
```console
coder users oidc-claims [flags]
```
## Description
```console
- Display your OIDC claims:
$ coder users oidc-claims
- Display your OIDC claims as JSON:
$ coder users oidc-claims -o json
```
## Options
### -c, --column
| | |
|---------|---------------------------|
| Type | <code>[key\|value]</code> |
| Default | <code>key,value</code> |
Columns to display in table output.
### -o, --output
| | |
|---------|--------------------------|
| Type | <code>table\|json</code> |
| Default | <code>table</code> |
Output format.
+2 -2
View File
@@ -204,7 +204,7 @@ func (api *API) patchPrimaryWorkspaceProxy(req codersdk.PatchWorkspaceProxy, rw
args := database.UpsertDefaultProxyParams{
DisplayName: req.DisplayName,
IconURL: req.Icon,
IconUrl: req.Icon,
}
if req.DisplayName == "" || req.Icon == "" {
// If the user has not specified an update value, use the existing value.
@@ -217,7 +217,7 @@ func (api *API) patchPrimaryWorkspaceProxy(req codersdk.PatchWorkspaceProxy, rw
args.DisplayName = existing.DisplayName
}
if req.Icon == "" {
args.IconURL = existing.IconURL
args.IconUrl = existing.IconUrl
}
}
-195
View File
@@ -471,201 +471,6 @@ func (db *dbCrypt) UpdateChatProvider(ctx context.Context, params database.Updat
return provider, nil
}
// decryptMCPServerConfig decrypts all encrypted fields on a
// single MCPServerConfig in place.
func (db *dbCrypt) decryptMCPServerConfig(cfg *database.MCPServerConfig) error {
if err := db.decryptField(&cfg.OAuth2ClientSecret, cfg.OAuth2ClientSecretKeyID); err != nil {
return err
}
if err := db.decryptField(&cfg.APIKeyValue, cfg.APIKeyValueKeyID); err != nil {
return err
}
return db.decryptField(&cfg.CustomHeaders, cfg.CustomHeadersKeyID)
}
// decryptMCPServerUserToken decrypts all encrypted fields on a
// single MCPServerUserToken in place.
func (db *dbCrypt) decryptMCPServerUserToken(tok *database.MCPServerUserToken) error {
if err := db.decryptField(&tok.AccessToken, tok.AccessTokenKeyID); err != nil {
return err
}
return db.decryptField(&tok.RefreshToken, tok.RefreshTokenKeyID)
}
func (db *dbCrypt) GetMCPServerConfigByID(ctx context.Context, id uuid.UUID) (database.MCPServerConfig, error) {
cfg, err := db.Store.GetMCPServerConfigByID(ctx, id)
if err != nil {
return database.MCPServerConfig{}, err
}
if err := db.decryptMCPServerConfig(&cfg); err != nil {
return database.MCPServerConfig{}, err
}
return cfg, nil
}
func (db *dbCrypt) GetMCPServerConfigBySlug(ctx context.Context, slug string) (database.MCPServerConfig, error) {
cfg, err := db.Store.GetMCPServerConfigBySlug(ctx, slug)
if err != nil {
return database.MCPServerConfig{}, err
}
if err := db.decryptMCPServerConfig(&cfg); err != nil {
return database.MCPServerConfig{}, err
}
return cfg, nil
}
func (db *dbCrypt) GetMCPServerConfigs(ctx context.Context) ([]database.MCPServerConfig, error) {
cfgs, err := db.Store.GetMCPServerConfigs(ctx)
if err != nil {
return nil, err
}
for i := range cfgs {
if err := db.decryptMCPServerConfig(&cfgs[i]); err != nil {
return nil, err
}
}
return cfgs, nil
}
func (db *dbCrypt) GetMCPServerConfigsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.MCPServerConfig, error) {
cfgs, err := db.Store.GetMCPServerConfigsByIDs(ctx, ids)
if err != nil {
return nil, err
}
for i := range cfgs {
if err := db.decryptMCPServerConfig(&cfgs[i]); err != nil {
return nil, err
}
}
return cfgs, nil
}
func (db *dbCrypt) GetEnabledMCPServerConfigs(ctx context.Context) ([]database.MCPServerConfig, error) {
cfgs, err := db.Store.GetEnabledMCPServerConfigs(ctx)
if err != nil {
return nil, err
}
for i := range cfgs {
if err := db.decryptMCPServerConfig(&cfgs[i]); err != nil {
return nil, err
}
}
return cfgs, nil
}
func (db *dbCrypt) GetForcedMCPServerConfigs(ctx context.Context) ([]database.MCPServerConfig, error) {
cfgs, err := db.Store.GetForcedMCPServerConfigs(ctx)
if err != nil {
return nil, err
}
for i := range cfgs {
if err := db.decryptMCPServerConfig(&cfgs[i]); err != nil {
return nil, err
}
}
return cfgs, nil
}
func (db *dbCrypt) GetMCPServerUserToken(ctx context.Context, arg database.GetMCPServerUserTokenParams) (database.MCPServerUserToken, error) {
tok, err := db.Store.GetMCPServerUserToken(ctx, arg)
if err != nil {
return database.MCPServerUserToken{}, err
}
if err := db.decryptMCPServerUserToken(&tok); err != nil {
return database.MCPServerUserToken{}, err
}
return tok, nil
}
func (db *dbCrypt) GetMCPServerUserTokensByUserID(ctx context.Context, userID uuid.UUID) ([]database.MCPServerUserToken, error) {
toks, err := db.Store.GetMCPServerUserTokensByUserID(ctx, userID)
if err != nil {
return nil, err
}
for i := range toks {
if err := db.decryptMCPServerUserToken(&toks[i]); err != nil {
return nil, err
}
}
return toks, nil
}
func (db *dbCrypt) InsertMCPServerConfig(ctx context.Context, params database.InsertMCPServerConfigParams) (database.MCPServerConfig, error) {
if strings.TrimSpace(params.OAuth2ClientSecret) == "" {
params.OAuth2ClientSecretKeyID = sql.NullString{}
} else if err := db.encryptField(&params.OAuth2ClientSecret, &params.OAuth2ClientSecretKeyID); err != nil {
return database.MCPServerConfig{}, err
}
if strings.TrimSpace(params.APIKeyValue) == "" {
params.APIKeyValueKeyID = sql.NullString{}
} else if err := db.encryptField(&params.APIKeyValue, &params.APIKeyValueKeyID); err != nil {
return database.MCPServerConfig{}, err
}
if strings.TrimSpace(params.CustomHeaders) == "" {
params.CustomHeadersKeyID = sql.NullString{}
} else if err := db.encryptField(&params.CustomHeaders, &params.CustomHeadersKeyID); err != nil {
return database.MCPServerConfig{}, err
}
cfg, err := db.Store.InsertMCPServerConfig(ctx, params)
if err != nil {
return database.MCPServerConfig{}, err
}
if err := db.decryptMCPServerConfig(&cfg); err != nil {
return database.MCPServerConfig{}, err
}
return cfg, nil
}
func (db *dbCrypt) UpdateMCPServerConfig(ctx context.Context, params database.UpdateMCPServerConfigParams) (database.MCPServerConfig, error) {
if strings.TrimSpace(params.OAuth2ClientSecret) == "" {
params.OAuth2ClientSecretKeyID = sql.NullString{}
} else if err := db.encryptField(&params.OAuth2ClientSecret, &params.OAuth2ClientSecretKeyID); err != nil {
return database.MCPServerConfig{}, err
}
if strings.TrimSpace(params.APIKeyValue) == "" {
params.APIKeyValueKeyID = sql.NullString{}
} else if err := db.encryptField(&params.APIKeyValue, &params.APIKeyValueKeyID); err != nil {
return database.MCPServerConfig{}, err
}
if strings.TrimSpace(params.CustomHeaders) == "" {
params.CustomHeadersKeyID = sql.NullString{}
} else if err := db.encryptField(&params.CustomHeaders, &params.CustomHeadersKeyID); err != nil {
return database.MCPServerConfig{}, err
}
cfg, err := db.Store.UpdateMCPServerConfig(ctx, params)
if err != nil {
return database.MCPServerConfig{}, err
}
if err := db.decryptMCPServerConfig(&cfg); err != nil {
return database.MCPServerConfig{}, err
}
return cfg, nil
}
func (db *dbCrypt) UpsertMCPServerUserToken(ctx context.Context, params database.UpsertMCPServerUserTokenParams) (database.MCPServerUserToken, error) {
if strings.TrimSpace(params.AccessToken) == "" {
params.AccessTokenKeyID = sql.NullString{}
} else if err := db.encryptField(&params.AccessToken, &params.AccessTokenKeyID); err != nil {
return database.MCPServerUserToken{}, err
}
if strings.TrimSpace(params.RefreshToken) == "" {
params.RefreshTokenKeyID = sql.NullString{}
} else if err := db.encryptField(&params.RefreshToken, &params.RefreshTokenKeyID); err != nil {
return database.MCPServerUserToken{}, err
}
tok, err := db.Store.UpsertMCPServerUserToken(ctx, params)
if err != nil {
return database.MCPServerUserToken{}, err
}
if err := db.decryptMCPServerUserToken(&tok); err != nil {
return database.MCPServerUserToken{}, err
}
return tok, nil
}
func (db *dbCrypt) encryptField(field *string, digest *sql.NullString) error {
// If no cipher is loaded, then we can't encrypt anything!
if db.ciphers == nil || db.primaryCipherDigest == "" {
-299
View File
@@ -9,7 +9,6 @@ import (
"testing"
"time"
"github.com/google/uuid"
"github.com/lib/pq"
"github.com/stretchr/testify/require"
"go.uber.org/mock/gomock"
@@ -879,301 +878,3 @@ func fakeBase64RandomData(t *testing.T, n int) string {
require.NoError(t, err)
return base64.StdEncoding.EncodeToString(b)
}
// requireMCPServerConfigDecrypted verifies all encrypted fields on an
// MCPServerConfig match the expected plaintext values and carry the
// correct key-ID.
func requireMCPServerConfigDecrypted(
t *testing.T,
cfg database.MCPServerConfig,
ciphers []Cipher,
wantSecret, wantAPIKey, wantHeaders string,
) {
t.Helper()
require.Equal(t, wantSecret, cfg.OAuth2ClientSecret)
require.Equal(t, wantAPIKey, cfg.APIKeyValue)
require.Equal(t, wantHeaders, cfg.CustomHeaders)
require.Equal(t, ciphers[0].HexDigest(), cfg.OAuth2ClientSecretKeyID.String)
require.Equal(t, ciphers[0].HexDigest(), cfg.APIKeyValueKeyID.String)
require.Equal(t, ciphers[0].HexDigest(), cfg.CustomHeadersKeyID.String)
}
// requireMCPServerConfigRawEncrypted reads the config from the raw
// (unwrapped) store and asserts every secret field is encrypted.
func requireMCPServerConfigRawEncrypted(
ctx context.Context,
t *testing.T,
rawDB database.Store,
cfgID uuid.UUID,
ciphers []Cipher,
wantSecret, wantAPIKey, wantHeaders string,
) {
t.Helper()
raw, err := rawDB.GetMCPServerConfigByID(ctx, cfgID)
require.NoError(t, err)
requireEncryptedEquals(t, ciphers[0], raw.OAuth2ClientSecret, wantSecret)
requireEncryptedEquals(t, ciphers[0], raw.APIKeyValue, wantAPIKey)
requireEncryptedEquals(t, ciphers[0], raw.CustomHeaders, wantHeaders)
}
// TestMCPServerConfigs exercises every MCP-server-config query on the
// encrypted store (dbCrypt): each subtest verifies that values read back
// through the crypt layer are decrypted, while the raw store still holds
// ciphertext for the secret fields.
func TestMCPServerConfigs(t *testing.T) {
	t.Parallel()
	ctx := context.Background()
	const (
		//nolint:gosec // test credentials
		oauthSecret   = "my-oauth-secret"
		apiKeyValue   = "my-api-key"
		customHeaders = `{"X-Custom":"header-value"}`
	)
	// insertConfig is a small helper that creates a user and an MCP
	// server config through the encrypted store, returning both.
	// It also asserts the returned config is already decrypted.
	insertConfig := func(t *testing.T, crypt *dbCrypt, ciphers []Cipher) database.MCPServerConfig {
		t.Helper()
		user := dbgen.User(t, crypt, database.User{})
		// Random slug suffix keeps inserts unique across parallel subtests.
		cfg, err := crypt.InsertMCPServerConfig(ctx, database.InsertMCPServerConfigParams{
			DisplayName:        "Test MCP Server",
			Slug:               "test-mcp-" + uuid.New().String()[:8],
			Description:        "test description",
			Url:                "https://mcp.example.com",
			Transport:          "streamable_http",
			AuthType:           "oauth2",
			OAuth2ClientID:     "client-id",
			OAuth2ClientSecret: oauthSecret,
			APIKeyValue:        apiKeyValue,
			CustomHeaders:      customHeaders,
			ToolAllowList:      []string{},
			ToolDenyList:       []string{},
			Availability:       "force_on",
			Enabled:            true,
			CreatedBy:          user.ID,
			UpdatedBy:          user.ID,
		})
		require.NoError(t, err)
		requireMCPServerConfigDecrypted(t, cfg, ciphers, oauthSecret, apiKeyValue, customHeaders)
		return cfg
	}
	// Insert path: returned value decrypted, stored value encrypted.
	t.Run("InsertMCPServerConfig", func(t *testing.T) {
		t.Parallel()
		db, crypt, ciphers := setup(t)
		cfg := insertConfig(t, crypt, ciphers)
		requireMCPServerConfigRawEncrypted(ctx, t, db, cfg.ID, ciphers, oauthSecret, apiKeyValue, customHeaders)
	})
	// Lookup by primary key.
	t.Run("GetMCPServerConfigByID", func(t *testing.T) {
		t.Parallel()
		db, crypt, ciphers := setup(t)
		cfg := insertConfig(t, crypt, ciphers)
		got, err := crypt.GetMCPServerConfigByID(ctx, cfg.ID)
		require.NoError(t, err)
		requireMCPServerConfigDecrypted(t, got, ciphers, oauthSecret, apiKeyValue, customHeaders)
		requireMCPServerConfigRawEncrypted(ctx, t, db, cfg.ID, ciphers, oauthSecret, apiKeyValue, customHeaders)
	})
	// Lookup by slug.
	t.Run("GetMCPServerConfigBySlug", func(t *testing.T) {
		t.Parallel()
		db, crypt, ciphers := setup(t)
		cfg := insertConfig(t, crypt, ciphers)
		got, err := crypt.GetMCPServerConfigBySlug(ctx, cfg.Slug)
		require.NoError(t, err)
		requireMCPServerConfigDecrypted(t, got, ciphers, oauthSecret, apiKeyValue, customHeaders)
		requireMCPServerConfigRawEncrypted(ctx, t, db, cfg.ID, ciphers, oauthSecret, apiKeyValue, customHeaders)
	})
	// List-all path must decrypt every row.
	t.Run("GetMCPServerConfigs", func(t *testing.T) {
		t.Parallel()
		db, crypt, ciphers := setup(t)
		cfg := insertConfig(t, crypt, ciphers)
		cfgs, err := crypt.GetMCPServerConfigs(ctx)
		require.NoError(t, err)
		require.Len(t, cfgs, 1)
		requireMCPServerConfigDecrypted(t, cfgs[0], ciphers, oauthSecret, apiKeyValue, customHeaders)
		requireMCPServerConfigRawEncrypted(ctx, t, db, cfg.ID, ciphers, oauthSecret, apiKeyValue, customHeaders)
	})
	// Batch lookup by ID list.
	t.Run("GetMCPServerConfigsByIDs", func(t *testing.T) {
		t.Parallel()
		db, crypt, ciphers := setup(t)
		cfg := insertConfig(t, crypt, ciphers)
		cfgs, err := crypt.GetMCPServerConfigsByIDs(ctx, []uuid.UUID{cfg.ID})
		require.NoError(t, err)
		require.Len(t, cfgs, 1)
		requireMCPServerConfigDecrypted(t, cfgs[0], ciphers, oauthSecret, apiKeyValue, customHeaders)
		requireMCPServerConfigRawEncrypted(ctx, t, db, cfg.ID, ciphers, oauthSecret, apiKeyValue, customHeaders)
	})
	// Filtered list: enabled configs only (inserted config is Enabled).
	t.Run("GetEnabledMCPServerConfigs", func(t *testing.T) {
		t.Parallel()
		db, crypt, ciphers := setup(t)
		cfg := insertConfig(t, crypt, ciphers)
		cfgs, err := crypt.GetEnabledMCPServerConfigs(ctx)
		require.NoError(t, err)
		require.Len(t, cfgs, 1)
		requireMCPServerConfigDecrypted(t, cfgs[0], ciphers, oauthSecret, apiKeyValue, customHeaders)
		requireMCPServerConfigRawEncrypted(ctx, t, db, cfg.ID, ciphers, oauthSecret, apiKeyValue, customHeaders)
	})
	// Filtered list: forced configs (inserted config uses "force_on").
	t.Run("GetForcedMCPServerConfigs", func(t *testing.T) {
		t.Parallel()
		db, crypt, ciphers := setup(t)
		cfg := insertConfig(t, crypt, ciphers)
		cfgs, err := crypt.GetForcedMCPServerConfigs(ctx)
		require.NoError(t, err)
		require.Len(t, cfgs, 1)
		requireMCPServerConfigDecrypted(t, cfgs[0], ciphers, oauthSecret, apiKeyValue, customHeaders)
		requireMCPServerConfigRawEncrypted(ctx, t, db, cfg.ID, ciphers, oauthSecret, apiKeyValue, customHeaders)
	})
	// Update path: new secrets must be re-encrypted in place.
	t.Run("UpdateMCPServerConfig", func(t *testing.T) {
		t.Parallel()
		db, crypt, ciphers := setup(t)
		cfg := insertConfig(t, crypt, ciphers)
		const (
			//nolint:gosec // test credential
			newSecret  = "updated-oauth-secret"
			newAPIKey  = "updated-api-key"
			newHeaders = `{"X-New":"new-value"}`
		)
		// Carry over all non-secret fields unchanged; only the three
		// encrypted fields receive new plaintext values.
		updated, err := crypt.UpdateMCPServerConfig(ctx, database.UpdateMCPServerConfigParams{
			ID:                 cfg.ID,
			DisplayName:        cfg.DisplayName,
			Slug:               cfg.Slug,
			Description:        cfg.Description,
			Url:                cfg.Url,
			Transport:          cfg.Transport,
			AuthType:           cfg.AuthType,
			OAuth2ClientID:     cfg.OAuth2ClientID,
			OAuth2ClientSecret: newSecret,
			APIKeyValue:        newAPIKey,
			CustomHeaders:      newHeaders,
			ToolAllowList:      cfg.ToolAllowList,
			ToolDenyList:       cfg.ToolDenyList,
			Availability:       cfg.Availability,
			Enabled:            cfg.Enabled,
			UpdatedBy:          cfg.CreatedBy.UUID,
		})
		require.NoError(t, err)
		requireMCPServerConfigDecrypted(t, updated, ciphers, newSecret, newAPIKey, newHeaders)
		requireMCPServerConfigRawEncrypted(ctx, t, db, cfg.ID, ciphers, newSecret, newAPIKey, newHeaders)
	})
}
// TestMCPServerUserTokens exercises the per-user MCP token queries on the
// encrypted store: tokens read back through the crypt layer are decrypted
// while the raw store keeps access/refresh tokens as ciphertext.
func TestMCPServerUserTokens(t *testing.T) {
	t.Parallel()
	ctx := context.Background()
	const (
		accessToken  = "access-token-value"
		refreshToken = "refresh-token-value"
	)
	// insertConfigAndToken creates a user, an MCP server config, and a
	// user token through the encrypted store. It asserts the returned
	// token is decrypted and keyed to the primary cipher's digest.
	insertConfigAndToken := func(
		t *testing.T,
		crypt *dbCrypt,
		ciphers []Cipher,
	) (database.MCPServerConfig, database.MCPServerUserToken) {
		t.Helper()
		user := dbgen.User(t, crypt, database.User{})
		// Random slug suffix keeps inserts unique across parallel subtests.
		cfg, err := crypt.InsertMCPServerConfig(ctx, database.InsertMCPServerConfigParams{
			DisplayName:   "Token Test MCP",
			Slug:          "tok-mcp-" + uuid.New().String()[:8],
			Url:           "https://mcp.example.com",
			Transport:     "streamable_http",
			AuthType:      "oauth2",
			ToolAllowList: []string{},
			ToolDenyList:  []string{},
			Availability:  "default_off",
			Enabled:       true,
			CreatedBy:     user.ID,
			UpdatedBy:     user.ID,
		})
		require.NoError(t, err)
		tok, err := crypt.UpsertMCPServerUserToken(ctx, database.UpsertMCPServerUserTokenParams{
			MCPServerConfigID: cfg.ID,
			UserID:            user.ID,
			AccessToken:       accessToken,
			RefreshToken:      refreshToken,
			TokenType:         "Bearer",
		})
		require.NoError(t, err)
		require.Equal(t, accessToken, tok.AccessToken)
		require.Equal(t, refreshToken, tok.RefreshToken)
		require.Equal(t, ciphers[0].HexDigest(), tok.AccessTokenKeyID.String)
		require.Equal(t, ciphers[0].HexDigest(), tok.RefreshTokenKeyID.String)
		return cfg, tok
	}
	// Upsert path: stored token columns must hold ciphertext.
	t.Run("UpsertMCPServerUserToken", func(t *testing.T) {
		t.Parallel()
		db, crypt, ciphers := setup(t)
		cfg, tok := insertConfigAndToken(t, crypt, ciphers)
		// Verify the raw DB values are encrypted.
		rawTok, err := db.GetMCPServerUserToken(ctx, database.GetMCPServerUserTokenParams{
			MCPServerConfigID: cfg.ID,
			UserID:            tok.UserID,
		})
		require.NoError(t, err)
		requireEncryptedEquals(t, ciphers[0], rawTok.AccessToken, accessToken)
		requireEncryptedEquals(t, ciphers[0], rawTok.RefreshToken, refreshToken)
	})
	// Single-token lookup decrypts through the crypt layer.
	t.Run("GetMCPServerUserToken", func(t *testing.T) {
		t.Parallel()
		db, crypt, ciphers := setup(t)
		cfg, tok := insertConfigAndToken(t, crypt, ciphers)
		got, err := crypt.GetMCPServerUserToken(ctx, database.GetMCPServerUserTokenParams{
			MCPServerConfigID: cfg.ID,
			UserID:            tok.UserID,
		})
		require.NoError(t, err)
		require.Equal(t, accessToken, got.AccessToken)
		require.Equal(t, refreshToken, got.RefreshToken)
		require.Equal(t, ciphers[0].HexDigest(), got.AccessTokenKeyID.String)
		require.Equal(t, ciphers[0].HexDigest(), got.RefreshTokenKeyID.String)
		// Raw values must be encrypted.
		rawTok, err := db.GetMCPServerUserToken(ctx, database.GetMCPServerUserTokenParams{
			MCPServerConfigID: cfg.ID,
			UserID:            tok.UserID,
		})
		require.NoError(t, err)
		requireEncryptedEquals(t, ciphers[0], rawTok.AccessToken, accessToken)
		requireEncryptedEquals(t, ciphers[0], rawTok.RefreshToken, refreshToken)
	})
	// Per-user listing decrypts every returned token.
	t.Run("GetMCPServerUserTokensByUserID", func(t *testing.T) {
		t.Parallel()
		db, crypt, ciphers := setup(t)
		cfg, tok := insertConfigAndToken(t, crypt, ciphers)
		toks, err := crypt.GetMCPServerUserTokensByUserID(ctx, tok.UserID)
		require.NoError(t, err)
		require.Len(t, toks, 1)
		require.Equal(t, accessToken, toks[0].AccessToken)
		require.Equal(t, refreshToken, toks[0].RefreshToken)
		require.Equal(t, ciphers[0].HexDigest(), toks[0].AccessTokenKeyID.String)
		require.Equal(t, ciphers[0].HexDigest(), toks[0].RefreshTokenKeyID.String)
		// Raw values must be encrypted.
		rawTok, err := db.GetMCPServerUserToken(ctx, database.GetMCPServerUserTokenParams{
			MCPServerConfigID: cfg.ID,
			UserID:            tok.UserID,
		})
		require.NoError(t, err)
		requireEncryptedEquals(t, ciphers[0], rawTok.AccessToken, accessToken)
		requireEncryptedEquals(t, ciphers[0], rawTok.RefreshToken, refreshToken)
	})
}
+22 -22
View File
@@ -80,7 +80,7 @@ replace github.com/spf13/afero => github.com/aslilac/afero v0.0.0-20250403163713
// 1) Adds thinking effort to Anthropic provider
// 2) Downgraded to Go 1.25 due to issue with Windows CI
// https://github.com/kylecarbs/fantasy/compare/main...kylecarbs:fantasy:cj/go1.25
replace charm.land/fantasy => github.com/kylecarbs/fantasy v0.0.0-20260319151840-18e18e661ed4
replace charm.land/fantasy => github.com/kylecarbs/fantasy v0.0.0-20260313123746-578317bb0e5b
replace github.com/charmbracelet/anthropic-sdk-go => github.com/kylecarbs/anthropic-sdk-go v0.0.0-20260223140439-63879b0b8dab
@@ -219,7 +219,7 @@ require (
golang.org/x/tools v0.43.0
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da
google.golang.org/api v0.271.0
google.golang.org/grpc v1.79.3
google.golang.org/grpc v1.79.2
google.golang.org/protobuf v1.36.11
gopkg.in/DataDog/dd-trace-go.v1 v1.74.0
gopkg.in/natefinch/lumberjack.v2 v2.2.1
@@ -264,20 +264,20 @@ require (
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
github.com/armon/go-radix v1.0.1-0.20221118154546-54df44f2176c // indirect
github.com/atotto/clipboard v0.1.4 // indirect
github.com/aws/aws-sdk-go-v2 v1.41.4
github.com/aws/aws-sdk-go-v2/config v1.32.12
github.com/aws/aws-sdk-go-v2/credentials v1.19.12 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.20 // indirect
github.com/aws/aws-sdk-go-v2 v1.41.3
github.com/aws/aws-sdk-go-v2/config v1.32.11
github.com/aws/aws-sdk-go-v2/credentials v1.19.11 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.19 // indirect
github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.6.2
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.20 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.20 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.6 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.7 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.20 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.19 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.19 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.5 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.6 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.19 // indirect
github.com/aws/aws-sdk-go-v2/service/ssm v1.60.1 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.30.13 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.17 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.41.9 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.30.12 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.16 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.41.8 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/aymerick/douceur v0.2.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
@@ -493,7 +493,7 @@ require (
github.com/fsnotify/fsnotify v1.9.0
github.com/go-git/go-git/v5 v5.17.0
github.com/mark3labs/mcp-go v0.38.0
github.com/openai/openai-go/v3 v3.28.0
github.com/openai/openai-go/v3 v3.15.0
github.com/shopspring/decimal v1.4.0
gonum.org/v1/gonum v0.17.0
)
@@ -522,11 +522,11 @@ require (
github.com/aquasecurity/jfather v0.0.8 // indirect
github.com/aquasecurity/trivy v0.61.1-0.20250407075540-f1329c7ea1aa // indirect
github.com/aquasecurity/trivy-checks v1.12.2-0.20251219190323-79d27547baf5 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.6 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.5 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.8 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.17 // indirect
github.com/aws/aws-sdk-go-v2/service/s3 v1.96.0 // indirect
github.com/aws/aws-sdk-go-v2/service/signin v1.0.8 // indirect
github.com/aws/aws-sdk-go-v2/service/signin v1.0.7 // indirect
github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect
github.com/bits-and-blooms/bitset v1.24.4 // indirect
@@ -590,6 +590,7 @@ require (
github.com/moby/sys/user v0.4.0 // indirect
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 // indirect
github.com/openai/openai-go v1.12.0 // indirect
github.com/openai/openai-go/v2 v2.7.1 // indirect
github.com/package-url/packageurl-go v0.1.3 // indirect
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
@@ -614,13 +615,13 @@ require (
github.com/yosida95/uritemplate/v3 v3.0.2 // indirect
github.com/zeebo/xxh3 v1.0.2 // indirect
go.opentelemetry.io/contrib/detectors/gcp v1.40.0 // indirect
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.67.0 // indirect
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.65.0 // indirect
go.opentelemetry.io/otel/sdk/metric v1.42.0 // indirect
go.yaml.in/yaml/v2 v2.4.3 // indirect
go.yaml.in/yaml/v3 v3.0.4 // indirect
go.yaml.in/yaml/v4 v4.0.0-rc.3 // indirect
golang.org/x/telemetry v0.0.0-20260311193753-579e4da9a98c // indirect
google.golang.org/genai v1.50.0 // indirect
google.golang.org/genai v1.49.0 // indirect
gopkg.in/warnings.v0 v0.1.2 // indirect
k8s.io/utils v0.0.0-20250820121507-0af2bda4dd1d // indirect
mvdan.cc/gofumpt v0.8.0 // indirect
@@ -641,6 +642,5 @@ tool (
// https://github.com/anthropics/anthropic-sdk-go/pull/262
replace github.com/anthropics/anthropic-sdk-go v1.19.0 => github.com/dannykopping/anthropic-sdk-go v0.0.0-20251230111224-88a4315810bd
// SasSwart perf fork of openai-go with fix for WithJSONSet + deferred serialization.
// https://github.com/kylecarbs/openai-go/pull/2
replace github.com/openai/openai-go/v3 => github.com/kylecarbs/openai-go/v3 v3.0.0-20260319113850-9477dcaedcae
// https://github.com/openai/openai-go/pull/602
replace github.com/openai/openai-go/v3 => github.com/SasSwart/openai-go/v3 v3.0.0-20260204134041-fb987b42a728
+40 -38
View File
@@ -104,6 +104,8 @@ github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEV
github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk=
github.com/ProtonMail/go-crypto v1.3.0 h1:ILq8+Sf5If5DCpHQp4PbZdS1J7HDFRXz/+xKBiRGFrw=
github.com/ProtonMail/go-crypto v1.3.0/go.mod h1:9whxjD8Rbs29b4XWbB8irEcE8KHMqaR2e7GWU1R+/PE=
github.com/SasSwart/openai-go/v3 v3.0.0-20260204134041-fb987b42a728 h1:FOjd3xOH+arcrtz1e5P6WZ/VtRD5KQHHRg4kc4BZers=
github.com/SasSwart/openai-go/v3 v3.0.0-20260204134041-fb987b42a728/go.mod h1:cdufnVK14cWcT9qA1rRtrXx4FTRsgbDPW7Ia7SS5cZo=
github.com/SherClockHolmes/webpush-go v1.4.0 h1:ocnzNKWN23T9nvHi6IfyrQjkIc0oJWv1B1pULsf9i3s=
github.com/SherClockHolmes/webpush-go v1.4.0/go.mod h1:XSq8pKX11vNV8MJEMwjrlTkxhAj1zKfxmyhdV7Pd6UA=
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8=
@@ -159,46 +161,46 @@ github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
github.com/awalterschulze/gographviz v2.0.3+incompatible h1:9sVEXJBJLwGX7EQVhLm2elIKCm7P2YHFC8v6096G09E=
github.com/awalterschulze/gographviz v2.0.3+incompatible/go.mod h1:GEV5wmg4YquNw7v1kkyoX9etIk8yVmXj+AkDHuuETHs=
github.com/aws/aws-sdk-go-v2 v1.41.4 h1:10f50G7WyU02T56ox1wWXq+zTX9I1zxG46HYuG1hH/k=
github.com/aws/aws-sdk-go-v2 v1.41.4/go.mod h1:mwsPRE8ceUUpiTgF7QmQIJ7lgsKUPQOUl3o72QBrE1o=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.6 h1:N4lRUXZpZ1KVEUn6hxtco/1d2lgYhNn1fHkkl8WhlyQ=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.6/go.mod h1:lyw7GFp3qENLh7kwzf7iMzAxDn+NzjXEAGjKS2UOKqI=
github.com/aws/aws-sdk-go-v2/config v1.32.12 h1:O3csC7HUGn2895eNrLytOJQdoL2xyJy0iYXhoZ1OmP0=
github.com/aws/aws-sdk-go-v2/config v1.32.12/go.mod h1:96zTvoOFR4FURjI+/5wY1vc1ABceROO4lWgWJuxgy0g=
github.com/aws/aws-sdk-go-v2/credentials v1.19.12 h1:oqtA6v+y5fZg//tcTWahyN9PEn5eDU/Wpvc2+kJ4aY8=
github.com/aws/aws-sdk-go-v2/credentials v1.19.12/go.mod h1:U3R1RtSHx6NB0DvEQFGyf/0sbrpJrluENHdPy1j/3TE=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.20 h1:zOgq3uezl5nznfoK3ODuqbhVg1JzAGDUhXOsU0IDCAo=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.20/go.mod h1:z/MVwUARehy6GAg/yQ1GO2IMl0k++cu1ohP9zo887wE=
github.com/aws/aws-sdk-go-v2 v1.41.3 h1:4kQ/fa22KjDt13QCy1+bYADvdgcxpfH18f0zP542kZA=
github.com/aws/aws-sdk-go-v2 v1.41.3/go.mod h1:mwsPRE8ceUUpiTgF7QmQIJ7lgsKUPQOUl3o72QBrE1o=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.5 h1:zWFmPmgw4sveAYi1mRqG+E/g0461cJ5M4bJ8/nc6d3Q=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.5/go.mod h1:nVUlMLVV8ycXSb7mSkcNu9e3v/1TJq2RTlrPwhYWr5c=
github.com/aws/aws-sdk-go-v2/config v1.32.11 h1:ftxI5sgz8jZkckuUHXfC/wMUc8u3fG1vQS0plr2F2Zs=
github.com/aws/aws-sdk-go-v2/config v1.32.11/go.mod h1:twF11+6ps9aNRKEDimksp923o44w/Thk9+8YIlzWMmo=
github.com/aws/aws-sdk-go-v2/credentials v1.19.11 h1:NdV8cwCcAXrCWyxArt58BrvZJ9pZ9Fhf9w6Uh5W3Uyc=
github.com/aws/aws-sdk-go-v2/credentials v1.19.11/go.mod h1:30yY2zqkMPdrvxBqzI9xQCM+WrlrZKSOpSJEsylVU+8=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.19 h1:INUvJxmhdEbVulJYHI061k4TVuS3jzzthNvjqvVvTKM=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.19/go.mod h1:FpZN2QISLdEBWkayloda+sZjVJL+e9Gl0k1SyTgcswU=
github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.6.2 h1:QbFjOdplTkOgviHNKyTW/TZpvIYhD6lqEc3tkIvqMoQ=
github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.6.2/go.mod h1:d0pTYUeTv5/tPSlbPZZQSqssM158jZBs02jx2LDslM8=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.20 h1:CNXO7mvgThFGqOFgbNAP2nol2qAWBOGfqR/7tQlvLmc=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.20/go.mod h1:oydPDJKcfMhgfcgBUZaG+toBbwy8yPWubJXBVERtI4o=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.20 h1:tN6W/hg+pkM+tf9XDkWUbDEjGLb+raoBMFsTodcoYKw=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.20/go.mod h1:YJ898MhD067hSHA6xYCx5ts/jEd8BSOLtQDL3iZsvbc=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.6 h1:qYQ4pzQ2Oz6WpQ8T3HvGHnZydA72MnLuFK9tJwmrbHw=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.6/go.mod h1:O3h0IK87yXci+kg6flUKzJnWeziQUKciKrLjcatSNcY=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.19 h1:/sECfyq2JTifMI2JPyZ4bdRN77zJmr6SrS1eL3augIA=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.19/go.mod h1:dMf8A5oAqr9/oxOfLkC/c2LU/uMcALP0Rgn2BD5LWn0=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.19 h1:AWeJMk33GTBf6J20XJe6qZoRSJo0WfUhsMdUKhoODXE=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.19/go.mod h1:+GWrYoaAsV7/4pNHpwh1kiNLXkKaSoppxQq9lbH8Ejw=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.5 h1:clHU5fm//kWS1C2HgtgWxfQbFbx4b6rx+5jzhgX9HrI=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.5/go.mod h1:O3h0IK87yXci+kg6flUKzJnWeziQUKciKrLjcatSNcY=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.17 h1:JqcdRG//czea7Ppjb+g/n4o8i/R50aTBHkA7vu0lK+k=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.17/go.mod h1:CO+WeGmIdj/MlPel2KwID9Gt7CNq4M65HUfBW97liM0=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.7 h1:5EniKhLZe4xzL7a+fU3C2tfUN4nWIqlLesfrjkuPFTY=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.7/go.mod h1:x0nZssQ3qZSnIcePWLvcoFisRXJzcTVvYpAAdYX8+GI=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.6 h1:XAq62tBTJP/85lFD5oqOOe7YYgWxY9LvWq8plyDvDVg=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.6/go.mod h1:x0nZssQ3qZSnIcePWLvcoFisRXJzcTVvYpAAdYX8+GI=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.8 h1:Z5EiPIzXKewUQK0QTMkutjiaPVeVYXX7KIqhXu/0fXs=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.8/go.mod h1:FsTpJtvC4U1fyDXk7c71XoDv3HlRm8V3NiYLeYLh5YE=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.20 h1:2HvVAIq+YqgGotK6EkMf+KIEqTISmTYh5zLpYyeTo1Y=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.20/go.mod h1:V4X406Y666khGa8ghKmphma/7C0DAtEQYhkq9z4vpbk=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.19 h1:X1Tow7suZk9UCJHE1Iw9GMZJJl0dAnKXXP1NaSDHwmw=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.19/go.mod h1:/rARO8psX+4sfjUQXp5LLifjUt8DuATZ31WptNJTyQA=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.17 h1:bGeHBsGZx0Dvu/eJC0Lh9adJa3M1xREcndxLNZlve2U=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.17/go.mod h1:dcW24lbU0CzHusTE8LLHhRLI42ejmINN8Lcr22bwh/g=
github.com/aws/aws-sdk-go-v2/service/s3 v1.96.0 h1:oeu8VPlOre74lBA/PMhxa5vewaMIMmILM+RraSyB8KA=
github.com/aws/aws-sdk-go-v2/service/s3 v1.96.0/go.mod h1:5jggDlZ2CLQhwJBiZJb4vfk4f0GxWdEDruWKEJ1xOdo=
github.com/aws/aws-sdk-go-v2/service/signin v1.0.8 h1:0GFOLzEbOyZABS3PhYfBIx2rNBACYcKty+XGkTgw1ow=
github.com/aws/aws-sdk-go-v2/service/signin v1.0.8/go.mod h1:LXypKvk85AROkKhOG6/YEcHFPoX+prKTowKnVdcaIxE=
github.com/aws/aws-sdk-go-v2/service/signin v1.0.7 h1:Y2cAXlClHsXkkOvWZFXATr34b0hxxloeQu/pAZz2row=
github.com/aws/aws-sdk-go-v2/service/signin v1.0.7/go.mod h1:idzZ7gmDeqeNrSPkdbtMp9qWMgcBwykA7P7Rzh5DXVU=
github.com/aws/aws-sdk-go-v2/service/ssm v1.60.1 h1:OwMzNDe5VVTXD4kGmeK/FtqAITiV8Mw4TCa8IyNO0as=
github.com/aws/aws-sdk-go-v2/service/ssm v1.60.1/go.mod h1:IyVabkWrs8SNdOEZLyFFcW9bUltV4G6OQS0s6H20PHg=
github.com/aws/aws-sdk-go-v2/service/sso v1.30.13 h1:kiIDLZ005EcKomYYITtfsjn7dtOwHDOFy7IbPXKek2o=
github.com/aws/aws-sdk-go-v2/service/sso v1.30.13/go.mod h1:2h/xGEowcW/g38g06g3KpRWDlT+OTfxxI0o1KqayAB8=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.17 h1:jzKAXIlhZhJbnYwHbvUQZEB8KfgAEuG0dc08Bkda7NU=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.17/go.mod h1:Al9fFsXjv4KfbzQHGe6V4NZSZQXecFcvaIF4e70FoRA=
github.com/aws/aws-sdk-go-v2/service/sts v1.41.9 h1:Cng+OOwCHmFljXIxpEVXAGMnBia8MSU6Ch5i9PgBkcU=
github.com/aws/aws-sdk-go-v2/service/sts v1.41.9/go.mod h1:LrlIndBDdjA/EeXeyNBle+gyCwTlizzW5ycgWnvIxkk=
github.com/aws/aws-sdk-go-v2/service/sso v1.30.12 h1:iSsvB9EtQ09YrsmIc44Heqlx5ByGErqhPK1ZQLppias=
github.com/aws/aws-sdk-go-v2/service/sso v1.30.12/go.mod h1:fEWYKTRGoZNl8tZ77i61/ccwOMJdGxwOhWCkp6TXAr0=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.16 h1:EnUdUqRP1CNzt2DkV67tJx6XDN4xlfBFm+bzeNOQVb0=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.16/go.mod h1:Jic/xv0Rq/pFNCh3WwpH4BEqdbSAl+IyHro8LbibHD8=
github.com/aws/aws-sdk-go-v2/service/sts v1.41.8 h1:XQTQTF75vnug2TXS8m7CVJfC2nniYPZnO1D4Np761Oo=
github.com/aws/aws-sdk-go-v2/service/sts v1.41.8/go.mod h1:Xgx+PR1NUOjNmQY+tRMnouRp83JRM8pRMw/vCaVhPkI=
github.com/aws/smithy-go v1.24.2 h1:FzA3bu/nt/vDvmnkg+R8Xl46gmzEDam6mZ1hzmwXFng=
github.com/aws/smithy-go v1.24.2/go.mod h1:YE2RhdIuDbA5E5bTdciG9KrW3+TiEONeUWCqxX9i1Fc=
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
@@ -815,10 +817,8 @@ github.com/kylecarbs/anthropic-sdk-go v0.0.0-20260223140439-63879b0b8dab h1:5UMY
github.com/kylecarbs/anthropic-sdk-go v0.0.0-20260223140439-63879b0b8dab/go.mod h1:hqlYqR7uPKOKfnNeicUbZp0Ps0GeYFlKYtwh5HGDCx8=
github.com/kylecarbs/chroma/v2 v2.0.0-20240401211003-9e036e0631f3 h1:Z9/bo5PSeMutpdiKYNt/TTSfGM1Ll0naj3QzYX9VxTc=
github.com/kylecarbs/chroma/v2 v2.0.0-20240401211003-9e036e0631f3/go.mod h1:BUGjjsD+ndS6eX37YgTchSEG+Jg9Jv1GiZs9sqPqztk=
github.com/kylecarbs/fantasy v0.0.0-20260319151840-18e18e661ed4 h1:DO0b5G0yfrtKkzlJofnPxEcRlS157lzQyPiUSDkzfcU=
github.com/kylecarbs/fantasy v0.0.0-20260319151840-18e18e661ed4/go.mod h1:I/i6LkVAWnSVdFZ37SbcR0IZz6eBhu4P9IK3XHTX6Gk=
github.com/kylecarbs/openai-go/v3 v3.0.0-20260319113850-9477dcaedcae h1:xlFZNX4nnxpj9Cf6mTwD3pirXGNtBJ/6COsf9iZmsL0=
github.com/kylecarbs/openai-go/v3 v3.0.0-20260319113850-9477dcaedcae/go.mod h1:cdufnVK14cWcT9qA1rRtrXx4FTRsgbDPW7Ia7SS5cZo=
github.com/kylecarbs/fantasy v0.0.0-20260313123746-578317bb0e5b h1:sC/Qw4tgnzsYQ04i8RU/RIL9UGzLYOSVWKK83CEPoJk=
github.com/kylecarbs/fantasy v0.0.0-20260313123746-578317bb0e5b/go.mod h1:p6cYJVG8D8AC51MgejAKCMu0myRyQ+vKLuoJQ3biaXo=
github.com/kylecarbs/spinner v1.18.2-0.20220329160715-20702b5af89e h1:OP0ZMFeZkUnOzTFRfpuK3m7Kp4fNvC6qN+exwj7aI4M=
github.com/kylecarbs/spinner v1.18.2-0.20220329160715-20702b5af89e/go.mod h1:mQak9GHqbspjC/5iUx3qMlIho8xBS/ppAL/hX5SmPJU=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
@@ -976,6 +976,8 @@ github.com/open-telemetry/opentelemetry-collector-contrib/processor/probabilisti
github.com/open-telemetry/opentelemetry-collector-contrib/processor/probabilisticsamplerprocessor v0.120.1/go.mod h1:Z/S1brD5gU2Ntht/bHxBVnGxXKTvZDr0dNv/riUzPmY=
github.com/openai/openai-go v1.12.0 h1:NBQCnXzqOTv5wsgNC36PrFEiskGfO5wccfCWDo9S1U0=
github.com/openai/openai-go v1.12.0/go.mod h1:g461MYGXEXBVdV5SaR/5tNzNbSfwTBBefwc+LlDCK0Y=
github.com/openai/openai-go/v2 v2.7.1 h1:/tfvTJhfv7hTSL8mWwc5VL4WLLSDL5yn9VqVykdu9r8=
github.com/openai/openai-go/v2 v2.7.1/go.mod h1:jrJs23apqJKKbT+pqtFgNKpRju/KP9zpUTZhz3GElQE=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040=
@@ -1311,8 +1313,8 @@ go.opentelemetry.io/contrib v1.19.0 h1:rnYI7OEPMWFeM4QCqWQ3InMJ0arWMR1i0Cx9A5hcj
go.opentelemetry.io/contrib v1.19.0/go.mod h1:gIzjwWFoGazJmtCaDgViqOSJPde2mCWzv60o0bWPcZs=
go.opentelemetry.io/contrib/detectors/gcp v1.40.0 h1:Awaf8gmW99tZTOWqkLCOl6aw1/rxAWVlHsHIZ3fT2sA=
go.opentelemetry.io/contrib/detectors/gcp v1.40.0/go.mod h1:99OY9ZCqyLkzJLTh5XhECpLRSxcZl+ZDKBEO+jMBFR4=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.67.0 h1:yI1/OhfEPy7J9eoa6Sj051C7n5dvpj0QX8g4sRchg04=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.67.0/go.mod h1:NoUCKYWK+3ecatC4HjkRktREheMeEtrXoQxrqYFeHSc=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.65.0 h1:XmiuHzgJt067+a6kwyAzkhXooYVv3/TOw9cM2VfJgUM=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.65.0/go.mod h1:KDgtbWKTQs4bM+VPUr6WlL9m/WXcmkCcBlIzqxPGzmI=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0 h1:OyrsyzuttWTSur2qN/Lm0m2a8yqyIjUVBZcxFPuXq2o=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0/go.mod h1:C2NGBr+kAB4bk3xtMXfZ94gqFDtg/GkI7e9zqGh5Beg=
go.opentelemetry.io/otel v1.3.0/go.mod h1:PWIKzi6JCp7sM0k9yZ43VX+T345uNbAkDKwHVjb2PTs=
@@ -1521,16 +1523,16 @@ google.golang.org/api v0.271.0/go.mod h1:CGT29bhwkbF+i11qkRUJb2KMKqcJ1hdFceEIRd9
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
google.golang.org/genai v1.50.0 h1:yHKV/vjoeN9PJ3iF0ur4cBZco4N3Kl7j09rMq7XSoWk=
google.golang.org/genai v1.50.0/go.mod h1:A3kkl0nyBjyFlNjgxIwKq70julKbIxpSxqKO5gw/gmk=
google.golang.org/genai v1.49.0 h1:Se+QJaH2GYK1aaR1o5S38mlU2GD5FnVvP76nfkV7LH0=
google.golang.org/genai v1.49.0/go.mod h1:A3kkl0nyBjyFlNjgxIwKq70julKbIxpSxqKO5gw/gmk=
google.golang.org/genproto v0.0.0-20260217215200-42d3e9bedb6d h1:vsOm753cOAMkt76efriTCDKjpCbK18XGHMJHo0JUKhc=
google.golang.org/genproto v0.0.0-20260217215200-42d3e9bedb6d/go.mod h1:0oz9d7g9QLSdv9/lgbIjowW1JoxMbxmBVNe8i6tORJI=
google.golang.org/genproto/googleapis/api v0.0.0-20260217215200-42d3e9bedb6d h1:EocjzKLywydp5uZ5tJ79iP6Q0UjDnyiHkGRWxuPBP8s=
google.golang.org/genproto/googleapis/api v0.0.0-20260217215200-42d3e9bedb6d/go.mod h1:48U2I+QQUYhsFrg2SY6r+nJzeOtjey7j//WBESw+qyQ=
google.golang.org/genproto/googleapis/rpc v0.0.0-20260226221140-a57be14db171 h1:ggcbiqK8WWh6l1dnltU4BgWGIGo+EVYxCaAPih/zQXQ=
google.golang.org/genproto/googleapis/rpc v0.0.0-20260226221140-a57be14db171/go.mod h1:4Hqkh8ycfw05ld/3BWL7rJOSfebL2Q+DVDeRgYgxUU8=
google.golang.org/grpc v1.79.3 h1:sybAEdRIEtvcD68Gx7dmnwjZKlyfuc61Dyo9pGXXkKE=
google.golang.org/grpc v1.79.3/go.mod h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ=
google.golang.org/grpc v1.79.2 h1:fRMD94s2tITpyJGtBBn7MkMseNpOZU8ZxgC3MMBaXRU=
google.golang.org/grpc v1.79.2/go.mod h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
+1 -7
View File
@@ -650,12 +650,6 @@ func ParameterTerraform(param *proto.RichParameter) (string, error) {
s, _ := proto.ProviderFormType(v.FormType)
return string(s)
},
"hasDefault": func(v *proto.RichParameter) bool {
// Emit default when the value is explicitly non-empty,
// or when the parameter is ephemeral (ephemeral params
// always need a default, even if it's an empty string).
return v.DefaultValue != "" || v.Ephemeral
},
}).Parse(`
data "coder_parameter" "{{ .Name }}" {
name = "{{ .Name }}"
@@ -665,7 +659,7 @@ data "coder_parameter" "{{ .Name }}" {
mutable = {{ .Mutable }}
ephemeral = {{ .Ephemeral }}
order = {{ .Order }}
{{- if hasDefault . }}
{{- if .DefaultValue }}
{{- if eq .Type "list(string)" }}
default = jsonencode({{ .DefaultValue }})
{{else if eq .Type "bool"}}
-4
View File
@@ -58,10 +58,6 @@ else
fi
log "Deploy branch: $deploy_branch"
# TODO: remove this temporary override
log "OVERRIDE: forcing main as deploy branch"
deploy_branch=main
# Finally, check if the current branch is the deploy branch.
log
if [[ "$branch_name" != "$deploy_branch" ]]; then
-1
View File
@@ -8,7 +8,6 @@ export default {
"@storybook/addon-links",
"@storybook/addon-themes",
"storybook-addon-remix-react-router",
"@storybook/addon-vitest",
],
staticDirs: ["../static"],
-7
View File
@@ -1,7 +0,0 @@
import { setProjectAnnotations } from "@storybook/react-vite";
import { beforeAll } from "vitest";
import * as previewAnnotations from "./preview";
const annotations = setProjectAnnotations([previewAnnotations]);
beforeAll(annotations.beforeAll);
+7 -145
View File
@@ -16,9 +16,7 @@ When investigating or editing TypeScript/React code, always use the TypeScript l
## Bash commands
- `pnpm dev` - Start Vite development server
- `pnpm storybook --no-open` - Start Storybook dev server
- `pnpm test:storybook` - Run storybook story tests (play functions) via Vitest + Playwright
- `pnpm test:storybook src/path/to/component.stories.tsx` - Run a single story file
- `pnpm storybook --no-open` - Run storybook tests
- `pnpm test` - Run jest unit tests
- `pnpm test -- path/to/specific.test.ts` - Run a single test file
- `pnpm lint` - Run complete linting suite (Biome + TypeScript + circular deps + knip)
@@ -33,14 +31,6 @@ When investigating or editing TypeScript/React code, always use the TypeScript l
- Do not use shadcn CLI - manually add components to maintain consistency
- The modules folder should contain components with business logic specific to the codebase.
- Create custom components only when shadcn alternatives don't exist
- **Before creating any new component**, search the codebase for existing
implementations. Check `site/src/components/` for shared primitives
(Table, Badge, icons, error handlers) and sibling files for local
helpers. Duplicating existing components wastes effort and creates
maintenance burden.
- Keep component files under ~500 lines. When a file grows beyond that,
extract logical sections into sub-components or a folder with an
index file.
## Styling
@@ -62,111 +52,7 @@ When investigating or editing TypeScript/React code, always use the TypeScript l
- Destructure imports when possible (eg. import { foo } from 'bar')
- Prefer `for...of` over `forEach` for iteration
- **Biome** handles both linting and formatting (not ESLint/Prettier)
- Always use react-query for data fetching. Do not attempt to manage any
data life cycle manually. Do not ever call an `API` function directly
within a component.
- **Match existing patterns** in the same file before introducing new
conventions. For example, if sibling API methods use a shared helper
like `getURLWithSearchParams`, do not manually build `URLSearchParams`.
If sibling components initialize state with `useMemo`, don't switch to
`useState(initialFn)` in the same file without reason.
- Match errors by error code or HTTP status, never by comparing error
message strings. String matching is brittle — messages change, get
localized, or get reformatted.
## TypeScript Type Safety
- **Never use `as unknown as X`** double assertions. They bypass
TypeScript's type system entirely and hide real type incompatibilities.
If types don't align, fix the types at the source.
- **Prefer type annotations over `as` casts.** When narrowing is needed,
use type guards or conditional checks instead of assertions.
- **Avoid the non-null assertion operator (`!.`)**. If a value could be
null/undefined, add a proper guard or narrow the type. If it can never
be null, fix the upstream type definition to reflect that.
- **Use generated types from `api/typesGenerated.ts`** for all
API/server types. Never manually re-declare types that already exist in
generated code — duplicated types drift out of sync with the backend.
- If a component's implementation depends on a prop being present, make
that prop **required** in the type definition. Optional props that are
actually required create a false sense of flexibility and hide bugs.
- Avoid `// @ts-ignore` and `// eslint-disable`. If they seem necessary,
document why and seek a better-typed alternative first.
## React Query Patterns
- **Query keys must nest** under established parent key hierarchies. For
example, use `["chats", "costSummary", ...]` not `["chatCostSummary"]`.
Flat keys that break hierarchy prevent
`queryClient.invalidateQueries(parentKey)` from correctly invalidating
related queries.
- When you don't need to `await` a mutation result, use **`mutate()`**
with `onSuccess`/`onError` callbacks — not `mutateAsync()` wrapped in
`try/catch` with an empty catch block. Empty catch blocks silently
swallow errors. `mutate()` automatically surfaces errors through
react-query's error state.
## Accessibility
- Every `<table>` / `<Table>` must have an **`aria-label`** or
`<caption>` so screen readers can distinguish between multiple tables
on a page.
- Every element with `tabIndex={0}` must have a semantic **`role`**
attribute (e.g., `role="button"`, `role="row"`) so assistive technology
can communicate what the element is.
- When hiding an interactive element visually (e.g., `opacity-0`,
`pointer-events-none`), you **must also** remove it from the keyboard
tab order and accessibility tree. Add `tabIndex={-1}` and
`aria-hidden="true"`, or better yet, conditionally render the element
so it's not in the DOM at all. `pointer-events: none` only suppresses
mouse/touch — keyboard and screen readers still reach the element.
## Testing Patterns
- **Assert observable behavior, not CSS class names.** In Storybook play
functions and tests, use queries like `queryByRole`, `toBeVisible()`,
or `not.toBeVisible()` — not assertions on class names like
`opacity-0`. Asserting class names couples tests to the specific
Tailwind/CSS technique and breaks when the styling mechanism changes
without user-visible regression.
- **Use `data-testid`** for test element lookup when an element has no
semantic role or accessible name (e.g., scroll containers, wrapper
divs). Never use CSS class substring matches like
`querySelector("[class*='flex-col-reverse']")` — these break silently
on class renames or Tailwind output changes.
- **Don't depend on `behavior: "smooth"` scroll** in tests. Smooth
scrolling is async and implementation-defined — in test environments,
`scrollTo` may not produce native scroll events at all. Use
`behavior: "instant"` in test contexts or mock the scroll position
directly.
- When modifying a component's visual appearance or behavior, **update or
add Storybook stories** to capture the change. Stories must stay
current as components evolve — stale stories hide regressions.
## Robustness
- When rendering user-facing text from nullable/optional data, always
provide a **visible fallback** (e.g., "Untitled", "N/A", em-dash).
Never render a blank cell or element.
- When converting strings to numbers (e.g., `Number(apiValue)`), **guard
against `NaN`** and non-finite results before formatting. For example,
`Number("abc").toFixed(2)` produces `"NaN"`.
- When using `toLocaleString()`, always pass an **explicit locale**
(e.g., `"en-US"`) for deterministic output across environments. Without
a locale, `1234` formats as `"1.234"` in `de-DE` but `"1,234"` in
`en-US`.
## Performance
- When adding state that changes frequently (scroll position, hover,
animation frame), **extract the state and its dependent UI into a child
component** rather than keeping it in a parent that renders a large
subtree. This prevents React from re-rendering the entire subtree on
every state change.
- **Throttle high-frequency event handlers** (scroll, resize, mousemove)
that call `setState`. Use `requestAnimationFrame` or a throttle
utility. Even when React skips re-renders for identical state, the
handler itself still runs on every frame (60Hz+).
- Always use react-query for data fetching. Do not attempt to manage any data life cycle manually. Do not ever call an `API` function directly within a component.
## Workflow
@@ -215,12 +101,12 @@ When investigating or editing TypeScript/React code, always use the TypeScript l
### 3. React orchestrates execution
- **Don't call component functions directly; render them via JSX.** This keeps Hook rules intact and lets React optimize reconciliation.
- **Dont call component functions directly; render them via JSX.** This keeps Hook rules intact and lets React optimize reconciliation.
- **Never pass Hooks around as values or mutate them dynamically.** Keep Hook usage static and local to each component.
### 4. State Management
- After calling a setter you'll still read the **previous** state during the same event; updates are queued and batched.
- After calling a setter youll still read the **previous** state during the same event; updates are queued and batched.
- Use **functional updates** (setX(prev ⇒ …)) whenever next state depends on previous state.
- Pass a function to useState(initialFn) for **lazy initialization**—it runs only on the first render.
- If the next state is Object.is-equal to the current one, React skips the re-render.
@@ -230,39 +116,15 @@ When investigating or editing TypeScript/React code, always use the TypeScript l
- An Effect takes a **setup** function and optional **cleanup**; React runs setup after commit, cleanup before the next setup or on unmount.
- The **dependency array must list every reactive value** referenced inside the Effect, and its length must stay constant.
- Effects run **only on the client**, never during server rendering.
- Use Effects solely to **synchronize with external systems**; if you're not "escaping React," you probably don't need one.
- **Never use `useEffect` to derive state from props or other state.** If
a value can be computed during render, use `useMemo` or a plain
variable. A `useEffect` that reads state A and calls `setState(B)` on
every change is a code smell — it causes an extra render cycle and adds
unnecessary complexity.
- Use Effects solely to **synchronize with external systems**; if youre not escaping React, you probably dont need one.
### 6. Lists & Keys
- Every sibling element in a list **needs a stable, unique key prop**. Never use array indexes or Math.random(); prefer data-driven IDs.
- Keys aren't passed to children and **must not change between renders**; if you return multiple nodes per item, use `<Fragment key={id}>`
- **Never use `key={String(booleanState)}`** to force remounts. When the
boolean flips, React unmounts and remounts the component synchronously,
killing exit animations (e.g., dialog close transitions) and wasting
renders. Use a monotonically increasing counter or avoid `key` for
this pattern entirely.
- Keys arent passed to children and **must not change between renders**; if you return multiple nodes per item, use `<Fragment key={id}>`
### 7. Refs & DOM Access
- useRef stores a mutable .current **without causing re-renders**.
- **Don't call Hooks (including useRef) inside loops, conditions, or map().** Extract a child component instead.
- **Dont call Hooks (including useRef) inside loops, conditions, or map().** Extract a child component instead.
- **Avoid reading or mutating refs during render;** access them in event handlers or Effects after commit.
### 8. Element IDs
- **Use `React.useId()`** to generate unique IDs for form elements,
labels, and ARIA attributes. Never hard-code string IDs — they collide
when a component is rendered multiple times on the same page.
### 9. Component Testability
- When a component depends on a dynamic value like the current time or
date, **accept it as a prop** (or via context) rather than reading it
internally (e.g., `new Date()`, `Date.now()`). This makes the
component deterministic and testable in Storybook without mocking
globals.
+3 -6
View File
@@ -27,10 +27,9 @@
"storybook": "STORYBOOK=true storybook dev -p 6006",
"storybook:build": "storybook build",
"storybook:ci": "storybook build --test",
"test": "vitest run --project=unit && jest",
"test:storybook": "vitest --project=storybook",
"test:ci": "vitest run --project=unit && jest --silent",
"test:watch": "vitest --project=unit",
"test": "vitest run && jest",
"test:ci": "vitest run && jest --silent",
"test:watch": "vitest",
"test:watch-jest": "jest --watch",
"stats": "STATS=true pnpm build && npx http-server ./stats -p 8081 -c-1",
"update-emojis": "cp -rf ./node_modules/emoji-datasource-apple/img/apple/64/* ./static/emojis && cp -f ./node_modules/emoji-datasource-apple/img/apple/sheets-256/64.png ./static/emojis/spritesheet.png"
@@ -140,7 +139,6 @@
"@storybook/addon-docs": "10.2.10",
"@storybook/addon-links": "10.2.10",
"@storybook/addon-themes": "10.2.10",
"@storybook/addon-vitest": "10.2.10",
"@storybook/react-vite": "10.2.10",
"@swc/core": "1.3.38",
"@swc/jest": "0.2.37",
@@ -169,7 +167,6 @@
"@types/ua-parser-js": "0.7.36",
"@types/uuid": "9.0.2",
"@vitejs/plugin-react": "5.1.1",
"@vitest/browser-playwright": "4.0.14",
"autoprefixer": "10.4.22",
"chromatic": "11.29.0",
"dpdm": "3.14.0",
+2 -211
View File
@@ -326,9 +326,6 @@ importers:
'@storybook/addon-themes':
specifier: 10.2.10
version: 10.2.10(storybook@10.2.10(@testing-library/dom@10.4.0)(prettier@3.4.1)(react-dom@19.2.2(react@19.2.2))(react@19.2.2))
'@storybook/addon-vitest':
specifier: 10.2.10
version: 10.2.10(@vitest/browser-playwright@4.0.14)(@vitest/browser@4.1.0(msw@2.4.8(typescript@5.6.3))(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))(vitest@4.0.14))(@vitest/runner@4.0.14)(react-dom@19.2.2(react@19.2.2))(react@19.2.2)(storybook@10.2.10(@testing-library/dom@10.4.0)(prettier@3.4.1)(react-dom@19.2.2(react@19.2.2))(react@19.2.2))(vitest@4.0.14)
'@storybook/react-vite':
specifier: 10.2.10
version: 10.2.10(esbuild@0.25.12)(react-dom@19.2.2(react@19.2.2))(react@19.2.2)(rollup@4.53.3)(storybook@10.2.10(@testing-library/dom@10.4.0)(prettier@3.4.1)(react-dom@19.2.2(react@19.2.2))(react@19.2.2))(typescript@5.6.3)(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))
@@ -413,9 +410,6 @@ importers:
'@vitejs/plugin-react':
specifier: 5.1.1
version: 5.1.1(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))
'@vitest/browser-playwright':
specifier: 4.0.14
version: 4.0.14(msw@2.4.8(typescript@5.6.3))(playwright@1.50.1)(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))(vitest@4.0.14)
autoprefixer:
specifier: 10.4.22
version: 10.4.22(postcss@8.5.6)
@@ -496,7 +490,7 @@ importers:
version: 0.11.0(@biomejs/biome@2.2.4)(eslint@8.52.0)(optionator@0.9.3)(typescript@5.6.3)(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))
vitest:
specifier: 4.0.14
version: 4.0.14(@types/node@20.19.25)(@vitest/browser-playwright@4.0.14)(jiti@1.21.7)(jsdom@27.2.0)(msw@2.4.8(typescript@5.6.3))(yaml@2.7.0)
version: 4.0.14(@types/node@20.19.25)(jiti@1.21.7)(jsdom@27.2.0)(msw@2.4.8(typescript@5.6.3))(yaml@2.7.0)
packages:
@@ -765,9 +759,6 @@ packages:
cpu: [x64]
os: [win32]
'@blazediff/core@1.9.1':
resolution: {integrity: sha512-ehg3jIkYKulZh+8om/O25vkvSsXXwC+skXmyA87FFx6A/45eqOkZsBltMw/TVteb0mloiGT8oGRTcjRAz66zaA==, tarball: https://registry.npmjs.org/@blazediff/core/-/core-1.9.1.tgz}
'@bundled-es-modules/cookie@2.0.1':
resolution: {integrity: sha512-8o+5fRPLNbjbdGRRmJj3h6Hh1AQJf2dk3qQ/5ZFb+PXkRNiSoMGGUKlsgLfrxneb72axVJyIYji64E2+nNfYyw==, tarball: https://registry.npmjs.org/@bundled-es-modules/cookie/-/cookie-2.0.1.tgz}
@@ -1640,9 +1631,6 @@ packages:
engines: {node: '>=18'}
hasBin: true
'@polka/url@1.0.0-next.29':
resolution: {integrity: sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==, tarball: https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.29.tgz}
'@popperjs/core@2.11.8':
resolution: {integrity: sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==, tarball: https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz}
@@ -2361,24 +2349,6 @@ packages:
peerDependencies:
storybook: ^10.2.10
'@storybook/addon-vitest@10.2.10':
resolution: {integrity: sha512-U2oHw+Ar+Xd06wDTB74VlujhIIW89OHThpJjwgqgM6NWrOC/XLllJ53ILFDyREBkMwpBD7gJQIoQpLEcKBIEhw==, tarball: https://registry.npmjs.org/@storybook/addon-vitest/-/addon-vitest-10.2.10.tgz}
peerDependencies:
'@vitest/browser': ^3.0.0 || ^4.0.0
'@vitest/browser-playwright': ^4.0.0
'@vitest/runner': ^3.0.0 || ^4.0.0
storybook: ^10.2.10
vitest: ^3.0.0 || ^4.0.0
peerDependenciesMeta:
'@vitest/browser':
optional: true
'@vitest/browser-playwright':
optional: true
'@vitest/runner':
optional: true
vitest:
optional: true
'@storybook/builder-vite@10.2.10':
resolution: {integrity: sha512-Wd6CYL7LvRRNiXMz977x9u/qMm7nmMw/7Dow2BybQo+Xbfy1KhVjIoZ/gOiG515zpojSozctNrJUbM0+jH1jwg==, tarball: https://registry.npmjs.org/@storybook/builder-vite/-/builder-vite-10.2.10.tgz}
peerDependencies:
@@ -2866,22 +2836,6 @@ packages:
peerDependencies:
vite: ^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0
'@vitest/browser-playwright@4.0.14':
resolution: {integrity: sha512-rUvyz6wX6wDjcYzf/7fgXYfca2bAu0Axoq/v9LYdELzcBSS9UKjnZ7MaMY4UDP78HHHCdmdtceuSao1s51ON8A==, tarball: https://registry.npmjs.org/@vitest/browser-playwright/-/browser-playwright-4.0.14.tgz}
peerDependencies:
playwright: '*'
vitest: 4.0.14
'@vitest/browser@4.0.14':
resolution: {integrity: sha512-vO0uqR8SnPTd8ykp14yaIuUyMZ9HEBYuoZrVdUp7RrEp76VEnkrX9fDkGnK0NyBdfWXB6cqp7BmqVekd8yKHFQ==, tarball: https://registry.npmjs.org/@vitest/browser/-/browser-4.0.14.tgz}
peerDependencies:
vitest: 4.0.14
'@vitest/browser@4.1.0':
resolution: {integrity: sha512-tG/iOrgbiHQks0ew7CdelUyNEHkv8NLrt+CqdTivIuoSnXvO7scWMn4Kqo78/UGY1NJ6Hv+vp8BvRnED/bjFdQ==, tarball: https://registry.npmjs.org/@vitest/browser/-/browser-4.1.0.tgz}
peerDependencies:
vitest: 4.1.0
'@vitest/expect@3.2.4':
resolution: {integrity: sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==, tarball: https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz}
@@ -2899,26 +2853,12 @@ packages:
vite:
optional: true
'@vitest/mocker@4.1.0':
resolution: {integrity: sha512-evxREh+Hork43+Y4IOhTo+h5lGmVRyjqI739Rz4RlUPqwrkFFDF6EMvOOYjTx4E8Tl6gyCLRL8Mu7Ry12a13Tw==, tarball: https://registry.npmjs.org/@vitest/mocker/-/mocker-4.1.0.tgz}
peerDependencies:
msw: ^2.4.9
vite: ^6.0.0 || ^7.0.0 || ^8.0.0-0
peerDependenciesMeta:
msw:
optional: true
vite:
optional: true
'@vitest/pretty-format@3.2.4':
resolution: {integrity: sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==, tarball: https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz}
'@vitest/pretty-format@4.0.14':
resolution: {integrity: sha512-SOYPgujB6TITcJxgd3wmsLl+wZv+fy3av2PpiPpsWPZ6J1ySUYfScfpIt2Yv56ShJXR2MOA6q2KjKHN4EpdyRQ==, tarball: https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.0.14.tgz}
'@vitest/pretty-format@4.1.0':
resolution: {integrity: sha512-3RZLZlh88Ib0J7NQTRATfc/3ZPOnSUn2uDBUoGNn5T36+bALixmzphN26OUD3LRXWkJu4H0s5vvUeqBiw+kS0A==, tarball: https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.1.0.tgz}
'@vitest/runner@4.0.14':
resolution: {integrity: sha512-BsAIk3FAqxICqREbX8SetIteT8PiaUL/tgJjmhxJhCsigmzzH8xeadtp7LRnTpCVzvf0ib9BgAfKJHuhNllKLw==, tarball: https://registry.npmjs.org/@vitest/runner/-/runner-4.0.14.tgz}
@@ -2931,18 +2871,12 @@ packages:
'@vitest/spy@4.0.14':
resolution: {integrity: sha512-JmAZT1UtZooO0tpY3GRyiC/8W7dCs05UOq9rfsUUgEZEdq+DuHLmWhPsrTt0TiW7WYeL/hXpaE07AZ2RCk44hg==, tarball: https://registry.npmjs.org/@vitest/spy/-/spy-4.0.14.tgz}
'@vitest/spy@4.1.0':
resolution: {integrity: sha512-pz77k+PgNpyMDv2FV6qmk5ZVau6c3R8HC8v342T2xlFxQKTrSeYw9waIJG8KgV9fFwAtTu4ceRzMivPTH6wSxw==, tarball: https://registry.npmjs.org/@vitest/spy/-/spy-4.1.0.tgz}
'@vitest/utils@3.2.4':
resolution: {integrity: sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==, tarball: https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz}
'@vitest/utils@4.0.14':
resolution: {integrity: sha512-hLqXZKAWNg8pI+SQXyXxWCTOpA3MvsqcbVeNgSi8x/CSN2wi26dSzn1wrOhmCmFjEvN9p8/kLFRHa6PI8jHazw==, tarball: https://registry.npmjs.org/@vitest/utils/-/utils-4.0.14.tgz}
'@vitest/utils@4.1.0':
resolution: {integrity: sha512-XfPXT6a8TZY3dcGY8EdwsBulFCIw+BeeX0RZn2x/BtiY/75YGh8FeWGG8QISN/WhaqSrE2OrlDgtF8q5uhOTmw==, tarball: https://registry.npmjs.org/@vitest/utils/-/utils-4.1.0.tgz}
'@xterm/addon-canvas@0.7.0':
resolution: {integrity: sha512-LF5LYcfvefJuJ7QotNRdRSPc9YASAVDeoT5uyXS/nZshZXjYplGXRECBGiznwvhNL2I8bq1Lf5MzRwstsYQ2Iw==, tarball: https://registry.npmjs.org/@xterm/addon-canvas/-/addon-canvas-0.7.0.tgz}
peerDependencies:
@@ -5132,10 +5066,6 @@ packages:
react-dom:
optional: true
mrmime@2.0.1:
resolution: {integrity: sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==, tarball: https://registry.npmjs.org/mrmime/-/mrmime-2.0.1.tgz}
engines: {node: '>=10'}
ms@2.0.0:
resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==, tarball: https://registry.npmjs.org/ms/-/ms-2.0.0.tgz}
@@ -5382,10 +5312,6 @@ packages:
resolution: {integrity: sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==, tarball: https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz}
engines: {node: '>= 6'}
pixelmatch@7.1.0:
resolution: {integrity: sha512-1wrVzJ2STrpmONHKBy228LM1b84msXDUoAzVEl0R8Mz4Ce6EPr+IVtxm8+yvrqLYMHswREkjYFaMxnyGnaY3Ng==, tarball: https://registry.npmjs.org/pixelmatch/-/pixelmatch-7.1.0.tgz}
hasBin: true
pkg-dir@4.2.0:
resolution: {integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==, tarball: https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz}
engines: {node: '>=8'}
@@ -5400,10 +5326,6 @@ packages:
engines: {node: '>=18'}
hasBin: true
pngjs@7.0.0:
resolution: {integrity: sha512-LKWqWJRhstyYo9pGvgor/ivk2w94eSjE3RGVuzLGlr3NmD8bf7RcYGze1mNdEHRP6TRP6rMuDHk5t44hnTRyow==, tarball: https://registry.npmjs.org/pngjs/-/pngjs-7.0.0.tgz}
engines: {node: '>=14.19.0'}
possible-typed-array-names@1.0.0:
resolution: {integrity: sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==, tarball: https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz}
engines: {node: '>= 0.4'}
@@ -5950,10 +5872,6 @@ packages:
resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==, tarball: https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz}
engines: {node: '>=14'}
sirv@3.0.2:
resolution: {integrity: sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g==, tarball: https://registry.npmjs.org/sirv/-/sirv-3.0.2.tgz}
engines: {node: '>=18'}
sisteransi@1.0.5:
resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==, tarball: https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz}
@@ -6232,10 +6150,6 @@ packages:
toposort@2.0.2:
resolution: {integrity: sha512-0a5EOkAUp8D4moMi2W8ZF8jcga7BgZd91O/yabJCFY8az+XSzeGyTKs0Aoo897iV1Nj6guFq8orWDS96z91oGg==, tarball: https://registry.npmjs.org/toposort/-/toposort-2.0.2.tgz}
totalist@3.0.1:
resolution: {integrity: sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==, tarball: https://registry.npmjs.org/totalist/-/totalist-3.0.1.tgz}
engines: {node: '>=6'}
tough-cookie@4.1.4:
resolution: {integrity: sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag==, tarball: https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.4.tgz}
engines: {node: '>=6'}
@@ -6716,18 +6630,6 @@ packages:
utf-8-validate:
optional: true
ws@8.19.0:
resolution: {integrity: sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==, tarball: https://registry.npmjs.org/ws/-/ws-8.19.0.tgz}
engines: {node: '>=10.0.0'}
peerDependencies:
bufferutil: ^4.0.1
utf-8-validate: '>=5.0.2'
peerDependenciesMeta:
bufferutil:
optional: true
utf-8-validate:
optional: true
wsl-utils@0.1.0:
resolution: {integrity: sha512-h3Fbisa2nKGPxCpm89Hk33lBLsnaGBvctQopaBSOW/uIs6FTe1ATyAnKFJrzVs9vpGdsTe73WF3V4lIsk4Gacw==, tarball: https://registry.npmjs.org/wsl-utils/-/wsl-utils-0.1.0.tgz}
engines: {node: '>=18'}
@@ -7071,9 +6973,6 @@ snapshots:
'@biomejs/cli-win32-x64@2.2.4':
optional: true
'@blazediff/core@1.9.1':
optional: true
'@bundled-es-modules/cookie@2.0.1':
dependencies:
cookie: 0.7.2
@@ -8115,8 +8014,6 @@ snapshots:
dependencies:
playwright: 1.50.1
'@polka/url@1.0.0-next.29': {}
'@popperjs/core@2.11.8': {}
'@preact/signals-core@1.13.0': {}
@@ -8817,20 +8714,6 @@ snapshots:
storybook: 10.2.10(@testing-library/dom@10.4.0)(prettier@3.4.1)(react-dom@19.2.2(react@19.2.2))(react@19.2.2)
ts-dedent: 2.2.0
'@storybook/addon-vitest@10.2.10(@vitest/browser-playwright@4.0.14)(@vitest/browser@4.1.0(msw@2.4.8(typescript@5.6.3))(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))(vitest@4.0.14))(@vitest/runner@4.0.14)(react-dom@19.2.2(react@19.2.2))(react@19.2.2)(storybook@10.2.10(@testing-library/dom@10.4.0)(prettier@3.4.1)(react-dom@19.2.2(react@19.2.2))(react@19.2.2))(vitest@4.0.14)':
dependencies:
'@storybook/global': 5.0.0
'@storybook/icons': 2.0.1(react-dom@19.2.2(react@19.2.2))(react@19.2.2)
storybook: 10.2.10(@testing-library/dom@10.4.0)(prettier@3.4.1)(react-dom@19.2.2(react@19.2.2))(react@19.2.2)
optionalDependencies:
'@vitest/browser': 4.1.0(msw@2.4.8(typescript@5.6.3))(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))(vitest@4.0.14)
'@vitest/browser-playwright': 4.0.14(msw@2.4.8(typescript@5.6.3))(playwright@1.50.1)(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))(vitest@4.0.14)
'@vitest/runner': 4.0.14
vitest: 4.0.14(@types/node@20.19.25)(@vitest/browser-playwright@4.0.14)(jiti@1.21.7)(jsdom@27.2.0)(msw@2.4.8(typescript@5.6.3))(yaml@2.7.0)
transitivePeerDependencies:
- react
- react-dom
'@storybook/builder-vite@10.2.10(esbuild@0.25.12)(rollup@4.53.3)(storybook@10.2.10(@testing-library/dom@10.4.0)(prettier@3.4.1)(react-dom@19.2.2(react@19.2.2))(react@19.2.2))(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))':
dependencies:
'@storybook/csf-plugin': 10.2.10(esbuild@0.25.12)(rollup@4.53.3)(storybook@10.2.10(@testing-library/dom@10.4.0)(prettier@3.4.1)(react-dom@19.2.2(react@19.2.2))(react@19.2.2))(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))
@@ -9343,54 +9226,6 @@ snapshots:
transitivePeerDependencies:
- supports-color
'@vitest/browser-playwright@4.0.14(msw@2.4.8(typescript@5.6.3))(playwright@1.50.1)(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))(vitest@4.0.14)':
dependencies:
'@vitest/browser': 4.0.14(msw@2.4.8(typescript@5.6.3))(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))(vitest@4.0.14)
'@vitest/mocker': 4.0.14(msw@2.4.8(typescript@5.6.3))(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))
playwright: 1.50.1
tinyrainbow: 3.0.3
vitest: 4.0.14(@types/node@20.19.25)(@vitest/browser-playwright@4.0.14)(jiti@1.21.7)(jsdom@27.2.0)(msw@2.4.8(typescript@5.6.3))(yaml@2.7.0)
transitivePeerDependencies:
- bufferutil
- msw
- utf-8-validate
- vite
'@vitest/browser@4.0.14(msw@2.4.8(typescript@5.6.3))(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))(vitest@4.0.14)':
dependencies:
'@vitest/mocker': 4.0.14(msw@2.4.8(typescript@5.6.3))(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))
'@vitest/utils': 4.0.14
magic-string: 0.30.21
pixelmatch: 7.1.0
pngjs: 7.0.0
sirv: 3.0.2
tinyrainbow: 3.0.3
vitest: 4.0.14(@types/node@20.19.25)(@vitest/browser-playwright@4.0.14)(jiti@1.21.7)(jsdom@27.2.0)(msw@2.4.8(typescript@5.6.3))(yaml@2.7.0)
ws: 8.19.0
transitivePeerDependencies:
- bufferutil
- msw
- utf-8-validate
- vite
'@vitest/browser@4.1.0(msw@2.4.8(typescript@5.6.3))(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))(vitest@4.0.14)':
dependencies:
'@blazediff/core': 1.9.1
'@vitest/mocker': 4.1.0(msw@2.4.8(typescript@5.6.3))(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))
'@vitest/utils': 4.1.0
magic-string: 0.30.21
pngjs: 7.0.0
sirv: 3.0.2
tinyrainbow: 3.0.3
vitest: 4.0.14(@types/node@20.19.25)(@vitest/browser-playwright@4.0.14)(jiti@1.21.7)(jsdom@27.2.0)(msw@2.4.8(typescript@5.6.3))(yaml@2.7.0)
ws: 8.19.0
transitivePeerDependencies:
- bufferutil
- msw
- utf-8-validate
- vite
optional: true
'@vitest/expect@3.2.4':
dependencies:
'@types/chai': 5.2.3
@@ -9417,16 +9252,6 @@ snapshots:
msw: 2.4.8(typescript@5.6.3)
vite: 7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0)
'@vitest/mocker@4.1.0(msw@2.4.8(typescript@5.6.3))(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))':
dependencies:
'@vitest/spy': 4.1.0
estree-walker: 3.0.3
magic-string: 0.30.21
optionalDependencies:
msw: 2.4.8(typescript@5.6.3)
vite: 7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0)
optional: true
'@vitest/pretty-format@3.2.4':
dependencies:
tinyrainbow: 2.0.0
@@ -9435,11 +9260,6 @@ snapshots:
dependencies:
tinyrainbow: 3.0.3
'@vitest/pretty-format@4.1.0':
dependencies:
tinyrainbow: 3.0.3
optional: true
'@vitest/runner@4.0.14':
dependencies:
'@vitest/utils': 4.0.14
@@ -9457,9 +9277,6 @@ snapshots:
'@vitest/spy@4.0.14': {}
'@vitest/spy@4.1.0':
optional: true
'@vitest/utils@3.2.4':
dependencies:
'@vitest/pretty-format': 3.2.4
@@ -9471,13 +9288,6 @@ snapshots:
'@vitest/pretty-format': 4.0.14
tinyrainbow: 3.0.3
'@vitest/utils@4.1.0':
dependencies:
'@vitest/pretty-format': 4.1.0
convert-source-map: 2.0.0
tinyrainbow: 3.0.3
optional: true
'@xterm/addon-canvas@0.7.0(@xterm/xterm@5.5.0)':
dependencies:
'@xterm/xterm': 5.5.0
@@ -12226,8 +12036,6 @@ snapshots:
react: 19.2.2
react-dom: 19.2.2(react@19.2.2)
mrmime@2.0.1: {}
ms@2.0.0: {}
ms@2.1.3: {}
@@ -12496,10 +12304,6 @@ snapshots:
pirates@4.0.7: {}
pixelmatch@7.1.0:
dependencies:
pngjs: 7.0.0
pkg-dir@4.2.0:
dependencies:
find-up: 4.1.0
@@ -12512,8 +12316,6 @@ snapshots:
optionalDependencies:
fsevents: 2.3.2
pngjs@7.0.0: {}
possible-typed-array-names@1.0.0: {}
postcss-import@15.1.0(postcss@8.5.6):
@@ -13169,12 +12971,6 @@ snapshots:
signal-exit@4.1.0: {}
sirv@3.0.2:
dependencies:
'@polka/url': 1.0.0-next.29
mrmime: 2.0.1
totalist: 3.0.1
sisteransi@1.0.5: {}
slash@3.0.0: {}
@@ -13468,8 +13264,6 @@ snapshots:
toposort@2.0.2: {}
totalist@3.0.1: {}
tough-cookie@4.1.4:
dependencies:
psl: 1.9.0
@@ -13763,7 +13557,7 @@ snapshots:
jiti: 1.21.7
yaml: 2.7.0
vitest@4.0.14(@types/node@20.19.25)(@vitest/browser-playwright@4.0.14)(jiti@1.21.7)(jsdom@27.2.0)(msw@2.4.8(typescript@5.6.3))(yaml@2.7.0):
vitest@4.0.14(@types/node@20.19.25)(jiti@1.21.7)(jsdom@27.2.0)(msw@2.4.8(typescript@5.6.3))(yaml@2.7.0):
dependencies:
'@vitest/expect': 4.0.14
'@vitest/mocker': 4.0.14(msw@2.4.8(typescript@5.6.3))(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))
@@ -13787,7 +13581,6 @@ snapshots:
why-is-node-running: 2.3.0
optionalDependencies:
'@types/node': 20.19.25
'@vitest/browser-playwright': 4.0.14(msw@2.4.8(typescript@5.6.3))(playwright@1.50.1)(vite@7.2.6(@types/node@20.19.25)(jiti@1.21.7)(yaml@2.7.0))(vitest@4.0.14)
jsdom: 27.2.0
transitivePeerDependencies:
- jiti
@@ -13914,8 +13707,6 @@ snapshots:
ws@8.18.3: {}
ws@8.19.0: {}
wsl-utils@0.1.0:
dependencies:
is-wsl: 3.1.1
-42
View File
@@ -402,7 +402,6 @@ export type DeploymentConfig = Readonly<{
const chatProviderConfigsPath = "/api/experimental/chats/providers";
const chatModelConfigsPath = "/api/experimental/chats/model-configs";
const mcpServerConfigsPath = "/api/experimental/mcp/servers";
type ChatCostDateParams = {
start_date?: string;
@@ -627,13 +626,6 @@ class ApiMethods {
return response.data;
};
getUser = async (userIdOrName: string): Promise<TypesGen.User> => {
const response = await this.axios.get<TypesGen.User>(
`/api/v2/users/${encodeURIComponent(userIdOrName)}`,
);
return response.data;
};
/**
* Get users for workspace owner selection. Requires
* permission to create workspaces for other users in the
@@ -3222,40 +3214,6 @@ class ApiMethods {
`${chatModelConfigsPath}/${encodeURIComponent(modelConfigId)}`,
);
};
getMCPServerConfigs = async (): Promise<TypesGen.MCPServerConfig[]> => {
const response =
await this.axios.get<TypesGen.MCPServerConfig[]>(mcpServerConfigsPath);
return response.data;
};
createMCPServerConfig = async (
req: TypesGen.CreateMCPServerConfigRequest,
): Promise<TypesGen.MCPServerConfig> => {
const response = await this.axios.post<TypesGen.MCPServerConfig>(
mcpServerConfigsPath,
req,
);
return response.data;
};
updateMCPServerConfig = async (
id: string,
req: TypesGen.UpdateMCPServerConfigRequest,
): Promise<TypesGen.MCPServerConfig> => {
const response = await this.axios.patch<TypesGen.MCPServerConfig>(
`${mcpServerConfigsPath}/${encodeURIComponent(id)}`,
req,
);
return response.data;
};
deleteMCPServerConfig = async (id: string): Promise<void> => {
await this.axios.delete(
`${mcpServerConfigsPath}/${encodeURIComponent(id)}`,
);
};
getAIBridgeModels = async (options: SearchParamOptions) => {
const url = getURLWithSearchParams("/api/v2/aibridge/models", options);
+3 -2
View File
@@ -318,9 +318,10 @@
"json_name": "store",
"go_name": "Store",
"type": "boolean",
"description": "Whether to store the response on OpenAI for later retrieval via the API and dashboard logs",
"description": "Whether to store the output for model distillation or evals",
"required": false,
"input_type": "select"
"input_type": "select",
"hidden": true
},
{
"json_name": "metadata",
-1
View File
@@ -73,7 +73,6 @@ const makeChat = (
id,
owner_id: "owner-1",
last_model_config_id: "model-1",
mcp_server_ids: [],
title: `Chat ${id}`,
status: "running",
created_at: "2025-01-01T00:00:00.000Z",
+3 -45
View File
@@ -139,7 +139,6 @@ export const infiniteChats = (opts?: { q?: string; archived?: boolean }) => {
});
},
refetchOnWindowFocus: true as const,
retry: 3,
} satisfies UseInfiniteQueryOptions<TypesGen.Chat[]>;
};
@@ -367,9 +366,9 @@ export const promoteChatQueuedMessage = (
) => ({
mutationFn: (queuedMessageId: number) =>
API.promoteChatQueuedMessage(chatId, queuedMessageId),
// No onSuccess invalidation needed: the caller upserts the
// promoted message from the response, and the per-chat
// WebSocket delivers queue and status updates in real-time.
// No onSuccess invalidation needed: the per-chat WebSocket
// delivers the promoted message, queue update, and status
// change in real-time.
});
export const chatDiffContentsKey = (chatId: string) =>
@@ -632,44 +631,3 @@ export const deleteChatUsageLimitGroupOverride = (
});
},
});
// ── MCP Server Configs ───────────────────────────────────────

const mcpServerConfigsKey = ["mcp-server-configs"] as const;

// Query options for listing every admin-configured MCP server.
export const mcpServerConfigs = () => {
	return {
		queryKey: mcpServerConfigsKey,
		queryFn: (): Promise<TypesGen.MCPServerConfig[]> =>
			API.getMCPServerConfigs(),
	};
};
// Drop the cached config list so any mounted queries refetch it.
const invalidateMCPServerConfigQueries = (queryClient: QueryClient) =>
	queryClient.invalidateQueries({ queryKey: mcpServerConfigsKey });
// Mutation options for creating a config. On success the cached list
// is invalidated so the UI picks up the new entry.
export const createMCPServerConfig = (queryClient: QueryClient) => {
	return {
		mutationFn: (req: TypesGen.CreateMCPServerConfigRequest) =>
			API.createMCPServerConfig(req),
		onSuccess: () => invalidateMCPServerConfigQueries(queryClient),
	};
};
// Paired arguments for the update mutation: the id of the config to
// modify plus the partial request body applied to it.
type UpdateMCPServerConfigMutationArgs = {
	id: string;
	req: TypesGen.UpdateMCPServerConfigRequest;
};
// Mutation options for updating a config; invalidates the cached list
// on success so consumers see the edited record.
export const updateMCPServerConfig = (queryClient: QueryClient) => {
	return {
		mutationFn: (args: UpdateMCPServerConfigMutationArgs) =>
			API.updateMCPServerConfig(args.id, args.req),
		onSuccess: () => invalidateMCPServerConfigQueries(queryClient),
	};
};
// Mutation options for deleting a config by id; invalidates the cached
// list on success so the removed entry disappears from the UI.
export const deleteMCPServerConfig = (queryClient: QueryClient) => {
	return {
		mutationFn: (id: string) => API.deleteMCPServerConfig(id),
		onSuccess: () => invalidateMCPServerConfigQueries(queryClient),
	};
};
-9
View File
@@ -33,15 +33,6 @@ export function usersKey(req: UsersRequest) {
return ["users", req] as const;
}
// Stable react-query cache key for a single user, keyed by username.
export const userByNameKey = (username: string) =>
	["user", username] as const;

// Query options for fetching one user by username.
export const userByName = (username: string): UseQueryOptions<User> => ({
	queryKey: userByNameKey(username),
	queryFn: () => API.getUser(username),
});
export function paginatedUsers(
searchParams: URLSearchParams,
): UsePaginatedQueryOptions<GetUsersResponse, UsersRequest> {
-114
View File
@@ -1070,7 +1070,6 @@ export interface Chat {
readonly created_at: string;
readonly updated_at: string;
readonly archived: boolean;
readonly mcp_server_ids: readonly string[];
}
// From codersdk/deployment.go
@@ -2077,7 +2076,6 @@ export interface ConvertLoginRequest {
export interface CreateChatMessageRequest {
readonly content: readonly ChatInputPart[];
readonly model_config_id?: string;
readonly mcp_server_ids?: string[];
}
// From codersdk/chats.go
@@ -2125,7 +2123,6 @@ export interface CreateChatRequest {
readonly content: readonly ChatInputPart[];
readonly workspace_id?: string;
readonly model_config_id?: string;
readonly mcp_server_ids?: readonly string[];
}
// From codersdk/users.go
@@ -2166,32 +2163,6 @@ export interface CreateGroupRequest {
readonly quota_allowance: number;
}
// From codersdk/mcp.go
/**
* CreateMCPServerConfigRequest is the request to create a new MCP server config.
*/
export interface CreateMCPServerConfigRequest {
	// Presentation metadata shown for the server in the UI.
	readonly display_name: string;
	readonly slug: string;
	readonly description: string;
	readonly icon_url: string;
	// Connection details. NOTE(review): expected transport values appear
	// to be "streamable_http" or "sse" per MCPServerConfig — confirm.
	readonly transport: string;
	readonly url: string;
	// auth_type selects which credential group below applies (MCPServerConfig
	// documents "none", "oauth2", "api_key", "custom_headers").
	readonly auth_type: string;
	readonly oauth2_client_id?: string;
	readonly oauth2_client_secret?: string;
	readonly oauth2_auth_url?: string;
	readonly oauth2_token_url?: string;
	readonly oauth2_scopes?: string;
	readonly api_key_header?: string;
	readonly api_key_value?: string;
	readonly custom_headers?: Record<string, string>;
	// Tool governance lists constraining which tools the server exposes.
	readonly tool_allow_list?: readonly string[];
	readonly tool_deny_list?: readonly string[];
	// Availability policy ("force_on", "default_on", "default_off" per
	// MCPServerConfig) and whether the server is enabled at all.
	readonly availability: string;
	readonly enabled: boolean;
}
// From codersdk/organizations.go
export interface CreateOrganizationRequest {
readonly name: string;
@@ -3721,51 +3692,6 @@ export interface LoginWithPasswordResponse {
readonly session_token: string;
}
// From codersdk/mcp.go
/**
* MCPServerConfig represents an admin-configured MCP server.
*/
export interface MCPServerConfig {
	readonly id: string;
	// Presentation metadata.
	readonly display_name: string;
	readonly slug: string;
	readonly description: string;
	readonly icon_url: string;
	readonly transport: string; // "streamable_http" or "sse"
	readonly url: string;
	readonly auth_type: string; // "none", "oauth2", "api_key", "custom_headers"
	/**
	 * OAuth2 fields (only populated for admins).
	 */
	readonly oauth2_client_id?: string;
	readonly has_oauth2_secret: boolean;
	readonly oauth2_auth_url?: string;
	readonly oauth2_token_url?: string;
	readonly oauth2_scopes?: string;
	/**
	 * API key fields (only populated for admins).
	 */
	readonly api_key_header?: string;
	readonly has_api_key: boolean;
	readonly has_custom_headers: boolean;
	/**
	 * Tool governance.
	 */
	readonly tool_allow_list: readonly string[];
	readonly tool_deny_list: readonly string[];
	/**
	 * Availability policy set by admin.
	 */
	readonly availability: string; // "force_on", "default_on", "default_off"
	readonly enabled: boolean;
	// Timestamps serialized as strings — NOTE(review): fixtures elsewhere
	// use ISO-8601 (e.g. "2025-01-01T00:00:00.000Z"); confirm format.
	readonly created_at: string;
	readonly updated_at: string;
	/**
	 * Per-user state (populated for non-admin requests).
	 */
	readonly auth_connected: boolean;
}
// From codersdk/provisionerdaemons.go
/**
* MatchedProvisioners represents the number of provisioner daemons
@@ -4413,20 +4339,6 @@ export interface OIDCAuthMethod extends AuthMethod {
readonly iconUrl: string;
}
// From codersdk/users.go
/**
 * OIDCClaimsResponse represents the merged OIDC claims for a user.
 */
export interface OIDCClaimsResponse {
	/**
	 * Claims are the merged claims from the OIDC provider. These
	 * are the union of the ID token claims and the userinfo claims,
	 * where userinfo claims take precedence on conflict.
	 * Values are left as `unknown` because the key set depends on the
	 * provider's claim schema; consumers must narrow before use.
	 */
	// empty interface{} type, falling back to unknown
	readonly claims: Record<string, unknown>;
}
// From codersdk/deployment.go
export interface OIDCConfig {
readonly allow_signups: boolean;
@@ -6918,32 +6830,6 @@ export interface UpdateInboxNotificationReadStatusResponse {
readonly unread_count: number;
}
// From codersdk/mcp.go
/**
* UpdateMCPServerConfigRequest is the request to update an MCP server config.
*/
export interface UpdateMCPServerConfigRequest {
	// All fields are optional: only provided fields are updated
	// (partial-update semantics — confirm against the server handler).
	readonly display_name?: string;
	readonly slug?: string;
	readonly description?: string;
	readonly icon_url?: string;
	readonly transport?: string;
	readonly url?: string;
	readonly auth_type?: string;
	// OAuth2 credential fields.
	readonly oauth2_client_id?: string;
	readonly oauth2_client_secret?: string;
	readonly oauth2_auth_url?: string;
	readonly oauth2_token_url?: string;
	readonly oauth2_scopes?: string;
	// API key / custom header credential fields.
	readonly api_key_header?: string;
	readonly api_key_value?: string;
	readonly custom_headers?: Record<string, string>;
	// Tool governance lists.
	readonly tool_allow_list?: string[];
	readonly tool_deny_list?: string[];
	// Availability policy and enablement.
	readonly availability?: string;
	readonly enabled?: boolean;
}
// From codersdk/notifications.go
export interface UpdateNotificationTemplateMethod {
readonly method?: string;
@@ -324,7 +324,6 @@ const ChatMessageInput = memo(
namespace: "ChatMessageInput",
theme: {
paragraph: "m-0",
inlineDecorator: "mx-1",
},
onError: (error: Error) => console.error("Lexical error:", error),
nodes: [FileReferenceNode],
@@ -30,7 +30,6 @@ export function FileReferenceChip({
isSelected,
onRemove,
onClick,
className: extraClassName,
}: {
fileName: string;
startLine: number;
@@ -38,7 +37,6 @@ export function FileReferenceChip({
isSelected?: boolean;
onRemove?: () => void;
onClick?: () => void;
className?: string;
}) {
const shortFile = fileName.split("/").pop() || fileName;
const lineLabel =
@@ -47,10 +45,9 @@ export function FileReferenceChip({
return (
<span
className={cn(
"inline-flex h-6 max-w-[300px] cursor-pointer select-none items-center gap-1.5 rounded-md border border-border-default bg-surface-primary px-1.5 align-middle text-xs text-content-primary shadow-sm transition-colors",
"inline-flex h-6 max-w-[300px] cursor-pointer select-none items-center gap-1.5 rounded-md border border-border-default bg-surface-secondary px-1.5 align-middle text-xs text-content-primary shadow-sm transition-colors",
isSelected &&
"border-content-link bg-content-link/10 ring-1 ring-content-link/40",
extraClassName,
)}
contentEditable={false}
title={`${fileName}:${lineLabel}`}
@@ -122,9 +119,8 @@ export class FileReferenceNode extends DecoratorNode<ReactNode> {
this.__content = content;
}
createDOM(config: EditorConfig): HTMLElement {
createDOM(_config: EditorConfig): HTMLElement {
const span = document.createElement("span");
span.className = config.theme.inlineDecorator ?? "";
span.style.display = "inline";
span.style.userSelect = "none";
return span;
@@ -1,5 +1,4 @@
import type { Meta, StoryObj } from "@storybook/react-vite";
import { expect, within } from "storybook/test";
import { Response } from "./response";
const sampleMarkdown = `
@@ -67,39 +66,3 @@ export const MarkdownAndLinksLight: Story = {
theme: "light",
},
};
// Verifies that JSX-like syntax in LLM output is preserved as
// escaped text rather than being swallowed by the HTML pipeline.
// Sample prose containing an un-fenced JSX block, as an LLM might emit.
const jsxProseMarkdown = `
\`getLineAnnotations\` depends on \`activeCommentBox\` which could shift.
<RemoteDiffPanel
commentBox={commentBox}
scrollToFile={scrollTarget}
onScrollToFileComplete={handleScrollComplete}
/>
The props that might change on every \`RemoteDiffPanel\` re-render:
- \`isLoading\` only during refetch
- \`getLineAnnotations\` only when \`activeCommentBox\` changes
`;
export const JsxInProse: Story = {
	args: {
		children: jsxProseMarkdown,
	},
	play: async ({ canvasElement }) => {
		const canvas = within(canvasElement);
		// These strings live inside the <RemoteDiffPanel .../> JSX block.
		// Without the rehype-raw fix they are silently eaten by the
		// HTML sanitizer and never reach the DOM.
		// The tag name itself is the token most likely to be consumed
		// by HTML parsing, so assert it explicitly.
		const tagName = await canvas.findByText(/<RemoteDiffPanel/);
		expect(tagName).toBeInTheDocument();
		const marker = await canvas.findByText(/scrollToFile=\{scrollTarget\}/);
		expect(marker).toBeInTheDocument();
		const marker2 = await canvas.findByText(/commentBox=\{commentBox\}/);
		expect(marker2).toBeInTheDocument();
	},
};
+1 -17
View File
@@ -5,12 +5,7 @@ import {
} from "@pierre/diffs/react";
import type { ComponentPropsWithRef, ReactNode } from "react";
import { useMemo } from "react";
import {
type Components,
defaultRehypePlugins,
Streamdown,
type UrlTransform,
} from "streamdown";
import { type Components, Streamdown, type UrlTransform } from "streamdown";
import { cn } from "utils/cn";
interface ResponseProps extends Omit<ComponentPropsWithRef<"div">, "children"> {
@@ -18,16 +13,6 @@ interface ResponseProps extends Omit<ComponentPropsWithRef<"div">, "children"> {
urlTransform?: UrlTransform;
}
// Omit rehype-raw so HTML-like syntax in LLM output is rendered as
// escaped text instead of being parsed by the HTML5 engine. Without
// this, JSX fragments such as <ComponentName prop={value} /> are
// consumed by rehype-raw and then stripped by rehype-sanitize,
// silently destroying content mid-stream.
// NOTE(review): ordering (sanitize before harden) mirrors streamdown's
// defaultRehypePlugins — confirm if that library's defaults change.
const chatRehypePlugins = [
	defaultRehypePlugins.sanitize,
	defaultRehypePlugins.harden,
];
// Clears the viewer's default backgrounds (code, line rows, header)
// so the rendered file inherits the surrounding page background.
const fileViewerCSS =
	"pre, [data-line], [data-diffs-header] { background-color: transparent !important; }";
@@ -256,7 +241,6 @@ export const Response = ({
controls={false}
components={components}
urlTransform={urlTransform}
rehypePlugins={chatRehypePlugins}
>
{children}
</Streamdown>
+2 -85
View File
@@ -150,91 +150,8 @@ export const formatResultOutput = (result: unknown): string | null => {
// Clears the viewer's default backgrounds (code, line rows, header)
// so the rendered file inherits the surrounding page background.
export const fileViewerCSS =
	"pre, [data-line], [data-diffs-header] { background-color: transparent !important; }";
// Selection override CSS maps the library's gold/yellow selection
// palette to the Coder blue accent (`--content-link`) so line
// highlighting feels native to the rest of the page.
//
// The library has two selection code paths: context lines use
// `--diffs-bg-selection`, but change-addition/deletion lines
// use a separate `color-mix()` against `--diffs-line-bg`. To
// guarantee a uniform highlight across all line types we set
// the CSS variables for annotations AND apply direct rules
// with `!important` for line and gutter elements.
//
// The `:host` selector implies this stylesheet is injected into the
// viewer's shadow root; entries are joined into one flat rule string.
const SELECTION_OVERRIDE_CSS = [
	// Variable overrides for annotation areas and library internals.
	":host {",
	" --diffs-bg-selection-override: hsl(var(--content-link) / 0.08);",
	" --diffs-bg-selection-number-override: hsl(var(--content-link) / 0.13);",
	" --diffs-selection-number-fg: hsl(var(--content-link));",
	"}",
	// Direct rules that override both context and change-line
	// selection backgrounds so every selected line looks the same.
	"[data-selected-line][data-line] {",
	" background-color: hsl(var(--content-link) / 0.08) !important;",
	"}",
	"[data-selected-line][data-column-number] {",
	" background-color: hsl(var(--content-link) / 0.13) !important;",
	" color: hsl(var(--content-link)) !important;",
	"}",
	// Clear the selection tint from annotation rows so the inline
	// prompt input stands out clearly against the selected lines.
	"[data-line-annotation][data-selected-line] [data-annotation-content] {",
	" background-color: transparent !important;",
	"}",
	"[data-line-annotation][data-selected-line]::before {",
	" background-color: transparent !important;",
	"}",
	"[data-selected-line][data-gutter-buffer='annotation'] {",
	" background-color: transparent !important;",
	"}",
].join(" ");
// Restyled separators: quiet, full-width dividers that fade
// into the background instead of drawing attention.
// Joined into a single flat rule string, like SELECTION_OVERRIDE_CSS.
const SEPARATOR_CSS = [
	// Transparent backgrounds so separators blend with the
	// code area rather than forming a distinct band.
	":host {",
	" --diffs-bg-separator-override: transparent;",
	"}",
	"[data-separator-content] {",
	" border-radius: 0 !important;",
	" background-color: transparent !important;",
	"}",
	"[data-separator-wrapper] {",
	" border-radius: 0 !important;",
	"}",
	// Remove the inline padding that creates the inset pill look
	// so separators span the full width of the diff.
	"[data-unified] [data-separator='line-info'] [data-separator-wrapper] {",
	" padding-inline: 0 !important;",
	"}",
	// The first separator in a file just says "N unmodified
	// lines" before the first hunk — that's obvious context
	// that adds no value, so hide it entirely.
	"[data-separator='line-info'][data-separator-first] {",
	" display: none !important;",
	"}",
	// Thin single border and muted text so collapsed-line
	// indicators read as a quiet hint, not a landmark.
	"[data-separator='line-info'] {",
	" height: 28px !important;",
	" border-top: 1px solid hsl(var(--border-default));",
	" border-bottom: 1px solid hsl(var(--border-default));",
	"}",
	"[data-separator-content] {",
	" font-size: 11px !important;",
	" color: hsl(var(--content-secondary)) !important;",
	" opacity: 0.8;",
	"}",
].join(" ");
export const diffViewerCSS = [
"pre, [data-line]:not([data-selected-line]), [data-diffs-header] { background-color: transparent !important; }",
"[data-diffs-header] { border-left: 1px solid var(--border); }",
SELECTION_OVERRIDE_CSS,
SEPARATOR_CSS,
].join(" ");
export const diffViewerCSS =
"pre, [data-line], [data-diffs-header] { background-color: transparent !important; } [data-diffs-header] { border-left: 1px solid var(--border); }";
// Theme-aware option factories shared across tool renderers.
export function getDiffViewerOptions(isDark: boolean) {
+33 -12
View File
@@ -7,9 +7,9 @@ import {
createContext,
type FC,
type PropsWithChildren,
useCallback,
useContext,
useEffect,
useMemo,
useState,
} from "react";
import { useQuery } from "react-query";
@@ -95,6 +95,11 @@ export const ProxyProvider: FC<PropsWithChildren> = ({ children }) => {
// proxy.
const [userSavedProxy, setUserSavedProxy] = useState(loadUserSelectedProxy());
// Load the initial state from local storage.
const [proxy, setProxy] = useState<PreferredProxy>(
computeUsableURLS(userSavedProxy),
);
const { permissions } = useAuthenticated();
const { metadata } = useEmbeddedMetadata();
@@ -126,30 +131,43 @@ export const ProxyProvider: FC<PropsWithChildren> = ({ children }) => {
loaded: latenciesLoaded,
} = useProxyLatency(proxiesResp);
const proxy = useMemo(
() =>
// updateProxy is a helper function that when called will
// update the proxy being used.
const updateProxy = useCallback(() => {
// Update the saved user proxy for the caller.
setUserSavedProxy(loadUserSelectedProxy());
setProxy(
getPreferredProxy(
proxiesResp ?? [],
userSavedProxy,
loadUserSelectedProxy(),
proxyLatencies,
// Do not auto select based on latencies, as inconsistent
// latencies can cause this to change on each call. The proxy
// value should be stable to prevent flickering.
// Do not auto select based on latencies, as inconsistent latencies can cause this
// to change on each call. updateProxy should be stable when selecting a proxy to
// prevent flickering.
false,
),
[proxiesResp, userSavedProxy, proxyLatencies],
);
);
}, [proxiesResp, proxyLatencies]);
// This useEffect ensures the proxy to be used is updated whenever the state changes.
// This includes proxies being loaded, latencies being calculated, and the user selecting a proxy.
// biome-ignore lint/correctness/useExhaustiveDependencies: Only update if the source data changes
useEffect(() => {
updateProxy();
}, [proxiesResp, proxyLatencies]);
// This useEffect will auto select the best proxy if the user has not selected one.
// It must wait until all latencies are loaded to select based on latency. This does mean
// the first time a user loads the page, the proxy will "flicker" to the best proxy.
//
// Once the page is loaded, or the user selects a proxy, this will not run again.
// biome-ignore lint/correctness/useExhaustiveDependencies: Only update if the source data changes
useEffect(() => {
if (loadUserSelectedProxy() !== undefined) {
return; // User has selected a proxy, do not auto select.
}
if (!latenciesLoaded) {
// Wait until the latencies are loaded first.
return;
}
@@ -162,7 +180,7 @@ export const ProxyProvider: FC<PropsWithChildren> = ({ children }) => {
if (best?.proxy) {
saveUserSelectedProxy(best.proxy);
setUserSavedProxy(best.proxy);
updateProxy();
}
}, [latenciesLoaded, proxiesResp, proxyLatencies]);
@@ -181,12 +199,15 @@ export const ProxyProvider: FC<PropsWithChildren> = ({ children }) => {
// These functions are exposed to allow the user to select a proxy.
setProxy: (proxy: Region) => {
// Save to local storage to persist the user's preference across reloads
saveUserSelectedProxy(proxy);
setUserSavedProxy(proxy);
// Update the selected proxy
updateProxy();
},
clearProxy: () => {
// Clear the user's selection from local storage.
clearUserSelectedProxy();
setUserSavedProxy(undefined);
updateProxy();
},
}}
>
+7 -2
View File
@@ -21,9 +21,14 @@ export const useWebpushNotifications = (): WebpushNotifications => {
const [subscribed, setSubscribed] = useState<boolean>(false);
const [loading, setLoading] = useState<boolean>(true);
const enabled = enabledExperimentsQuery.data?.includes("web-push") ?? false;
const [enabled, setEnabled] = useState<boolean>(false);
useEffect(() => {
// Check if the experiment is enabled.
if (enabledExperimentsQuery.data?.includes("web-push")) {
setEnabled(true);
}
// Check if browser supports push notifications
if (!("Notification" in window) || !("serviceWorker" in navigator)) {
setSubscribed(false);
@@ -45,7 +50,7 @@ export const useWebpushNotifications = (): WebpushNotifications => {
};
checkSubscription();
}, []);
}, [enabledExperimentsQuery.data]);
const subscribe = async (): Promise<void> => {
try {
@@ -20,7 +20,9 @@ export function useSyncFormParameters({
// Keep track of form values in a ref to avoid unnecessary updates to rich_parameter_values
const formValuesRef = useRef(formValues);
formValuesRef.current = formValues;
useEffect(() => {
formValuesRef.current = formValues;
}, [formValues]);
useEffect(() => {
if (!parameters) return;
@@ -1,6 +1,4 @@
import type { Meta, StoryObj } from "@storybook/react-vite";
import type { ChatMessageInputRef } from "components/ChatMessageInput/ChatMessageInput";
import { useEffect, useRef } from "react";
import { expect, fn, userEvent, waitFor, within } from "storybook/test";
import { AgentChatInput, type UploadState } from "./AgentChatInput";
@@ -262,72 +260,6 @@ export const WithAttachmentError: Story = {
})(),
};
/** File reference chip rendered inline with text in the editor. */
export const WithFileReference: Story = {
	render: (args) => {
		const ref = useRef<ChatMessageInputRef>(null);
		useEffect(() => {
			// Imperatively insert one chip once the editor handle mounts.
			const handle = ref.current;
			if (!handle) return;
			handle.addFileReference({
				fileName: "site/src/components/Button.tsx",
				startLine: 42,
				endLine: 42,
				content: "export const Button = ...",
			});
		}, []);
		return <AgentChatInput {...args} inputRef={ref} />;
	},
	args: {
		initialValue: "Can you refactor ",
	},
	play: async ({ canvasElement }) => {
		const canvas = within(canvasElement);
		// The chip displays the basename of the referenced file.
		await waitFor(() => {
			expect(canvas.getByText(/Button\.tsx/)).toBeInTheDocument();
		});
	},
};
/** Multiple file reference chips rendered inline with text. */
export const WithMultipleFileReferences: Story = {
	render: (args) => {
		const ref = useRef<ChatMessageInputRef>(null);
		useEffect(() => {
			// Insert two chips separated by plain text once the editor
			// handle mounts, exercising chip + text interleaving.
			const handle = ref.current;
			if (!handle) return;
			handle.addFileReference({
				fileName: "api/handler.go",
				startLine: 1,
				endLine: 50,
				content: "...",
			});
			handle.insertText(" and ");
			handle.addFileReference({
				fileName: "api/handler_test.go",
				startLine: 10,
				endLine: 30,
				content: "...",
			});
		}, []);
		return <AgentChatInput {...args} inputRef={ref} />;
	},
	args: {
		initialValue: "Compare ",
	},
	play: async ({ canvasElement }) => {
		const canvas = within(canvasElement);
		// Both chips display their file basenames.
		await waitFor(() => {
			expect(canvas.getByText(/handler\.go/)).toBeInTheDocument();
			expect(canvas.getByText(/handler_test\.go/)).toBeInTheDocument();
		});
	},
};
export const AttachmentsOnly: Story = {
args: (() => {
const file = createMockFile("photo.png", "image/png");
+7 -11
View File
@@ -469,17 +469,13 @@ export const AgentChatInput = memo<AgentChatInputProps>(
// Re-focus the editor after a send completes (isLoading goes
// from true → false) so the user can immediately type again.
// Uses the "store previous value in state" pattern recommended
// by React for responding to prop changes during render.
const [prevIsLoading, setPrevIsLoading] = useState(isLoading);
if (prevIsLoading !== isLoading) {
setPrevIsLoading(isLoading);
if (prevIsLoading && !isLoading) {
if (!isMobileViewport()) {
internalRef.current?.focus();
}
const prevIsLoadingRef = useRef(isLoading);
useEffect(() => {
if (prevIsLoadingRef.current && !isLoading && !isMobileViewport()) {
internalRef.current?.focus();
}
}
prevIsLoadingRef.current = isLoading;
}, [isLoading]);
const isUploading = attachments.some(
(f) => uploadStates?.get(f)?.status === "uploading",
@@ -642,7 +638,7 @@ export const AgentChatInput = memo<AgentChatInputProps>(
<PencilIcon className="h-3.5 w-3.5" />
{isLoading
? "Saving edit..."
: "Editing will delete all subsequent messages and restart the conversation here."}
: "Editing message \u2014 all subsequent messages will be deleted"}
</span>
<Button
type="button"
+5 -13
View File
@@ -35,7 +35,6 @@ import { AgentChatInput } from "./AgentChatInput";
import {
getModelCatalogStatusMessage,
getModelSelectorPlaceholder,
getNormalizedModelRef,
hasConfiguredModelsInCatalog,
} from "./modelOptions";
import { formatUsageLimitMessage, isUsageLimitData } from "./usageLimitMessage";
@@ -159,7 +158,8 @@ export const AgentCreateForm: FC<AgentCreateFormProps> = ({
const byConfigID = new Map<string, string>();
for (const config of modelConfigs) {
const { provider, model } = getNormalizedModelRef(config);
const provider = config.provider.trim().toLowerCase();
const model = config.model.trim();
if (!provider || !model) {
continue;
}
@@ -245,14 +245,6 @@ export const AgentCreateForm: FC<AgentCreateFormProps> = ({
lastUsedModelID,
]);
// Keep a mutable ref to selectedWorkspaceId and selectedModel so
// that the onSend callback always sees the latest values without
// the shared input component re-rendering on every change.
const selectedWorkspaceIdRef = useRef(selectedWorkspaceId);
selectedWorkspaceIdRef.current = selectedWorkspaceId;
const selectedModelRef = useRef(selectedModel);
selectedModelRef.current = selectedModel;
const handleWorkspaceChange = (value: string) => {
if (value === autoCreateWorkspaceValue) {
setSelectedWorkspaceId(null);
@@ -278,15 +270,15 @@ export const AgentCreateForm: FC<AgentCreateFormProps> = ({
await onCreateChat({
message,
fileIDs,
workspaceId: selectedWorkspaceIdRef.current ?? undefined,
model: selectedModelRef.current || undefined,
workspaceId: selectedWorkspaceId ?? undefined,
model: selectedModel || undefined,
}).catch(() => {
// Re-enable draft persistence so the user can edit
// and retry after a failed send attempt.
resetDraft();
});
},
[submitDraft, resetDraft, onCreateChat],
[submitDraft, resetDraft, onCreateChat, selectedWorkspaceId, selectedModel],
);
const selectedWorkspace = selectedWorkspaceId
@@ -44,10 +44,7 @@ const AgentDetailLayout: FC = () => {
setChatErrorReason: () => {},
clearChatErrorReason: () => {},
requestArchiveAgent: () => {},
requestArchiveAndDeleteWorkspace: (
_chatId: string,
_workspaceId: string,
) => {},
requestArchiveAndDeleteWorkspace: () => {},
requestUnarchiveAgent: () => {},
isSidebarCollapsed: false,
onToggleSidebarCollapsed: () => {},
@@ -109,7 +106,6 @@ const baseChatFields = {
owner_id: "owner-id",
workspace_id: mockWorkspace.id,
last_model_config_id: "model-config-1",
mcp_server_ids: [],
created_at: "2026-02-18T00:00:00.000Z",
updated_at: "2026-02-18T00:00:00.000Z",
archived: false,
@@ -211,9 +207,7 @@ const meta: Meta<typeof AgentDetailLayout> = {
}),
},
beforeEach: () => {
localStorage.removeItem(RIGHT_PANEL_OPEN_KEY);
spyOn(API, "getApiKey").mockRejectedValue(new Error("missing API key"));
return () => localStorage.removeItem(RIGHT_PANEL_OPEN_KEY);
},
};
@@ -590,6 +584,22 @@ export const CompletedWithDiffPanel: Story = {
},
};
/** Right panel stays closed when no diff-status URL exists. */
export const NoDiffUrl: Story = {
parameters: {
queries: buildQueries(
{
id: CHAT_ID,
...baseChatFields,
title: "No diff yet",
status: "completed",
},
{ messages: [], queued_messages: [], has_more: false },
{ diffUrl: undefined },
),
},
};
/** Subagent tool-call/result messages render subagent cards. */
export const WithSubagentCards: Story = {
parameters: {
+3 -7
View File
@@ -57,7 +57,6 @@ import {
getModelCatalogStatusMessage,
getModelOptionsFromCatalog,
getModelSelectorPlaceholder,
getNormalizedModelRef,
hasConfiguredModelsInCatalog,
} from "./modelOptions";
import { parsePullRequestUrl } from "./pullRequest";
@@ -396,7 +395,8 @@ const AgentDetail: FC = () => {
const modelConfigIDByModelID = useMemo(() => {
const byModelID = new Map<string, string>();
for (const config of chatModelConfigsQuery.data ?? []) {
const { provider, model } = getNormalizedModelRef(config);
const provider = config.provider.trim().toLowerCase();
const model = config.model.trim();
if (!provider || !model) {
continue;
}
@@ -709,11 +709,7 @@ const AgentDetail: FC = () => {
store.clearStreamError();
store.setChatStatus("pending");
try {
const promotedMessage = await promoteQueuedMutation.mutateAsync(id);
// Insert the promoted message into the store immediately
// so it appears in the timeline without waiting for the
// WebSocket to deliver it.
store.upsertDurableMessage(promotedMessage);
await promoteQueuedMutation.mutateAsync(id);
} catch (error) {
store.setQueuedMessages(previousQueuedMessages);
store.setChatStatus(previousChatStatus);
@@ -183,7 +183,6 @@ const makeChat = (chatID: string): TypesGen.Chat => ({
id: chatID,
owner_id: "owner-1",
last_model_config_id: "model-1",
mcp_server_ids: [],
title: "test",
status: "running",
created_at: "2025-01-01T00:00:00.000Z",
@@ -2339,222 +2338,6 @@ describe("useChatStore", () => {
expect(result.current.orderedMessageIDs).toEqual([1]);
});
});
it("does not wipe WebSocket-delivered message when queue_update triggers cache change", async () => {
	immediateAnimationFrame();
	const chatID = "chat-queue-promote";
	const msg1 = makeMessage(chatID, 1, "user", "hello");
	const msg2 = makeMessage(chatID, 2, "assistant", "hi");
	// The promoted message that will arrive via WebSocket.
	const promotedMsg = makeMessage(chatID, 3, "user", "follow-up");
	const mockSocket = createMockSocket();
	vi.mocked(watchChat).mockReturnValue(mockSocket as never);
	const queryClient = createTestQueryClient();
	const wrapper: FC<PropsWithChildren> = ({ children }) => (
		<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
	);
	const setChatErrorReason = vi.fn();
	const clearChatErrorReason = vi.fn();
	const queuedMsg = makeQueuedMessage(chatID, 10, "follow-up");
	const initialMessages = [msg1, msg2];
	const initialOptions = {
		chatID,
		chatMessages: initialMessages,
		chatRecord: makeChat(chatID),
		chatMessagesData: {
			messages: initialMessages,
			queued_messages: [queuedMsg],
			has_more: false,
		},
		chatQueuedMessages: [queuedMsg],
		setChatErrorReason,
		clearChatErrorReason,
	};
	// Render the hook under a real QueryClientProvider so cache
	// changes propagate to the hook exactly as they do in the app.
	const { result, rerender } = renderHook(
		(options: Parameters<typeof useChatStore>[0]) => {
			const { store } = useChatStore(options);
			return {
				orderedMessageIDs: useChatSelector(store, selectOrderedMessageIDs),
				queuedMessages: useChatSelector(store, selectQueuedMessages),
			};
		},
		{ initialProps: initialOptions, wrapper },
	);
	await waitFor(() => {
		expect(result.current.orderedMessageIDs).toEqual([1, 2]);
		expect(result.current.queuedMessages).toHaveLength(1);
	});
	// Simulate the WebSocket delivering the promoted message
	// followed by a queue_update in the same batch (as the server
	// does when auto-promoting or when the promote endpoint runs).
	act(() => {
		mockSocket.emitOpen();
	});
	act(() => {
		mockSocket.emitDataBatch([
			{
				type: "message",
				chat_id: chatID,
				message: promotedMsg,
			},
			{
				type: "queue_update",
				chat_id: chatID,
				queued_messages: [],
			},
		]);
	});
	// The promoted message should appear in the store and the
	// queue should be empty. Before the fix, the queue_update
	// caused updateChatQueuedMessages to mutate the React Query
	// cache, giving chatMessages a new reference that triggered
	// the sync effect. The effect detected the promoted message
	// as a "stale entry" (present in store but not in the REST
	// data) and called replaceMessages, wiping it.
	//
	// Now re-render so the updated query cache flows through
	// the chatMessages prop (simulating React rerender after
	// the query cache mutation).
	rerender({
		...initialOptions,
		// chatMessages still comes from REST (not refetched), so
		// it only has [msg1, msg2]. The promoted message lives
		// only in the store via the WebSocket delivery.
		//
		// Spread into a new array to simulate what actually
		// happens: updateChatQueuedMessages mutates the React
		// Query cache (changing queued_messages), which gives
		// chatMessagesQuery.data a new reference, causing the
		// chatMessagesList useMemo to return a new array with
		// the same elements. The new reference triggers the
		// sync effect.
		chatMessages: [...initialMessages],
		chatMessagesData: {
			messages: [...initialMessages],
			queued_messages: [],
			has_more: false,
		},
		chatQueuedMessages: [],
	});
	// The WebSocket-delivered message (id 3) must survive the re-sync.
	await waitFor(() => {
		expect(result.current.orderedMessageIDs).toEqual([1, 2, 3]);
		expect(result.current.queuedMessages).toHaveLength(0);
	});
});
// Regression test: when a WebSocket batch delivers streaming
// message_parts followed by a promoted (queued -> real) user
// message, the stream reset scheduled for the user message must
// not destroy the assistant stream state built earlier in the
// same batch.
it("does not wipe in-progress stream state when user message arrives in batch", async () => {
// Make requestAnimationFrame fire synchronously so the
// scheduled stream reset runs immediately — this is what
// exposed the bug deterministically.
immediateAnimationFrame();
const chatID = "chat-promote-stream";
const msg1 = makeMessage(chatID, 1, "user", "hello");
const msg2 = makeMessage(chatID, 2, "assistant", "hi");
// Mock socket lets the test drive WebSocket events by hand.
const mockSocket = createMockSocket();
vi.mocked(watchChat).mockReturnValue(mockSocket as never);
const queryClient = createTestQueryClient();
const wrapper: FC<PropsWithChildren> = ({ children }) => (
<QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
);
const setChatErrorReason = vi.fn();
const clearChatErrorReason = vi.fn();
// Start with one queued message; it will later be "promoted"
// to a real message (id 3) via the WebSocket batch below.
const queuedMsg = makeQueuedMessage(chatID, 10, "follow-up");
const initialMessages = [msg1, msg2];
const initialOptions = {
chatID,
chatMessages: initialMessages,
chatRecord: makeChat(chatID),
chatMessagesData: {
messages: initialMessages,
queued_messages: [queuedMsg],
has_more: false,
},
chatQueuedMessages: [queuedMsg],
setChatErrorReason,
clearChatErrorReason,
};
// Render the hook and expose the store plus the selectors the
// assertions below need (stream state, status, message order).
const { result } = renderHook(
(options: Parameters<typeof useChatStore>[0]) => {
const { store } = useChatStore(options);
return {
store,
streamState: useChatSelector(store, selectStreamState),
chatStatus: useChatSelector(store, selectChatStatus),
orderedMessageIDs: useChatSelector(store, selectOrderedMessageIDs),
};
},
{ initialProps: initialOptions, wrapper },
);
// Wait for the initial REST page to be merged into the store.
await waitFor(() => {
expect(result.current.orderedMessageIDs).toEqual([1, 2]);
});
// Open the WebSocket and set the chat to running.
act(() => {
mockSocket.emitOpen();
});
act(() => {
mockSocket.emitData({
type: "status",
chat_id: chatID,
status: { status: "running" },
});
});
await waitFor(() => {
expect(result.current.chatStatus).toBe("running");
});
// Deliver a batch containing trailing message_parts for
// the current response followed by the promoted user
// message. The batch loop flushes pending parts when it
// hits the message event (building stream state). Before
// the fix, scheduleStreamReset would fire for the user
// message because it only checked `changed`, and with
// immediateAnimationFrame the RAF fires synchronously,
// wiping the stream state that was just built.
const promotedUser = makeMessage(chatID, 3, "user", "follow-up");
act(() => {
mockSocket.emitDataBatch([
{
type: "message_part",
chat_id: chatID,
message_part: {
part: { type: "text", text: "I am helping you" },
},
},
{
type: "message",
chat_id: chatID,
message: promotedUser,
},
]);
});
// The stream state must survive: the promoted user message
// should not wipe the in-progress assistant stream.
await waitFor(() => {
expect(result.current.orderedMessageIDs).toContain(3);
expect(result.current.streamState).not.toBeNull();
const blocks = result.current.streamState?.blocks ?? [];
// A "response" block proves the flushed message_part text
// made it into the surviving stream state.
const textBlock = blocks.find((b) => b.type === "response");
expect(textBlock).toBeDefined();
});
});
});
describe("updateSidebarChat via stream events", () => {
@@ -436,7 +436,10 @@ export const useChatStore = (
} = options;
const queryClient = useQueryClient();
const storeRef = useRef<ChatStore>(createChatStore());
const storeRef = useRef<ChatStore | null>(null);
if (storeRef.current === null) {
storeRef.current = createChatStore();
}
const streamResetFrameRef = useRef<number | null>(null);
const queuedMessagesHydratedChatIDRef = useRef<string | null>(null);
// Tracks whether the WebSocket has delivered a queue_update for the
@@ -448,18 +451,8 @@ export const useChatStore = (
const wsQueueUpdateReceivedRef = useRef(false);
const activeChatIDRef = useRef<string | null>(null);
const prevChatIDRef = useRef<string | undefined>(chatID);
// Snapshot of the chatMessages elements from the last sync effect
// run. Used to detect whether chatMessages actually changed (e.g.
// after a refetch producing new objects) vs. just getting a new
// array reference because an unrelated field like queued_messages
// was updated in the query cache. Element-level reference
// comparison works because useMemo(flatMap) preserves message
// object references when only non-message fields change in the
// page, while a genuine refetch returns new objects from the
// server.
const lastSyncedMessagesRef = useRef<readonly TypesGen.ChatMessage[]>([]);
const store = storeRef.current;
const store = storeRef.current!;
// Compute the last REST-fetched message ID so the stream can
// skip messages the client already has. We use a ref so the
@@ -467,10 +460,12 @@ export const useChatStore = (
// chatMessages in its dependency array (which would cause
// unnecessary reconnections).
const lastMessageIdRef = useRef<number | undefined>(undefined);
lastMessageIdRef.current =
chatMessages && chatMessages.length > 0
? chatMessages[chatMessages.length - 1].id
: undefined;
useEffect(() => {
lastMessageIdRef.current =
chatMessages && chatMessages.length > 0
? chatMessages[chatMessages.length - 1].id
: undefined;
}, [chatMessages]);
// True once the initial REST page has resolved for the current
// chat. The WebSocket effect gates on this so that
@@ -554,7 +549,6 @@ export const useChatStore = (
// the new chat's query resolves.
if (prevChatIDRef.current !== chatID) {
prevChatIDRef.current = chatID;
lastSyncedMessagesRef.current = [];
store.replaceMessages([]);
}
// Merge REST-fetched messages into the store one-by-one instead
@@ -565,25 +559,12 @@ export const useChatStore = (
// However, if the fetched set is missing message IDs the store
// already has (e.g. after an edit truncation), a full replace
// is needed because upsert can only add/update, not remove.
// We must only do this when the fetched messages actually
// changed (new elements from a refetch), not when an
// unrelated field like queued_messages caused the query
// data reference to update. Without this guard, a
// queue_update WebSocket event would trigger
// replaceMessages with the stale REST data, wiping any
// message the WebSocket just delivered.
if (chatMessages) {
const prev = lastSyncedMessagesRef.current;
const contentChanged =
chatMessages.length !== prev.length ||
chatMessages.some((m, i) => m !== prev[i]);
lastSyncedMessagesRef.current = chatMessages;
const storeSnap = store.getSnapshot();
const fetchedIDs = new Set(chatMessages.map((m) => m.id));
const hasStaleEntries =
contentChanged &&
storeSnap.orderedMessageIDs.some((id) => !fetchedIDs.has(id));
const storeSnap = store.getSnapshot();
const hasStaleEntries = storeSnap.orderedMessageIDs.some(
(id) => !fetchedIDs.has(id),
);
if (hasStaleEntries) {
store.replaceMessages(chatMessages);
} else {
@@ -725,7 +706,7 @@ export const useChatStore = (
) {
lastMessageIdRef.current = message.id;
}
if (changed && message.role === "assistant") {
if (changed) {
scheduleStreamReset();
}
// Do not update updated_at here. The global
@@ -366,7 +366,6 @@ const ChatMessageItem = memo<{
fileName={block.file_name}
startLine={block.start_line}
endLine={block.end_line}
className="mx-1"
/>
),
)

Some files were not shown because too many files have changed in this diff Show More