Compare commits

...

12 Commits

Author SHA1 Message Date
gcp-cherry-pick-bot[bot] 0ead64f264 chore: add image styles for kiro.svg (cherry-pick #18889) (#18890)
Co-authored-by: ケイラ <mckayla@hey.com>
2025-07-16 01:16:21 +05:00
gcp-cherry-pick-bot[bot] 51e60b74d1 fix: exclude prebuilt workspaces from lifecycle executor (cherry-pick #18762) (#18858)
Co-authored-by: Susana Ferreira <susana@coder.com>
Co-authored-by: Stephen Kirby <58410745+stirby@users.noreply.github.com>
Fixes: https://github.com/coder/coder/issues/18740
2025-07-15 14:47:05 -05:00
gcp-cherry-pick-bot[bot] 33885afbff chore: add kiro: protocol to external app whitelist (cherry-pick #18884) (#18886)
Co-authored-by: blink-so[bot]
Co-authored-by: matifali <10648092+matifali@users.noreply.github.com>
Co-authored-by: blink-so[bot] <211532188+blink-so[bot]@users.noreply.github.com>
2025-07-15 23:48:58 +05:00
gcp-cherry-pick-bot[bot] 3c602b0e29 chore: add kiro icon (cherry-pick #18881) (#18885)
Co-authored-by: Atif Ali <atif@coder.com>
2025-07-15 23:41:58 +05:00
Dean Sheather 5096582dda cherry: feat: sign coder binaries with the release key using GPG (#18774) (#18868)
(cherry picked from commit dc0919da33)

Co-authored-by: Jakub Domeracki <jakub@coder.com>
2025-07-15 18:23:07 +10:00
gcp-cherry-pick-bot[bot] d027a3f51b chore: add rdp icon (cherry-pick #18736) (#18737)
Cherry-picked chore: add rdp icon (#18736)

Co-authored-by: Atif Ali <atif@coder.com>
2025-07-03 12:02:35 +05:00
gcp-cherry-pick-bot[bot] f97bd76bb5 fix: handle task sidebar app health check disabled correctly (cherry-pick #18687) (#18726)
Co-authored-by: Hugo Dutka <hugo@coder.com>
2025-07-02 13:14:55 -05:00
gcp-cherry-pick-bot[bot] 5059c23b43 fix: handle null response from the template presets endpoint (cherry-pick #18723) (#18724)
Co-authored-by: Hugo Dutka <hugo@coder.com>
2025-07-02 12:47:50 -05:00
Stephen Kirby e5a74a775d chore: pull in cherry picks for v2.24 (#18674)
Co-authored-by: Danny Kopping <danny@coder.com>
Co-authored-by: Steven Masley <Emyrk@users.noreply.github.com>
Co-authored-by: Bruno Quaresma <bruno@coder.com>
Co-authored-by: Sas Swart <sas.swart.cdk@gmail.com>
Co-authored-by: Susana Ferreira <susana@coder.com>
Co-authored-by: Danielle Maywood <danielle@themaywoods.com>
Co-authored-by: Mathias Fredriksson <mafredri@gmail.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Asher <ash@coder.com>
Co-authored-by: Hugo Dutka <hugo@coder.com>
2025-07-01 14:33:21 -05:00
gcp-cherry-pick-bot[bot] de494d0a49 feat: add Coder registry links to template creation and editing (cherry-pick #18680) (#18697)
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: Garrett Delfosse <garrett@coder.com>
2025-07-01 23:15:30 +05:00
gcp-cherry-pick-bot[bot] 774792476c fix: stop tearing down non-TTY processes on SSH session end (cherry-pick #18673) (#18676)
Cherry-picked fix: stop tearing down non-TTY processes on SSH session
end (#18673)

(possibly temporary) fix for #18519

Matches OpenSSH for non-tty sessions, where we don't actively terminate
the process.

Adds explicit tracking to the SSH server for these processes so that if
we are shutting down we terminate them: this ensures that we can shut
down quickly to allow shutdown scripts to run. It also ensures our tests
don't leak system resources.

Co-authored-by: Spike Curtis <spike@coder.com>
2025-06-30 23:09:37 +04:00
gcp-cherry-pick-bot[bot] 4a61bbeae4 chore: bump github.com/go-viper/mapstructure/v2 from 2.2.1 to 2.3.0 (cherry-pick #18647) (#18649)
Co-authored-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-06-27 22:59:21 +05:00
137 changed files with 3628 additions and 8930 deletions
+2
View File
@@ -1259,6 +1259,8 @@ jobs:
# do (see above).
CODER_SIGN_WINDOWS: "1"
CODER_WINDOWS_RESOURCES: "1"
CODER_SIGN_GPG: "1"
CODER_GPG_RELEASE_KEY_BASE64: ${{ secrets.GPG_RELEASE_KEY_BASE64 }}
EV_KEY: ${{ secrets.EV_KEY }}
EV_KEYSTORE: ${{ secrets.EV_KEYSTORE }}
EV_TSA_URL: ${{ secrets.EV_TSA_URL }}
+2
View File
@@ -323,6 +323,8 @@ jobs:
env:
CODER_SIGN_WINDOWS: "1"
CODER_SIGN_DARWIN: "1"
CODER_SIGN_GPG: "1"
CODER_GPG_RELEASE_KEY_BASE64: ${{ secrets.GPG_RELEASE_KEY_BASE64 }}
CODER_WINDOWS_RESOURCES: "1"
AC_CERTIFICATE_FILE: /tmp/apple_cert.p12
AC_CERTIFICATE_PASSWORD_FILE: /tmp/apple_cert_password.txt
+4
View File
@@ -252,6 +252,10 @@ $(CODER_ALL_BINARIES): go.mod go.sum \
fi
cp "$@" "./site/out/bin/coder-$$os-$$arch$$dot_ext"
if [[ "$${CODER_SIGN_GPG:-0}" == "1" ]]; then
cp "$@.asc" "./site/out/bin/coder-$$os-$$arch$$dot_ext.asc"
fi
fi
# This task builds Coder Desktop dylibs
+92 -28
View File
@@ -91,6 +91,7 @@ type Options struct {
Execer agentexec.Execer
Devcontainers bool
DevcontainerAPIOptions []agentcontainers.Option // Enable Devcontainers for these to be effective.
Clock quartz.Clock
}
type Client interface {
@@ -144,6 +145,9 @@ func New(options Options) Agent {
if options.PortCacheDuration == 0 {
options.PortCacheDuration = 1 * time.Second
}
if options.Clock == nil {
options.Clock = quartz.NewReal()
}
prometheusRegistry := options.PrometheusRegistry
if prometheusRegistry == nil {
@@ -157,6 +161,7 @@ func New(options Options) Agent {
hardCtx, hardCancel := context.WithCancel(context.Background())
gracefulCtx, gracefulCancel := context.WithCancel(hardCtx)
a := &agent{
clock: options.Clock,
tailnetListenPort: options.TailnetListenPort,
reconnectingPTYTimeout: options.ReconnectingPTYTimeout,
logger: options.Logger,
@@ -204,6 +209,7 @@ func New(options Options) Agent {
}
type agent struct {
clock quartz.Clock
logger slog.Logger
client Client
exchangeToken func(ctx context.Context) (string, error)
@@ -273,7 +279,7 @@ type agent struct {
devcontainers bool
containerAPIOptions []agentcontainers.Option
containerAPI atomic.Pointer[agentcontainers.API] // Set by apiHandler.
containerAPI *agentcontainers.API
}
func (a *agent) TailnetConn() *tailnet.Conn {
@@ -330,6 +336,19 @@ func (a *agent) init() {
// will not report anywhere.
a.scriptRunner.RegisterMetrics(a.prometheusRegistry)
if a.devcontainers {
containerAPIOpts := []agentcontainers.Option{
agentcontainers.WithExecer(a.execer),
agentcontainers.WithCommandEnv(a.sshServer.CommandEnv),
agentcontainers.WithScriptLogger(func(logSourceID uuid.UUID) agentcontainers.ScriptLogger {
return a.logSender.GetScriptLogger(logSourceID)
}),
}
containerAPIOpts = append(containerAPIOpts, a.containerAPIOptions...)
a.containerAPI = agentcontainers.NewAPI(a.logger.Named("containers"), containerAPIOpts...)
}
a.reconnectingPTYServer = reconnectingpty.NewServer(
a.logger.Named("reconnecting-pty"),
a.sshServer,
@@ -1141,17 +1160,27 @@ func (a *agent) handleManifest(manifestOK *checkpoint) func(ctx context.Context,
}
var (
scripts = manifest.Scripts
scriptRunnerOpts []agentscripts.InitOption
scripts = manifest.Scripts
devcontainerScripts map[uuid.UUID]codersdk.WorkspaceAgentScript
)
if a.devcontainers {
var dcScripts []codersdk.WorkspaceAgentScript
scripts, dcScripts = agentcontainers.ExtractAndInitializeDevcontainerScripts(manifest.Devcontainers, scripts)
// See ExtractAndInitializeDevcontainerScripts for motivation
// behind running dcScripts as post start scripts.
scriptRunnerOpts = append(scriptRunnerOpts, agentscripts.WithPostStartScripts(dcScripts...))
if a.containerAPI != nil {
// Init the container API with the manifest and client so that
// we can start accepting requests. The final start of the API
// happens after the startup scripts have been executed to
// ensure the presence of required tools. This means we can
// return existing devcontainers but actual container detection
// and creation will be deferred.
a.containerAPI.Init(
agentcontainers.WithManifestInfo(manifest.OwnerName, manifest.WorkspaceName, manifest.AgentName),
agentcontainers.WithDevcontainers(manifest.Devcontainers, manifest.Scripts),
agentcontainers.WithSubAgentClient(agentcontainers.NewSubAgentClientFromAPI(a.logger, aAPI)),
)
// Since devcontainer are enabled, remove devcontainer scripts
// from the main scripts list to avoid showing an error.
scripts, devcontainerScripts = agentcontainers.ExtractDevcontainerScripts(manifest.Devcontainers, scripts)
}
err = a.scriptRunner.Init(scripts, aAPI.ScriptCompleted, scriptRunnerOpts...)
err = a.scriptRunner.Init(scripts, aAPI.ScriptCompleted)
if err != nil {
return xerrors.Errorf("init script runner: %w", err)
}
@@ -1168,7 +1197,18 @@ func (a *agent) handleManifest(manifestOK *checkpoint) func(ctx context.Context,
// finished (both start and post start). For instance, an
// autostarted devcontainer will be included in this time.
err := a.scriptRunner.Execute(a.gracefulCtx, agentscripts.ExecuteStartScripts)
err = errors.Join(err, a.scriptRunner.Execute(a.gracefulCtx, agentscripts.ExecutePostStartScripts))
if a.containerAPI != nil {
// Start the container API after the startup scripts have
// been executed to ensure that the required tools can be
// installed.
a.containerAPI.Start()
for _, dc := range manifest.Devcontainers {
cErr := a.createDevcontainer(ctx, aAPI, dc, devcontainerScripts[dc.ID])
err = errors.Join(err, cErr)
}
}
dur := time.Since(start).Seconds()
if err != nil {
a.logger.Warn(ctx, "startup script(s) failed", slog.Error(err))
@@ -1187,14 +1227,6 @@ func (a *agent) handleManifest(manifestOK *checkpoint) func(ctx context.Context,
}
a.metrics.startupScriptSeconds.WithLabelValues(label).Set(dur)
a.scriptRunner.StartCron()
// If the container API is enabled, trigger an immediate refresh
// for quick sub agent injection.
if cAPI := a.containerAPI.Load(); cAPI != nil {
if err := cAPI.RefreshContainers(ctx); err != nil {
a.logger.Error(ctx, "failed to refresh containers", slog.Error(err))
}
}
})
if err != nil {
return xerrors.Errorf("track conn goroutine: %w", err)
@@ -1204,6 +1236,38 @@ func (a *agent) handleManifest(manifestOK *checkpoint) func(ctx context.Context,
}
}
func (a *agent) createDevcontainer(
ctx context.Context,
aAPI proto.DRPCAgentClient26,
dc codersdk.WorkspaceAgentDevcontainer,
script codersdk.WorkspaceAgentScript,
) (err error) {
var (
exitCode = int32(0)
startTime = a.clock.Now()
status = proto.Timing_OK
)
if err = a.containerAPI.CreateDevcontainer(dc.WorkspaceFolder, dc.ConfigPath); err != nil {
exitCode = 1
status = proto.Timing_EXIT_FAILURE
}
endTime := a.clock.Now()
if _, scriptErr := aAPI.ScriptCompleted(ctx, &proto.WorkspaceAgentScriptCompletedRequest{
Timing: &proto.Timing{
ScriptId: script.ID[:],
Start: timestamppb.New(startTime),
End: timestamppb.New(endTime),
ExitCode: exitCode,
Stage: proto.Timing_START,
Status: status,
},
}); scriptErr != nil {
a.logger.Warn(ctx, "reporting script completed failed", slog.Error(scriptErr))
}
return err
}
// createOrUpdateNetwork waits for the manifest to be set using manifestOK, then creates or updates
// the tailnet using the information in the manifest
func (a *agent) createOrUpdateNetwork(manifestOK, networkOK *checkpoint) func(context.Context, proto.DRPCAgentClient26) error {
@@ -1227,7 +1291,6 @@ func (a *agent) createOrUpdateNetwork(manifestOK, networkOK *checkpoint) func(co
// agent API.
network, err = a.createTailnet(
a.gracefulCtx,
aAPI,
manifest.AgentID,
manifest.DERPMap,
manifest.DERPForceWebSockets,
@@ -1262,9 +1325,9 @@ func (a *agent) createOrUpdateNetwork(manifestOK, networkOK *checkpoint) func(co
network.SetBlockEndpoints(manifest.DisableDirectConnections)
// Update the subagent client if the container API is available.
if cAPI := a.containerAPI.Load(); cAPI != nil {
if a.containerAPI != nil {
client := agentcontainers.NewSubAgentClientFromAPI(a.logger, aAPI)
cAPI.UpdateSubAgentClient(client)
a.containerAPI.UpdateSubAgentClient(client)
}
}
return nil
@@ -1382,7 +1445,6 @@ func (a *agent) trackGoroutine(fn func()) error {
func (a *agent) createTailnet(
ctx context.Context,
aAPI proto.DRPCAgentClient26,
agentID uuid.UUID,
derpMap *tailcfg.DERPMap,
derpForceWebSockets, disableDirectConnections bool,
@@ -1515,10 +1577,7 @@ func (a *agent) createTailnet(
}()
if err = a.trackGoroutine(func() {
defer apiListener.Close()
apiHandler, closeAPIHAndler := a.apiHandler(aAPI)
defer func() {
_ = closeAPIHAndler()
}()
apiHandler := a.apiHandler()
server := &http.Server{
BaseContext: func(net.Listener) context.Context { return ctx },
Handler: apiHandler,
@@ -1532,7 +1591,6 @@ func (a *agent) createTailnet(
case <-ctx.Done():
case <-a.hardCtx.Done():
}
_ = closeAPIHAndler()
_ = server.Close()
}()
@@ -1871,6 +1929,12 @@ func (a *agent) Close() error {
a.logger.Error(a.hardCtx, "script runner close", slog.Error(err))
}
if a.containerAPI != nil {
if err := a.containerAPI.Close(); err != nil {
a.logger.Error(a.hardCtx, "container API close", slog.Error(err))
}
}
// Wait for the graceful shutdown to complete, but don't wait forever so
// that we don't break user expectations.
go func() {
+179 -104
View File
@@ -53,7 +53,6 @@ type API struct {
cancel context.CancelFunc
watcherDone chan struct{}
updaterDone chan struct{}
initialUpdateDone chan struct{} // Closed after first update in updaterLoop.
updateTrigger chan chan error // Channel to trigger manual refresh.
updateInterval time.Duration // Interval for periodic container updates.
logger slog.Logger
@@ -71,13 +70,16 @@ type API struct {
ownerName string
workspaceName string
parentAgent string
mu sync.RWMutex
mu sync.RWMutex // Protects the following fields.
initDone chan struct{} // Closed by Init.
closed bool
containers codersdk.WorkspaceAgentListContainersResponse // Output from the last list operation.
containersErr error // Error from the last list operation.
devcontainerNames map[string]bool // By devcontainer name.
knownDevcontainers map[string]codersdk.WorkspaceAgentDevcontainer // By workspace folder.
devcontainerLogSourceIDs map[string]uuid.UUID // By workspace folder.
configFileModifiedTimes map[string]time.Time // By config file path.
recreateSuccessTimes map[string]time.Time // By workspace folder.
recreateErrorTimes map[string]time.Time // By workspace folder.
@@ -85,8 +87,6 @@ type API struct {
usingWorkspaceFolderName map[string]bool // By workspace folder.
ignoredDevcontainers map[string]bool // By workspace folder. Tracks three states (true, false and not checked).
asyncWg sync.WaitGroup
devcontainerLogSourceIDs map[string]uuid.UUID // By workspace folder.
}
type subAgentProcess struct {
@@ -188,10 +188,11 @@ func WithSubAgentEnv(env ...string) Option {
// WithManifestInfo sets the owner name, and workspace name
// for the sub-agent.
func WithManifestInfo(owner, workspace string) Option {
func WithManifestInfo(owner, workspace, parentAgent string) Option {
return func(api *API) {
api.ownerName = owner
api.workspaceName = workspace
api.parentAgent = parentAgent
}
}
@@ -207,6 +208,29 @@ func WithDevcontainers(devcontainers []codersdk.WorkspaceAgentDevcontainer, scri
api.devcontainerNames = make(map[string]bool, len(devcontainers))
api.devcontainerLogSourceIDs = make(map[string]uuid.UUID)
for _, dc := range devcontainers {
if dc.Status == "" {
dc.Status = codersdk.WorkspaceAgentDevcontainerStatusStarting
}
logger := api.logger.With(
slog.F("devcontainer_id", dc.ID),
slog.F("devcontainer_name", dc.Name),
slog.F("workspace_folder", dc.WorkspaceFolder),
slog.F("config_path", dc.ConfigPath),
)
// Devcontainers have a name originating from Terraform, but
// we need to ensure that the name is unique. We will use
// the workspace folder name to generate a unique agent name,
// and if that fails, we will fall back to the devcontainers
// original name.
name, usingWorkspaceFolder := api.makeAgentName(dc.WorkspaceFolder, dc.Name)
if name != dc.Name {
logger = logger.With(slog.F("devcontainer_name", name))
logger.Debug(api.ctx, "updating devcontainer name", slog.F("devcontainer_old_name", dc.Name))
dc.Name = name
api.usingWorkspaceFolderName[dc.WorkspaceFolder] = usingWorkspaceFolder
}
api.knownDevcontainers[dc.WorkspaceFolder] = dc
api.devcontainerNames[dc.Name] = true
for _, script := range scripts {
@@ -218,12 +242,7 @@ func WithDevcontainers(devcontainers []codersdk.WorkspaceAgentDevcontainer, scri
}
}
if api.devcontainerLogSourceIDs[dc.WorkspaceFolder] == uuid.Nil {
api.logger.Error(api.ctx, "devcontainer log source ID not found for devcontainer",
slog.F("devcontainer_id", dc.ID),
slog.F("devcontainer_name", dc.Name),
slog.F("workspace_folder", dc.WorkspaceFolder),
slog.F("config_path", dc.ConfigPath),
)
logger.Error(api.ctx, "devcontainer log source ID not found for devcontainer")
}
}
}
@@ -265,9 +284,7 @@ func NewAPI(logger slog.Logger, options ...Option) *API {
api := &API{
ctx: ctx,
cancel: cancel,
watcherDone: make(chan struct{}),
updaterDone: make(chan struct{}),
initialUpdateDone: make(chan struct{}),
initDone: make(chan struct{}),
updateTrigger: make(chan chan error),
updateInterval: defaultUpdateInterval,
logger: logger,
@@ -315,10 +332,47 @@ func NewAPI(logger slog.Logger, options ...Option) *API {
api.subAgentClient.Store(&c)
}
return api
}
// Init applies a final set of options to the API and then
// closes initDone. This method can only be called once.
func (api *API) Init(opts ...Option) {
api.mu.Lock()
defer api.mu.Unlock()
if api.closed {
return
}
select {
case <-api.initDone:
return
default:
}
defer close(api.initDone)
for _, opt := range opts {
opt(api)
}
}
// Start starts the API by initializing the watcher and updater loops.
// This method calls Init, if it is desired to apply options after
// the API has been created, it should be done by calling Init before
// Start. This method must only be called once.
func (api *API) Start() {
api.Init()
api.mu.Lock()
defer api.mu.Unlock()
if api.closed {
return
}
api.watcherDone = make(chan struct{})
api.updaterDone = make(chan struct{})
go api.watcherLoop()
go api.updaterLoop()
return api
}
func (api *API) watcherLoop() {
@@ -391,21 +445,23 @@ func (api *API) updaterLoop() {
} else {
api.logger.Debug(api.ctx, "initial containers update complete")
}
// Signal that the initial update attempt (successful or not) is done.
// Other services can wait on this if they need the first data to be available.
close(api.initialUpdateDone)
// We utilize a TickerFunc here instead of a regular Ticker so that
// we can guarantee execution of the updateContainers method after
// advancing the clock.
ticker := api.clock.TickerFunc(api.ctx, api.updateInterval, func() error {
done := make(chan error, 1)
defer close(done)
var sent bool
defer func() {
if !sent {
close(done)
}
}()
select {
case <-api.ctx.Done():
return api.ctx.Err()
case api.updateTrigger <- done:
sent = true
err := <-done
if err != nil {
if errors.Is(err, context.Canceled) {
@@ -434,6 +490,7 @@ func (api *API) updaterLoop() {
// Note that although we pass api.ctx here, updateContainers
// has an internal timeout to prevent long blocking calls.
done <- api.updateContainers(api.ctx)
close(done)
}
}
}
@@ -447,7 +504,7 @@ func (api *API) UpdateSubAgentClient(client SubAgentClient) {
func (api *API) Routes() http.Handler {
r := chi.NewRouter()
ensureInitialUpdateDoneMW := func(next http.Handler) http.Handler {
ensureInitDoneMW := func(next http.Handler) http.Handler {
return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
select {
case <-api.ctx.Done():
@@ -458,9 +515,8 @@ func (api *API) Routes() http.Handler {
return
case <-r.Context().Done():
return
case <-api.initialUpdateDone:
// Initial update is done, we can start processing
// requests.
case <-api.initDone:
// API init is done, we can start processing requests.
}
next.ServeHTTP(rw, r)
})
@@ -469,13 +525,13 @@ func (api *API) Routes() http.Handler {
// For now, all endpoints require the initial update to be done.
// If we want to allow some endpoints to be available before
// the initial update, we can enable this per-route.
r.Use(ensureInitialUpdateDoneMW)
r.Use(ensureInitDoneMW)
r.Get("/", api.handleList)
// TODO(mafredri): Simplify this route as the previous /devcontainers
// /-route was dropped. We can drop the /devcontainers prefix here too.
r.Route("/devcontainers", func(r chi.Router) {
r.Post("/container/{container}/recreate", api.handleDevcontainerRecreate)
r.Route("/devcontainers/{devcontainer}", func(r chi.Router) {
r.Post("/recreate", api.handleDevcontainerRecreate)
})
return r
@@ -508,7 +564,6 @@ func (api *API) updateContainers(ctx context.Context) error {
// will clear up on the next update.
if !errors.Is(err, context.Canceled) {
api.mu.Lock()
api.containers = codersdk.WorkspaceAgentListContainersResponse{}
api.containersErr = err
api.mu.Unlock()
}
@@ -571,7 +626,8 @@ func (api *API) processUpdatedContainersLocked(ctx context.Context, updated code
slog.F("config_file", configFile),
)
if len(api.containerLabelIncludeFilter) > 0 {
// Filter out devcontainer tests, unless explicitly set in include filters.
if len(api.containerLabelIncludeFilter) > 0 || container.Labels[DevcontainerIsTestRunLabel] == "true" {
var ok bool
for label, value := range api.containerLabelIncludeFilter {
if v, found := container.Labels[label]; found && v == value {
@@ -777,12 +833,19 @@ func (api *API) RefreshContainers(ctx context.Context) (err error) {
}()
done := make(chan error, 1)
var sent bool
defer func() {
if !sent {
close(done)
}
}()
select {
case <-api.ctx.Done():
return xerrors.Errorf("API closed: %w", api.ctx.Err())
case <-ctx.Done():
return ctx.Err()
case api.updateTrigger <- done:
sent = true
select {
case <-api.ctx.Done():
return xerrors.Errorf("API closed: %w", api.ctx.Err())
@@ -823,7 +886,7 @@ func (api *API) getContainers() (codersdk.WorkspaceAgentListContainersResponse,
devcontainers = append(devcontainers, dc)
}
slices.SortFunc(devcontainers, func(a, b codersdk.WorkspaceAgentDevcontainer) int {
return strings.Compare(a.Name, b.Name)
return strings.Compare(a.WorkspaceFolder, b.WorkspaceFolder)
})
}
@@ -838,68 +901,40 @@ func (api *API) getContainers() (codersdk.WorkspaceAgentListContainersResponse,
// devcontainer by referencing the container.
func (api *API) handleDevcontainerRecreate(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
containerID := chi.URLParam(r, "container")
devcontainerID := chi.URLParam(r, "devcontainer")
if containerID == "" {
if devcontainerID == "" {
httpapi.Write(ctx, w, http.StatusBadRequest, codersdk.Response{
Message: "Missing container ID or name",
Detail: "Container ID or name is required to recreate a devcontainer.",
})
return
}
containers, err := api.getContainers()
if err != nil {
httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
Message: "Could not list containers",
Detail: err.Error(),
})
return
}
containerIdx := slices.IndexFunc(containers.Containers, func(c codersdk.WorkspaceAgentContainer) bool { return c.Match(containerID) })
if containerIdx == -1 {
httpapi.Write(ctx, w, http.StatusNotFound, codersdk.Response{
Message: "Container not found",
Detail: "Container ID or name not found in the list of containers.",
})
return
}
container := containers.Containers[containerIdx]
workspaceFolder := container.Labels[DevcontainerLocalFolderLabel]
configPath := container.Labels[DevcontainerConfigFileLabel]
// Workspace folder is required to recreate a container, we don't verify
// the config path here because it's optional.
if workspaceFolder == "" {
httpapi.Write(ctx, w, http.StatusBadRequest, codersdk.Response{
Message: "Missing workspace folder label",
Detail: "The container is not a devcontainer, the container must have the workspace folder label to support recreation.",
Message: "Missing devcontainer ID",
Detail: "Devcontainer ID is required to recreate a devcontainer.",
})
return
}
api.mu.Lock()
dc, ok := api.knownDevcontainers[workspaceFolder]
switch {
case !ok:
var dc codersdk.WorkspaceAgentDevcontainer
for _, knownDC := range api.knownDevcontainers {
if knownDC.ID.String() == devcontainerID {
dc = knownDC
break
}
}
if dc.ID == uuid.Nil {
api.mu.Unlock()
// This case should not happen if the container is a valid devcontainer.
api.logger.Error(ctx, "devcontainer not found for workspace folder", slog.F("workspace_folder", workspaceFolder))
httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
httpapi.Write(ctx, w, http.StatusNotFound, codersdk.Response{
Message: "Devcontainer not found.",
Detail: fmt.Sprintf("Could not find devcontainer for workspace folder: %q", workspaceFolder),
Detail: fmt.Sprintf("Could not find devcontainer with ID: %q", devcontainerID),
})
return
case dc.Status == codersdk.WorkspaceAgentDevcontainerStatusStarting:
}
if dc.Status == codersdk.WorkspaceAgentDevcontainerStatusStarting {
api.mu.Unlock()
httpapi.Write(ctx, w, http.StatusConflict, codersdk.Response{
Message: "Devcontainer recreation already in progress",
Detail: fmt.Sprintf("Recreation for workspace folder %q is already underway.", dc.WorkspaceFolder),
Detail: fmt.Sprintf("Recreation for devcontainer %q is already underway.", dc.Name),
})
return
}
@@ -909,51 +944,65 @@ func (api *API) handleDevcontainerRecreate(w http.ResponseWriter, r *http.Reques
dc.Status = codersdk.WorkspaceAgentDevcontainerStatusStarting
dc.Container = nil
api.knownDevcontainers[dc.WorkspaceFolder] = dc
api.asyncWg.Add(1)
go api.recreateDevcontainer(dc, configPath)
go func() {
_ = api.CreateDevcontainer(dc.WorkspaceFolder, dc.ConfigPath, WithRemoveExistingContainer())
}()
api.mu.Unlock()
httpapi.Write(ctx, w, http.StatusAccepted, codersdk.Response{
Message: "Devcontainer recreation initiated",
Detail: fmt.Sprintf("Recreation process for workspace folder %q has started.", dc.WorkspaceFolder),
Detail: fmt.Sprintf("Recreation process for devcontainer %q has started.", dc.Name),
})
}
// recreateDevcontainer should run in its own goroutine and is responsible for
// createDevcontainer should run in its own goroutine and is responsible for
// recreating a devcontainer based on the provided devcontainer configuration.
// It updates the devcontainer status and logs the process. The configPath is
// passed as a parameter for the odd chance that the container being recreated
// has a different config file than the one stored in the devcontainer state.
// The devcontainer state must be set to starting and the asyncWg must be
// incremented before calling this function.
func (api *API) recreateDevcontainer(dc codersdk.WorkspaceAgentDevcontainer, configPath string) {
defer api.asyncWg.Done()
func (api *API) CreateDevcontainer(workspaceFolder, configPath string, opts ...DevcontainerCLIUpOptions) error {
api.mu.Lock()
if api.closed {
api.mu.Unlock()
return nil
}
dc, found := api.knownDevcontainers[workspaceFolder]
if !found {
api.mu.Unlock()
return xerrors.Errorf("devcontainer not found")
}
var (
err error
ctx = api.ctx
logger = api.logger.With(
slog.F("devcontainer_id", dc.ID),
slog.F("devcontainer_name", dc.Name),
slog.F("workspace_folder", dc.WorkspaceFolder),
slog.F("config_path", configPath),
slog.F("config_path", dc.ConfigPath),
)
)
// Send logs via agent logging facilities.
logSourceID := api.devcontainerLogSourceIDs[dc.WorkspaceFolder]
if logSourceID == uuid.Nil {
api.logger.Debug(api.ctx, "devcontainer log source ID not found, falling back to external log source ID")
logSourceID = agentsdk.ExternalLogSourceID
}
api.asyncWg.Add(1)
defer api.asyncWg.Done()
api.mu.Unlock()
if dc.ConfigPath != configPath {
logger.Warn(ctx, "devcontainer config path mismatch",
slog.F("config_path_param", configPath),
)
}
// Send logs via agent logging facilities.
logSourceID := api.devcontainerLogSourceIDs[dc.WorkspaceFolder]
if logSourceID == uuid.Nil {
// Fallback to the external log source ID if not found.
logSourceID = agentsdk.ExternalLogSourceID
}
scriptLogger := api.scriptLogger(logSourceID)
defer func() {
flushCtx, cancel := context.WithTimeout(api.ctx, 5*time.Second)
@@ -969,12 +1018,15 @@ func (api *API) recreateDevcontainer(dc codersdk.WorkspaceAgentDevcontainer, con
logger.Debug(ctx, "starting devcontainer recreation")
_, err = api.dccli.Up(ctx, dc.WorkspaceFolder, configPath, WithUpOutput(infoW, errW), WithRemoveExistingContainer())
upOptions := []DevcontainerCLIUpOptions{WithUpOutput(infoW, errW)}
upOptions = append(upOptions, opts...)
_, err := api.dccli.Up(ctx, dc.WorkspaceFolder, configPath, upOptions...)
if err != nil {
// No need to log if the API is closing (context canceled), as this
// is expected behavior when the API is shutting down.
if !errors.Is(err, context.Canceled) {
logger.Error(ctx, "devcontainer recreation failed", slog.Error(err))
logger.Error(ctx, "devcontainer creation failed", slog.Error(err))
}
api.mu.Lock()
@@ -983,10 +1035,11 @@ func (api *API) recreateDevcontainer(dc codersdk.WorkspaceAgentDevcontainer, con
api.knownDevcontainers[dc.WorkspaceFolder] = dc
api.recreateErrorTimes[dc.WorkspaceFolder] = api.clock.Now("agentcontainers", "recreate", "errorTimes")
api.mu.Unlock()
return
return xerrors.Errorf("start devcontainer: %w", err)
}
logger.Info(ctx, "devcontainer recreated successfully")
logger.Info(ctx, "devcontainer created successfully")
api.mu.Lock()
dc = api.knownDevcontainers[dc.WorkspaceFolder]
@@ -1009,8 +1062,11 @@ func (api *API) recreateDevcontainer(dc codersdk.WorkspaceAgentDevcontainer, con
// Ensure an immediate refresh to accurately reflect the
// devcontainer state after recreation.
if err := api.RefreshContainers(ctx); err != nil {
logger.Error(ctx, "failed to trigger immediate refresh after devcontainer recreation", slog.Error(err))
logger.Error(ctx, "failed to trigger immediate refresh after devcontainer creation", slog.Error(err))
return xerrors.Errorf("refresh containers: %w", err)
}
return nil
}
// markDevcontainerDirty finds the devcontainer with the given config file path
@@ -1259,6 +1315,7 @@ func (api *API) maybeInjectSubAgentIntoContainerLocked(ctx context.Context, dc c
}
var (
featureOptionsAsEnvs []string
appsWithPossibleDuplicates []SubAgentApp
workspaceFolder = DevcontainerDefaultContainerWorkspaceFolder
)
@@ -1270,12 +1327,16 @@ func (api *API) maybeInjectSubAgentIntoContainerLocked(ctx context.Context, dc c
)
readConfig := func() (DevcontainerConfig, error) {
return api.dccli.ReadConfig(ctx, dc.WorkspaceFolder, dc.ConfigPath, []string{
fmt.Sprintf("CODER_WORKSPACE_AGENT_NAME=%s", subAgentConfig.Name),
fmt.Sprintf("CODER_WORKSPACE_OWNER_NAME=%s", api.ownerName),
fmt.Sprintf("CODER_WORKSPACE_NAME=%s", api.workspaceName),
fmt.Sprintf("CODER_URL=%s", api.subAgentURL),
})
return api.dccli.ReadConfig(ctx, dc.WorkspaceFolder, dc.ConfigPath,
append(featureOptionsAsEnvs, []string{
fmt.Sprintf("CODER_WORKSPACE_AGENT_NAME=%s", subAgentConfig.Name),
fmt.Sprintf("CODER_WORKSPACE_OWNER_NAME=%s", api.ownerName),
fmt.Sprintf("CODER_WORKSPACE_NAME=%s", api.workspaceName),
fmt.Sprintf("CODER_WORKSPACE_PARENT_AGENT_NAME=%s", api.parentAgent),
fmt.Sprintf("CODER_URL=%s", api.subAgentURL),
fmt.Sprintf("CONTAINER_ID=%s", container.ID),
}...),
)
}
if config, err = readConfig(); err != nil {
@@ -1291,6 +1352,11 @@ func (api *API) maybeInjectSubAgentIntoContainerLocked(ctx context.Context, dc c
workspaceFolder = config.Workspace.WorkspaceFolder
featureOptionsAsEnvs = config.MergedConfiguration.Features.OptionsAsEnvs()
if len(featureOptionsAsEnvs) > 0 {
configOutdated = true
}
// NOTE(DanielleMaywood):
// We only want to take an agent name specified in the root customization layer.
// This restricts the ability for a feature to specify the agent name. We may revisit
@@ -1415,6 +1481,11 @@ func (api *API) maybeInjectSubAgentIntoContainerLocked(ctx context.Context, dc c
return xerrors.Errorf("set agent binary executable: %w", err)
}
// Make sure the agent binary is owned by a valid user so we can run it.
if _, err := api.ccli.ExecAs(ctx, container.ID, "root", "/bin/sh", "-c", fmt.Sprintf("chown $(id -u):$(id -g) %s", coderPathInsideContainer)); err != nil {
return xerrors.Errorf("set agent binary ownership: %w", err)
}
// Attempt to add CAP_NET_ADMIN to the binary to improve network
// performance (optional, allow to fail). See `bootstrap_linux.sh`.
// TODO(mafredri): Disable for now until we can figure out why this
@@ -1609,8 +1680,12 @@ func (api *API) Close() error {
err := api.watcher.Close()
// Wait for loops to finish.
<-api.watcherDone
<-api.updaterDone
if api.watcherDone != nil {
<-api.watcherDone
}
if api.updaterDone != nil {
<-api.updaterDone
}
// Wait for all async tasks to complete.
api.asyncWg.Wait()
+321 -53
View File
@@ -437,6 +437,7 @@ func TestAPI(t *testing.T) {
agentcontainers.WithContainerCLI(mLister),
agentcontainers.WithContainerLabelIncludeFilter("this.label.does.not.exist.ignore.devcontainers", "true"),
)
api.Start()
defer api.Close()
r.Mount("/", api.Routes())
@@ -492,78 +493,77 @@ func TestAPI(t *testing.T) {
t.Run("Recreate", func(t *testing.T) {
t.Parallel()
validContainer := codersdk.WorkspaceAgentContainer{
ID: "container-id",
FriendlyName: "container-name",
devcontainerID1 := uuid.New()
devcontainerID2 := uuid.New()
workspaceFolder1 := "/workspace/test1"
workspaceFolder2 := "/workspace/test2"
configPath1 := "/workspace/test1/.devcontainer/devcontainer.json"
configPath2 := "/workspace/test2/.devcontainer/devcontainer.json"
// Create a container that represents an existing devcontainer
devContainer1 := codersdk.WorkspaceAgentContainer{
ID: "container-1",
FriendlyName: "test-container-1",
Running: true,
Labels: map[string]string{
agentcontainers.DevcontainerLocalFolderLabel: "/workspaces",
agentcontainers.DevcontainerConfigFileLabel: "/workspace/.devcontainer/devcontainer.json",
agentcontainers.DevcontainerLocalFolderLabel: workspaceFolder1,
agentcontainers.DevcontainerConfigFileLabel: configPath1,
},
}
missingFolderContainer := codersdk.WorkspaceAgentContainer{
ID: "missing-folder-container",
FriendlyName: "missing-folder-container",
Labels: map[string]string{},
devContainer2 := codersdk.WorkspaceAgentContainer{
ID: "container-2",
FriendlyName: "test-container-2",
Running: true,
Labels: map[string]string{
agentcontainers.DevcontainerLocalFolderLabel: workspaceFolder2,
agentcontainers.DevcontainerConfigFileLabel: configPath2,
},
}
tests := []struct {
name string
containerID string
lister *fakeContainerCLI
devcontainerCLI *fakeDevcontainerCLI
wantStatus []int
wantBody []string
name string
devcontainerID string
setupDevcontainers []codersdk.WorkspaceAgentDevcontainer
lister *fakeContainerCLI
devcontainerCLI *fakeDevcontainerCLI
wantStatus []int
wantBody []string
}{
{
name: "Missing container ID",
containerID: "",
name: "Missing devcontainer ID",
devcontainerID: "",
lister: &fakeContainerCLI{},
devcontainerCLI: &fakeDevcontainerCLI{},
wantStatus: []int{http.StatusBadRequest},
wantBody: []string{"Missing container ID or name"},
wantBody: []string{"Missing devcontainer ID"},
},
{
name: "List error",
containerID: "container-id",
name: "Devcontainer not found",
devcontainerID: uuid.NewString(),
lister: &fakeContainerCLI{
listErr: xerrors.New("list error"),
},
devcontainerCLI: &fakeDevcontainerCLI{},
wantStatus: []int{http.StatusInternalServerError},
wantBody: []string{"Could not list containers"},
},
{
name: "Container not found",
containerID: "nonexistent-container",
lister: &fakeContainerCLI{
containers: codersdk.WorkspaceAgentListContainersResponse{
Containers: []codersdk.WorkspaceAgentContainer{validContainer},
},
arch: "<none>", // Unsupported architecture, don't inject subagent.
},
devcontainerCLI: &fakeDevcontainerCLI{},
wantStatus: []int{http.StatusNotFound},
wantBody: []string{"Container not found"},
wantBody: []string{"Devcontainer not found"},
},
{
name: "Missing workspace folder label",
containerID: "missing-folder-container",
lister: &fakeContainerCLI{
containers: codersdk.WorkspaceAgentListContainersResponse{
Containers: []codersdk.WorkspaceAgentContainer{missingFolderContainer},
name: "Devcontainer CLI error",
devcontainerID: devcontainerID1.String(),
setupDevcontainers: []codersdk.WorkspaceAgentDevcontainer{
{
ID: devcontainerID1,
Name: "test-devcontainer-1",
WorkspaceFolder: workspaceFolder1,
ConfigPath: configPath1,
Status: codersdk.WorkspaceAgentDevcontainerStatusRunning,
Container: &devContainer1,
},
},
devcontainerCLI: &fakeDevcontainerCLI{},
wantStatus: []int{http.StatusBadRequest},
wantBody: []string{"Missing workspace folder label"},
},
{
name: "Devcontainer CLI error",
containerID: "container-id",
lister: &fakeContainerCLI{
containers: codersdk.WorkspaceAgentListContainersResponse{
Containers: []codersdk.WorkspaceAgentContainer{validContainer},
Containers: []codersdk.WorkspaceAgentContainer{devContainer1},
},
arch: "<none>", // Unsupported architecture, don't inject subagent.
},
@@ -574,11 +574,21 @@ func TestAPI(t *testing.T) {
wantBody: []string{"Devcontainer recreation initiated", "Devcontainer recreation already in progress"},
},
{
name: "OK",
containerID: "container-id",
name: "OK",
devcontainerID: devcontainerID2.String(),
setupDevcontainers: []codersdk.WorkspaceAgentDevcontainer{
{
ID: devcontainerID2,
Name: "test-devcontainer-2",
WorkspaceFolder: workspaceFolder2,
ConfigPath: configPath2,
Status: codersdk.WorkspaceAgentDevcontainerStatusRunning,
Container: &devContainer2,
},
},
lister: &fakeContainerCLI{
containers: codersdk.WorkspaceAgentListContainersResponse{
Containers: []codersdk.WorkspaceAgentContainer{validContainer},
Containers: []codersdk.WorkspaceAgentContainer{devContainer2},
},
arch: "<none>", // Unsupported architecture, don't inject subagent.
},
@@ -607,13 +617,17 @@ func TestAPI(t *testing.T) {
// Setup router with the handler under test.
r := chi.NewRouter()
api := agentcontainers.NewAPI(
logger,
agentcontainers.WithClock(mClock),
agentcontainers.WithContainerCLI(tt.lister),
agentcontainers.WithDevcontainerCLI(tt.devcontainerCLI),
agentcontainers.WithWatcher(watcher.NewNoop()),
agentcontainers.WithDevcontainers(tt.setupDevcontainers, nil),
)
api.Start()
defer api.Close()
r.Mount("/", api.Routes())
@@ -624,7 +638,7 @@ func TestAPI(t *testing.T) {
for i := range tt.wantStatus {
// Simulate HTTP request to the recreate endpoint.
req := httptest.NewRequest(http.MethodPost, "/devcontainers/container/"+tt.containerID+"/recreate", nil).
req := httptest.NewRequest(http.MethodPost, "/devcontainers/"+tt.devcontainerID+"/recreate", nil).
WithContext(ctx)
rec := httptest.NewRecorder()
r.ServeHTTP(rec, req)
@@ -747,6 +761,7 @@ func TestAPI(t *testing.T) {
knownDevcontainers []codersdk.WorkspaceAgentDevcontainer
wantStatus int
wantCount int
wantTestContainer bool
verify func(t *testing.T, devcontainers []codersdk.WorkspaceAgentDevcontainer)
}{
{
@@ -993,6 +1008,13 @@ func TestAPI(t *testing.T) {
assert.Len(t, names, 4, "should have four unique devcontainer names")
},
},
{
name: "Include test containers",
lister: &fakeContainerCLI{},
wantStatus: http.StatusOK,
wantTestContainer: true,
wantCount: 1, // Will be appended.
},
}
for _, tt := range tests {
@@ -1005,14 +1027,33 @@ func TestAPI(t *testing.T) {
mClock.Set(time.Now()).MustWait(testutil.Context(t, testutil.WaitShort))
tickerTrap := mClock.Trap().TickerFunc("updaterLoop")
// This container should be ignored unless explicitly included.
tt.lister.containers.Containers = append(tt.lister.containers.Containers, codersdk.WorkspaceAgentContainer{
ID: "test-container-1",
FriendlyName: "test-container-1",
Running: true,
Labels: map[string]string{
agentcontainers.DevcontainerLocalFolderLabel: "/workspace/test1",
agentcontainers.DevcontainerConfigFileLabel: "/workspace/test1/.devcontainer/devcontainer.json",
agentcontainers.DevcontainerIsTestRunLabel: "true",
},
})
// Setup router with the handler under test.
r := chi.NewRouter()
apiOptions := []agentcontainers.Option{
agentcontainers.WithClock(mClock),
agentcontainers.WithContainerCLI(tt.lister),
agentcontainers.WithDevcontainerCLI(&fakeDevcontainerCLI{}),
agentcontainers.WithWatcher(watcher.NewNoop()),
}
if tt.wantTestContainer {
apiOptions = append(apiOptions, agentcontainers.WithContainerLabelIncludeFilter(
agentcontainers.DevcontainerIsTestRunLabel, "true",
))
}
// Generate matching scripts for the known devcontainers
// (required to extract log source ID).
var scripts []codersdk.WorkspaceAgentScript
@@ -1027,6 +1068,7 @@ func TestAPI(t *testing.T) {
}
api := agentcontainers.NewAPI(logger, apiOptions...)
api.Start()
defer api.Close()
r.Mount("/", api.Routes())
@@ -1038,6 +1080,11 @@ func TestAPI(t *testing.T) {
tickerTrap.MustWait(ctx).MustRelease(ctx)
tickerTrap.Close()
for _, dc := range tt.knownDevcontainers {
err := api.CreateDevcontainer(dc.WorkspaceFolder, dc.ConfigPath)
require.NoError(t, err)
}
// Advance the clock to run the updater loop.
_, aw := mClock.AdvanceNext()
aw.MustWait(ctx)
@@ -1111,6 +1158,7 @@ func TestAPI(t *testing.T) {
[]codersdk.WorkspaceAgentScript{{LogSourceID: uuid.New(), ID: dc.ID}},
),
)
api.Start()
defer api.Close()
// Make sure the ticker function has been registered
@@ -1206,6 +1254,7 @@ func TestAPI(t *testing.T) {
agentcontainers.WithWatcher(fWatcher),
agentcontainers.WithClock(mClock),
)
api.Start()
defer api.Close()
r := chi.NewRouter()
@@ -1343,6 +1392,7 @@ func TestAPI(t *testing.T) {
mCCLI.EXPECT().ExecAs(gomock.Any(), "test-container-id", "root", "mkdir", "-p", "/.coder-agent").Return(nil, nil),
mCCLI.EXPECT().Copy(gomock.Any(), "test-container-id", coderBin, "/.coder-agent/coder").Return(nil),
mCCLI.EXPECT().ExecAs(gomock.Any(), "test-container-id", "root", "chmod", "0755", "/.coder-agent", "/.coder-agent/coder").Return(nil, nil),
mCCLI.EXPECT().ExecAs(gomock.Any(), "test-container-id", "root", "/bin/sh", "-c", "chown $(id -u):$(id -g) /.coder-agent/coder").Return(nil, nil),
)
mClock.Set(time.Now()).MustWait(ctx)
@@ -1356,8 +1406,9 @@ func TestAPI(t *testing.T) {
agentcontainers.WithSubAgentClient(fakeSAC),
agentcontainers.WithSubAgentURL("test-subagent-url"),
agentcontainers.WithDevcontainerCLI(fakeDCCLI),
agentcontainers.WithManifestInfo("test-user", "test-workspace"),
agentcontainers.WithManifestInfo("test-user", "test-workspace", "test-parent-agent"),
)
api.Start()
apiClose := func() {
closeOnce.Do(func() {
// Close before api.Close() defer to avoid deadlock after test.
@@ -1377,7 +1428,9 @@ func TestAPI(t *testing.T) {
assert.Contains(t, envs, "CODER_WORKSPACE_AGENT_NAME=coder")
assert.Contains(t, envs, "CODER_WORKSPACE_NAME=test-workspace")
assert.Contains(t, envs, "CODER_WORKSPACE_OWNER_NAME=test-user")
assert.Contains(t, envs, "CODER_WORKSPACE_PARENT_AGENT_NAME=test-parent-agent")
assert.Contains(t, envs, "CODER_URL=test-subagent-url")
assert.Contains(t, envs, "CONTAINER_ID=test-container-id")
return nil
})
@@ -1428,6 +1481,7 @@ func TestAPI(t *testing.T) {
mCCLI.EXPECT().ExecAs(gomock.Any(), "test-container-id", "root", "mkdir", "-p", "/.coder-agent").Return(nil, nil),
mCCLI.EXPECT().Copy(gomock.Any(), "test-container-id", coderBin, "/.coder-agent/coder").Return(nil),
mCCLI.EXPECT().ExecAs(gomock.Any(), "test-container-id", "root", "chmod", "0755", "/.coder-agent", "/.coder-agent/coder").Return(nil, nil),
mCCLI.EXPECT().ExecAs(gomock.Any(), "test-container-id", "root", "/bin/sh", "-c", "chown $(id -u):$(id -g) /.coder-agent/coder").Return(nil, nil),
)
// Verify that the agent has started.
@@ -1488,6 +1542,7 @@ func TestAPI(t *testing.T) {
mCCLI.EXPECT().ExecAs(gomock.Any(), "new-test-container-id", "root", "mkdir", "-p", "/.coder-agent").Return(nil, nil),
mCCLI.EXPECT().Copy(gomock.Any(), "new-test-container-id", coderBin, "/.coder-agent/coder").Return(nil),
mCCLI.EXPECT().ExecAs(gomock.Any(), "new-test-container-id", "root", "chmod", "0755", "/.coder-agent", "/.coder-agent/coder").Return(nil, nil),
mCCLI.EXPECT().ExecAs(gomock.Any(), "new-test-container-id", "root", "/bin/sh", "-c", "chown $(id -u):$(id -g) /.coder-agent/coder").Return(nil, nil),
)
fakeDCCLI.readConfig.MergedConfiguration.Customizations.Coder = []agentcontainers.CoderCustomization{
@@ -1519,7 +1574,9 @@ func TestAPI(t *testing.T) {
assert.Contains(t, envs, "CODER_WORKSPACE_AGENT_NAME=coder")
assert.Contains(t, envs, "CODER_WORKSPACE_NAME=test-workspace")
assert.Contains(t, envs, "CODER_WORKSPACE_OWNER_NAME=test-user")
assert.Contains(t, envs, "CODER_WORKSPACE_PARENT_AGENT_NAME=test-parent-agent")
assert.Contains(t, envs, "CODER_URL=test-subagent-url")
assert.NotContains(t, envs, "CONTAINER_ID=test-container-id")
return nil
})
@@ -1578,6 +1635,7 @@ func TestAPI(t *testing.T) {
agentcontainers.WithSubAgentClient(fakeSAC),
agentcontainers.WithDevcontainerCLI(&fakeDevcontainerCLI{}),
)
api.Start()
defer api.Close()
tickerTrap.MustWait(ctx).MustRelease(ctx)
@@ -1886,6 +1944,7 @@ func TestAPI(t *testing.T) {
mCCLI.EXPECT().ExecAs(gomock.Any(), testContainer.ID, "root", "mkdir", "-p", "/.coder-agent").Return(nil, nil),
mCCLI.EXPECT().Copy(gomock.Any(), testContainer.ID, coderBin, "/.coder-agent/coder").Return(nil),
mCCLI.EXPECT().ExecAs(gomock.Any(), testContainer.ID, "root", "chmod", "0755", "/.coder-agent", "/.coder-agent/coder").Return(nil, nil),
mCCLI.EXPECT().ExecAs(gomock.Any(), testContainer.ID, "root", "/bin/sh", "-c", "chown $(id -u):$(id -g) /.coder-agent/coder").Return(nil, nil),
)
mClock.Set(time.Now()).MustWait(ctx)
@@ -1899,6 +1958,7 @@ func TestAPI(t *testing.T) {
agentcontainers.WithSubAgentURL("test-subagent-url"),
agentcontainers.WithWatcher(watcher.NewNoop()),
)
api.Start()
defer api.Close()
// Close before api.Close() defer to avoid deadlock after test.
@@ -1978,6 +2038,7 @@ func TestAPI(t *testing.T) {
mCCLI.EXPECT().ExecAs(gomock.Any(), testContainer.ID, "root", "mkdir", "-p", "/.coder-agent").Return(nil, nil),
mCCLI.EXPECT().Copy(gomock.Any(), testContainer.ID, coderBin, "/.coder-agent/coder").Return(nil),
mCCLI.EXPECT().ExecAs(gomock.Any(), testContainer.ID, "root", "chmod", "0755", "/.coder-agent", "/.coder-agent/coder").Return(nil, nil),
mCCLI.EXPECT().ExecAs(gomock.Any(), testContainer.ID, "root", "/bin/sh", "-c", "chown $(id -u):$(id -g) /.coder-agent/coder").Return(nil, nil),
)
mClock.Set(time.Now()).MustWait(ctx)
@@ -1991,6 +2052,7 @@ func TestAPI(t *testing.T) {
agentcontainers.WithSubAgentURL("test-subagent-url"),
agentcontainers.WithWatcher(watcher.NewNoop()),
)
api.Start()
defer api.Close()
// Close before api.Close() defer to avoid deadlock after test.
@@ -2019,6 +2081,127 @@ func TestAPI(t *testing.T) {
require.Len(t, fSAC.created, 1)
})
t.Run("ReadConfigWithFeatureOptions", func(t *testing.T) {
t.Parallel()
if runtime.GOOS == "windows" {
t.Skip("Dev Container tests are not supported on Windows (this test uses mocks but fails due to Windows paths)")
}
var (
ctx = testutil.Context(t, testutil.WaitMedium)
logger = testutil.Logger(t)
mClock = quartz.NewMock(t)
mCCLI = acmock.NewMockContainerCLI(gomock.NewController(t))
fSAC = &fakeSubAgentClient{
logger: logger.Named("fakeSubAgentClient"),
createErrC: make(chan error, 1),
}
fDCCLI = &fakeDevcontainerCLI{
readConfig: agentcontainers.DevcontainerConfig{
MergedConfiguration: agentcontainers.DevcontainerMergedConfiguration{
Features: agentcontainers.DevcontainerFeatures{
"./code-server": map[string]any{
"port": 9090,
},
"ghcr.io/devcontainers/features/docker-in-docker:2": map[string]any{
"moby": "false",
},
},
},
Workspace: agentcontainers.DevcontainerWorkspace{
WorkspaceFolder: "/workspaces/coder",
},
},
readConfigErrC: make(chan func(envs []string) error, 2),
}
testContainer = codersdk.WorkspaceAgentContainer{
ID: "test-container-id",
FriendlyName: "test-container",
Image: "test-image",
Running: true,
CreatedAt: time.Now(),
Labels: map[string]string{
agentcontainers.DevcontainerLocalFolderLabel: "/workspaces/coder",
agentcontainers.DevcontainerConfigFileLabel: "/workspaces/coder/.devcontainer/devcontainer.json",
},
}
)
coderBin, err := os.Executable()
require.NoError(t, err)
// Mock the `List` function to always return our test container.
mCCLI.EXPECT().List(gomock.Any()).Return(codersdk.WorkspaceAgentListContainersResponse{
Containers: []codersdk.WorkspaceAgentContainer{testContainer},
}, nil).AnyTimes()
// Mock the steps used for injecting the coder agent.
gomock.InOrder(
mCCLI.EXPECT().DetectArchitecture(gomock.Any(), testContainer.ID).Return(runtime.GOARCH, nil),
mCCLI.EXPECT().ExecAs(gomock.Any(), testContainer.ID, "root", "mkdir", "-p", "/.coder-agent").Return(nil, nil),
mCCLI.EXPECT().Copy(gomock.Any(), testContainer.ID, coderBin, "/.coder-agent/coder").Return(nil),
mCCLI.EXPECT().ExecAs(gomock.Any(), testContainer.ID, "root", "chmod", "0755", "/.coder-agent", "/.coder-agent/coder").Return(nil, nil),
mCCLI.EXPECT().ExecAs(gomock.Any(), testContainer.ID, "root", "/bin/sh", "-c", "chown $(id -u):$(id -g) /.coder-agent/coder").Return(nil, nil),
)
mClock.Set(time.Now()).MustWait(ctx)
tickerTrap := mClock.Trap().TickerFunc("updaterLoop")
api := agentcontainers.NewAPI(logger,
agentcontainers.WithClock(mClock),
agentcontainers.WithContainerCLI(mCCLI),
agentcontainers.WithDevcontainerCLI(fDCCLI),
agentcontainers.WithSubAgentClient(fSAC),
agentcontainers.WithSubAgentURL("test-subagent-url"),
agentcontainers.WithWatcher(watcher.NewNoop()),
agentcontainers.WithManifestInfo("test-user", "test-workspace", "test-parent-agent"),
)
api.Start()
defer api.Close()
// Close before api.Close() defer to avoid deadlock after test.
defer close(fSAC.createErrC)
defer close(fDCCLI.readConfigErrC)
// Allow agent creation and injection to succeed.
testutil.RequireSend(ctx, t, fSAC.createErrC, nil)
testutil.RequireSend(ctx, t, fDCCLI.readConfigErrC, func(envs []string) error {
assert.Contains(t, envs, "CODER_WORKSPACE_AGENT_NAME=coder")
assert.Contains(t, envs, "CODER_WORKSPACE_NAME=test-workspace")
assert.Contains(t, envs, "CODER_WORKSPACE_OWNER_NAME=test-user")
assert.Contains(t, envs, "CODER_WORKSPACE_PARENT_AGENT_NAME=test-parent-agent")
assert.Contains(t, envs, "CODER_URL=test-subagent-url")
assert.Contains(t, envs, "CONTAINER_ID=test-container-id")
// First call should not have feature envs.
assert.NotContains(t, envs, "FEATURE_CODE_SERVER_OPTION_PORT=9090")
assert.NotContains(t, envs, "FEATURE_DOCKER_IN_DOCKER_OPTION_MOBY=false")
return nil
})
testutil.RequireSend(ctx, t, fDCCLI.readConfigErrC, func(envs []string) error {
assert.Contains(t, envs, "CODER_WORKSPACE_AGENT_NAME=coder")
assert.Contains(t, envs, "CODER_WORKSPACE_NAME=test-workspace")
assert.Contains(t, envs, "CODER_WORKSPACE_OWNER_NAME=test-user")
assert.Contains(t, envs, "CODER_WORKSPACE_PARENT_AGENT_NAME=test-parent-agent")
assert.Contains(t, envs, "CODER_URL=test-subagent-url")
assert.Contains(t, envs, "CONTAINER_ID=test-container-id")
// Second call should have feature envs from the first config read.
assert.Contains(t, envs, "FEATURE_CODE_SERVER_OPTION_PORT=9090")
assert.Contains(t, envs, "FEATURE_DOCKER_IN_DOCKER_OPTION_MOBY=false")
return nil
})
// Wait until the ticker has been registered.
tickerTrap.MustWait(ctx).MustRelease(ctx)
tickerTrap.Close()
// Verify agent was created successfully
require.Len(t, fSAC.created, 1)
})
t.Run("CommandEnv", func(t *testing.T) {
t.Parallel()
@@ -2045,6 +2228,7 @@ func TestAPI(t *testing.T) {
agentcontainers.WithExecer(fakeExec),
agentcontainers.WithCommandEnv(commandEnv),
)
api.Start()
defer api.Close()
// Call RefreshContainers directly to trigger CommandEnv usage.
@@ -2134,12 +2318,16 @@ func TestAPI(t *testing.T) {
agentcontainers.WithWatcher(fWatcher),
agentcontainers.WithClock(mClock),
)
api.Start()
defer func() {
close(fakeSAC.createErrC)
close(fakeSAC.deleteErrC)
api.Close()
}()
err := api.RefreshContainers(ctx)
require.NoError(t, err, "RefreshContainers should not error")
r := chi.NewRouter()
r.Mount("/", api.Routes())
@@ -2150,7 +2338,7 @@ func TestAPI(t *testing.T) {
require.Equal(t, http.StatusOK, rec.Code)
var response codersdk.WorkspaceAgentListContainersResponse
err := json.NewDecoder(rec.Body).Decode(&response)
err = json.NewDecoder(rec.Body).Decode(&response)
require.NoError(t, err)
assert.Empty(t, response.Devcontainers, "ignored devcontainer should not be in response when ignore=true")
@@ -2334,6 +2522,7 @@ func TestSubAgentCreationWithNameRetry(t *testing.T) {
agentcontainers.WithSubAgentClient(fSAC),
agentcontainers.WithWatcher(watcher.NewNoop()),
)
api.Start()
defer api.Close()
tickerTrap.MustWait(ctx).MustRelease(ctx)
@@ -2407,3 +2596,82 @@ func fakeContainer(t *testing.T, mut ...func(*codersdk.WorkspaceAgentContainer))
}
return ct
}
// TestWithDevcontainersNameGeneration verifies that devcontainer names
// supplied via WithDevcontainers are regenerated from their workspace
// folder paths, and that name collisions are resolved by prefixing the
// parent folder name.
func TestWithDevcontainersNameGeneration(t *testing.T) {
	t.Parallel()

	if runtime.GOOS == "windows" {
		t.Skip("Dev Container tests are not supported on Windows")
	}

	// Two known devcontainers whose folder basenames ("project") collide
	// with each other and with the discovered container below.
	devcontainers := []codersdk.WorkspaceAgentDevcontainer{
		{
			ID:              uuid.New(),
			Name:            "original-name",
			WorkspaceFolder: "/home/coder/foo/project",
			ConfigPath:      "/home/coder/foo/project/.devcontainer/devcontainer.json",
		},
		{
			ID:              uuid.New(),
			Name:            "another-name",
			WorkspaceFolder: "/home/coder/bar/project",
			ConfigPath:      "/home/coder/bar/project/.devcontainer/devcontainer.json",
		},
	}

	scripts := []codersdk.WorkspaceAgentScript{
		{ID: devcontainers[0].ID, LogSourceID: uuid.New()},
		{ID: devcontainers[1].ID, LogSourceID: uuid.New()},
	}

	logger := testutil.Logger(t)

	// This should trigger the WithDevcontainers code path where names are generated.
	api := agentcontainers.NewAPI(logger,
		agentcontainers.WithDevcontainers(devcontainers, scripts),
		agentcontainers.WithContainerCLI(&fakeContainerCLI{
			containers: codersdk.WorkspaceAgentListContainersResponse{
				Containers: []codersdk.WorkspaceAgentContainer{
					fakeContainer(t, func(c *codersdk.WorkspaceAgentContainer) {
						c.ID = "some-container-id-1"
						c.FriendlyName = "container-name-1"
						c.Labels[agentcontainers.DevcontainerLocalFolderLabel] = "/home/coder/baz/project"
						c.Labels[agentcontainers.DevcontainerConfigFileLabel] = "/home/coder/baz/project/.devcontainer/devcontainer.json"
					}),
				},
			},
		}),
		agentcontainers.WithDevcontainerCLI(&fakeDevcontainerCLI{}),
		agentcontainers.WithSubAgentClient(&fakeSubAgentClient{}),
		agentcontainers.WithWatcher(watcher.NewNoop()),
	)
	defer api.Close()
	api.Start()

	r := chi.NewRouter()
	r.Mount("/", api.Routes())

	ctx := context.Background()

	err := api.RefreshContainers(ctx)
	require.NoError(t, err, "RefreshContainers should not error")

	// Initial request returns the initial data.
	req := httptest.NewRequest(http.MethodGet, "/", nil).
		WithContext(ctx)
	rec := httptest.NewRecorder()
	r.ServeHTTP(rec, req)

	require.Equal(t, http.StatusOK, rec.Code)
	var response codersdk.WorkspaceAgentListContainersResponse
	err = json.NewDecoder(rec.Body).Decode(&response)
	require.NoError(t, err)

	// Verify the devcontainers have the expected names: the two known
	// devcontainers plus the discovered container make three entries.
	require.Len(t, response.Devcontainers, 3, "should have three devcontainers")
	assert.NotEqual(t, "original-name", response.Devcontainers[2].Name, "first devcontainer should not keep original name")
	assert.Equal(t, "project", response.Devcontainers[2].Name, "first devcontainer should use the project folder name")
	assert.NotEqual(t, "another-name", response.Devcontainers[0].Name, "second devcontainer should not keep original name")
	assert.Equal(t, "bar-project", response.Devcontainers[0].Name, "second devcontainer has a collision and uses the folder name with a prefix")
	assert.Equal(t, "baz-project", response.Devcontainers[1].Name, "third devcontainer should use the folder name with a prefix since it collides with the first two")
}
+9 -39
View File
@@ -2,10 +2,10 @@ package agentcontainers
import (
"context"
"fmt"
"os"
"path/filepath"
"strings"
"github.com/google/uuid"
"cdr.dev/slog"
"github.com/coder/coder/v2/codersdk"
@@ -18,37 +18,25 @@ const (
// DevcontainerConfigFileLabel is the label that contains the path to
// the devcontainer.json configuration file.
DevcontainerConfigFileLabel = "devcontainer.config_file"
// DevcontainerIsTestRunLabel is set if the devcontainer is part of a test
// and should be excluded.
DevcontainerIsTestRunLabel = "devcontainer.is_test_run"
// The default workspace folder inside the devcontainer.
DevcontainerDefaultContainerWorkspaceFolder = "/workspaces"
)
// devcontainerUpScriptTemplate is the shell snippet used to start a
// devcontainer. The single %s placeholder receives the space-joined
// `devcontainer up` CLI arguments. The script fails fast with an
// actionable error when the @devcontainers/cli binary is not on PATH.
const devcontainerUpScriptTemplate = `
if ! which devcontainer > /dev/null 2>&1; then
echo "ERROR: Unable to start devcontainer, @devcontainers/cli is not installed or not found in \$PATH." 1>&2
echo "Please install @devcontainers/cli by running \"npm install -g @devcontainers/cli\" or by using the \"devcontainers-cli\" Coder module." 1>&2
exit 1
fi
devcontainer up %s
`
// ExtractAndInitializeDevcontainerScripts extracts devcontainer scripts from
// the given scripts and devcontainers. The devcontainer scripts are removed
// from the returned scripts so that they can be run separately.
//
// Dev Containers have an inherent dependency on start scripts, since they
// initialize the workspace (e.g. git clone, npm install, etc). This is
// important if e.g. a Coder module to install @devcontainer/cli is used.
func ExtractAndInitializeDevcontainerScripts(
func ExtractDevcontainerScripts(
devcontainers []codersdk.WorkspaceAgentDevcontainer,
scripts []codersdk.WorkspaceAgentScript,
) (filteredScripts []codersdk.WorkspaceAgentScript, devcontainerScripts []codersdk.WorkspaceAgentScript) {
) (filteredScripts []codersdk.WorkspaceAgentScript, devcontainerScripts map[uuid.UUID]codersdk.WorkspaceAgentScript) {
devcontainerScripts = make(map[uuid.UUID]codersdk.WorkspaceAgentScript)
ScriptLoop:
for _, script := range scripts {
for _, dc := range devcontainers {
// The devcontainer scripts match the devcontainer ID for
// identification.
if script.ID == dc.ID {
devcontainerScripts = append(devcontainerScripts, devcontainerStartupScript(dc, script))
devcontainerScripts[dc.ID] = script
continue ScriptLoop
}
}
@@ -59,24 +47,6 @@ ScriptLoop:
return filteredScripts, devcontainerScripts
}
// devcontainerStartupScript converts an agent script into one that runs
// `devcontainer up` for the given devcontainer. The returned script keeps
// all metadata from the input script but replaces its command and
// disables RunOnStart.
func devcontainerStartupScript(dc codersdk.WorkspaceAgentDevcontainer, script codersdk.WorkspaceAgentScript) codersdk.WorkspaceAgentScript {
	options := []string{
		"--log-format json",
		fmt.Sprintf("--workspace-folder %q", dc.WorkspaceFolder),
	}
	if dc.ConfigPath != "" {
		options = append(options, fmt.Sprintf("--config %q", dc.ConfigPath))
	}
	upCmd := fmt.Sprintf(devcontainerUpScriptTemplate, strings.Join(options, " "))
	// Force the script to run in /bin/sh, since some shells (e.g. fish)
	// don't support the script.
	script.Script = fmt.Sprintf("/bin/sh -c '%s'", upCmd)
	// Disable RunOnStart, scripts have this set so that when devcontainers
	// have not been enabled, a warning will be surfaced in the agent logs.
	script.RunOnStart = false
	return script
}
// ExpandAllDevcontainerPaths expands all devcontainer paths in the given
// devcontainers. This is required by the devcontainer CLI, which requires
// absolute paths for the workspace folder and config path.
-274
View File
@@ -1,274 +0,0 @@
package agentcontainers_test
import (
"path/filepath"
"strings"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
"cdr.dev/slog/sloggers/slogtest"
"github.com/coder/coder/v2/agent/agentcontainers"
"github.com/coder/coder/v2/codersdk"
)
// TestExtractAndInitializeDevcontainerScripts verifies that agent scripts
// whose IDs match known devcontainer IDs are turned into `devcontainer up`
// startup scripts (with RunOnStart disabled), while all remaining scripts
// are passed through unmodified. Paths are expanded via
// ExpandAllDevcontainerPaths before extraction.
func TestExtractAndInitializeDevcontainerScripts(t *testing.T) {
	t.Parallel()

	scriptIDs := []uuid.UUID{uuid.New(), uuid.New()}
	devcontainerIDs := []uuid.UUID{uuid.New(), uuid.New()}

	type args struct {
		expandPath    func(string) (string, error)
		devcontainers []codersdk.WorkspaceAgentDevcontainer
		scripts       []codersdk.WorkspaceAgentScript
	}
	tests := []struct {
		name                    string
		args                    args
		wantFilteredScripts     []codersdk.WorkspaceAgentScript
		wantDevcontainerScripts []codersdk.WorkspaceAgentScript
		// skipOnWindowsDueToPathSeparator skips cases whose expected
		// output hard-codes forward-slash paths.
		skipOnWindowsDueToPathSeparator bool
	}{
		{
			name: "no scripts",
			args: args{
				expandPath:    nil,
				devcontainers: nil,
				scripts:       nil,
			},
			wantFilteredScripts:     nil,
			wantDevcontainerScripts: nil,
		},
		{
			name: "no devcontainers",
			args: args{
				expandPath:    nil,
				devcontainers: nil,
				scripts: []codersdk.WorkspaceAgentScript{
					{ID: scriptIDs[0]},
					{ID: scriptIDs[1]},
				},
			},
			wantFilteredScripts: []codersdk.WorkspaceAgentScript{
				{ID: scriptIDs[0]},
				{ID: scriptIDs[1]},
			},
			wantDevcontainerScripts: nil,
		},
		{
			name: "no scripts match devcontainers",
			args: args{
				expandPath: nil,
				devcontainers: []codersdk.WorkspaceAgentDevcontainer{
					{ID: devcontainerIDs[0]},
					{ID: devcontainerIDs[1]},
				},
				scripts: []codersdk.WorkspaceAgentScript{
					{ID: scriptIDs[0]},
					{ID: scriptIDs[1]},
				},
			},
			wantFilteredScripts: []codersdk.WorkspaceAgentScript{
				{ID: scriptIDs[0]},
				{ID: scriptIDs[1]},
			},
			wantDevcontainerScripts: nil,
		},
		{
			name: "scripts match devcontainers and sets RunOnStart=false",
			args: args{
				expandPath: nil,
				devcontainers: []codersdk.WorkspaceAgentDevcontainer{
					{ID: devcontainerIDs[0], WorkspaceFolder: "workspace1"},
					{ID: devcontainerIDs[1], WorkspaceFolder: "workspace2"},
				},
				scripts: []codersdk.WorkspaceAgentScript{
					{ID: scriptIDs[0], RunOnStart: true},
					{ID: scriptIDs[1], RunOnStart: true},
					{ID: devcontainerIDs[0], RunOnStart: true},
					{ID: devcontainerIDs[1], RunOnStart: true},
				},
			},
			wantFilteredScripts: []codersdk.WorkspaceAgentScript{
				{ID: scriptIDs[0], RunOnStart: true},
				{ID: scriptIDs[1], RunOnStart: true},
			},
			wantDevcontainerScripts: []codersdk.WorkspaceAgentScript{
				{
					ID:         devcontainerIDs[0],
					Script:     "devcontainer up --log-format json --workspace-folder \"workspace1\"",
					RunOnStart: false,
				},
				{
					ID:         devcontainerIDs[1],
					Script:     "devcontainer up --log-format json --workspace-folder \"workspace2\"",
					RunOnStart: false,
				},
			},
		},
		{
			name: "scripts match devcontainers with config path",
			args: args{
				expandPath: nil,
				devcontainers: []codersdk.WorkspaceAgentDevcontainer{
					{
						ID:              devcontainerIDs[0],
						WorkspaceFolder: "workspace1",
						ConfigPath:      "config1",
					},
					{
						ID:              devcontainerIDs[1],
						WorkspaceFolder: "workspace2",
						ConfigPath:      "config2",
					},
				},
				scripts: []codersdk.WorkspaceAgentScript{
					{ID: devcontainerIDs[0]},
					{ID: devcontainerIDs[1]},
				},
			},
			wantFilteredScripts: []codersdk.WorkspaceAgentScript{},
			wantDevcontainerScripts: []codersdk.WorkspaceAgentScript{
				{
					ID:         devcontainerIDs[0],
					Script:     "devcontainer up --log-format json --workspace-folder \"workspace1\" --config \"workspace1/config1\"",
					RunOnStart: false,
				},
				{
					ID:         devcontainerIDs[1],
					Script:     "devcontainer up --log-format json --workspace-folder \"workspace2\" --config \"workspace2/config2\"",
					RunOnStart: false,
				},
			},
			skipOnWindowsDueToPathSeparator: true,
		},
		{
			name: "scripts match devcontainers with expand path",
			args: args{
				expandPath: func(s string) (string, error) {
					return "/home/" + s, nil
				},
				devcontainers: []codersdk.WorkspaceAgentDevcontainer{
					{
						ID:              devcontainerIDs[0],
						WorkspaceFolder: "workspace1",
						ConfigPath:      "config1",
					},
					{
						ID:              devcontainerIDs[1],
						WorkspaceFolder: "workspace2",
						ConfigPath:      "config2",
					},
				},
				scripts: []codersdk.WorkspaceAgentScript{
					{ID: devcontainerIDs[0], RunOnStart: true},
					{ID: devcontainerIDs[1], RunOnStart: true},
				},
			},
			wantFilteredScripts: []codersdk.WorkspaceAgentScript{},
			wantDevcontainerScripts: []codersdk.WorkspaceAgentScript{
				{
					ID:         devcontainerIDs[0],
					Script:     "devcontainer up --log-format json --workspace-folder \"/home/workspace1\" --config \"/home/workspace1/config1\"",
					RunOnStart: false,
				},
				{
					ID:         devcontainerIDs[1],
					Script:     "devcontainer up --log-format json --workspace-folder \"/home/workspace2\" --config \"/home/workspace2/config2\"",
					RunOnStart: false,
				},
			},
			skipOnWindowsDueToPathSeparator: true,
		},
		{
			name: "expand config path when ~",
			args: args{
				expandPath: func(s string) (string, error) {
					s = strings.Replace(s, "~/", "", 1)
					if filepath.IsAbs(s) {
						return s, nil
					}
					return "/home/" + s, nil
				},
				devcontainers: []codersdk.WorkspaceAgentDevcontainer{
					{
						ID:              devcontainerIDs[0],
						WorkspaceFolder: "workspace1",
						ConfigPath:      "~/config1",
					},
					{
						ID:              devcontainerIDs[1],
						WorkspaceFolder: "workspace2",
						ConfigPath:      "/config2",
					},
				},
				scripts: []codersdk.WorkspaceAgentScript{
					{ID: devcontainerIDs[0], RunOnStart: true},
					{ID: devcontainerIDs[1], RunOnStart: true},
				},
			},
			wantFilteredScripts: []codersdk.WorkspaceAgentScript{},
			wantDevcontainerScripts: []codersdk.WorkspaceAgentScript{
				{
					ID:         devcontainerIDs[0],
					Script:     "devcontainer up --log-format json --workspace-folder \"/home/workspace1\" --config \"/home/config1\"",
					RunOnStart: false,
				},
				{
					ID:         devcontainerIDs[1],
					Script:     "devcontainer up --log-format json --workspace-folder \"/home/workspace2\" --config \"/config2\"",
					RunOnStart: false,
				},
			},
			skipOnWindowsDueToPathSeparator: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			if tt.skipOnWindowsDueToPathSeparator && filepath.Separator == '\\' {
				t.Skip("Skipping test on Windows due to path separator difference.")
			}
			logger := slogtest.Make(t, nil)
			// Default to identity expansion when the case does not
			// provide its own expandPath.
			if tt.args.expandPath == nil {
				tt.args.expandPath = func(s string) (string, error) {
					return s, nil
				}
			}
			gotFilteredScripts, gotDevcontainerScripts := agentcontainers.ExtractAndInitializeDevcontainerScripts(
				agentcontainers.ExpandAllDevcontainerPaths(logger, tt.args.expandPath, tt.args.devcontainers),
				tt.args.scripts,
			)
			if diff := cmp.Diff(tt.wantFilteredScripts, gotFilteredScripts, cmpopts.EquateEmpty()); diff != "" {
				t.Errorf("ExtractAndInitializeDevcontainerScripts() gotFilteredScripts mismatch (-want +got):\n%s", diff)
			}

			// Preprocess the devcontainer scripts to remove scripting part.
			for i := range gotDevcontainerScripts {
				gotDevcontainerScripts[i].Script = textGrep("devcontainer up", gotDevcontainerScripts[i].Script)
				require.NotEmpty(t, gotDevcontainerScripts[i].Script, "devcontainer up script not found")
			}
			if diff := cmp.Diff(tt.wantDevcontainerScripts, gotDevcontainerScripts); diff != "" {
				t.Errorf("ExtractAndInitializeDevcontainerScripts() gotDevcontainerScripts mismatch (-want +got):\n%s", diff)
			}
		})
	}
}
// textGrep returns only the lines of got that contain the substring want,
// preserving their original order. Matching lines are joined with "\n";
// when nothing matches, the empty string is returned.
func textGrep(want, got string) (filtered string) {
	var sb strings.Builder
	for _, line := range strings.Split(got, "\n") {
		if !strings.Contains(line, want) {
			continue
		}
		if sb.Len() > 0 {
			sb.WriteByte('\n')
		}
		sb.WriteString(line)
	}
	return sb.String()
}
+96 -31
View File
@@ -6,7 +6,10 @@ import (
"context"
"encoding/json"
"errors"
"fmt"
"io"
"slices"
"strings"
"golang.org/x/xerrors"
@@ -26,12 +29,55 @@ type DevcontainerConfig struct {
type DevcontainerMergedConfiguration struct {
Customizations DevcontainerMergedCustomizations `json:"customizations,omitempty"`
Features DevcontainerFeatures `json:"features,omitempty"`
}
type DevcontainerMergedCustomizations struct {
Coder []CoderCustomization `json:"coder,omitempty"`
}
type DevcontainerFeatures map[string]any
// OptionsAsEnvs converts the DevcontainerFeatures into a list of
// environment variables that can be used to set feature options.
// The format is FEATURE_<FEATURE_NAME>_OPTION_<OPTION_NAME>=<value>.
// For example, if the feature is:
//
// "ghcr.io/coder/devcontainer-features/code-server:1": {
// "port": 9090,
// }
//
// It will produce:
//
// FEATURE_CODE_SERVER_OPTION_PORT=9090
//
// Note that the feature name is derived from the last part of the key,
// so "ghcr.io/coder/devcontainer-features/code-server:1" becomes
// "CODE_SERVER". The version part (e.g. ":1") is removed, and dashes in
// the feature and option names are replaced with underscores.
func (f DevcontainerFeatures) OptionsAsEnvs() []string {
	var env []string
	for k, v := range f {
		// Feature options must be a JSON object; silently skip scalar or
		// otherwise malformed entries (e.g. a bare string) instead of
		// producing a bogus variable.
		vv, ok := v.(map[string]any)
		if !ok {
			continue
		}
		// Take the last part of the key as the feature name/path.
		k = k[strings.LastIndex(k, "/")+1:]
		// Remove ":" and anything following it.
		if idx := strings.Index(k, ":"); idx != -1 {
			k = k[:idx]
		}
		k = strings.ReplaceAll(k, "-", "_")
		for k2, v2 := range vv {
			k2 = strings.ReplaceAll(k2, "-", "_")
			// %v stringifies any option value (number, bool, string alike).
			env = append(env, fmt.Sprintf("FEATURE_%s_OPTION_%s=%s", strings.ToUpper(k), strings.ToUpper(k2), fmt.Sprintf("%v", v2)))
		}
	}
	// Map iteration order is random in Go; sort for deterministic output.
	slices.Sort(env)
	return env
}
type DevcontainerConfiguration struct {
Customizations DevcontainerCustomizations `json:"customizations,omitempty"`
}
@@ -140,7 +186,7 @@ func WithReadConfigOutput(stdout, stderr io.Writer) DevcontainerCLIReadConfigOpt
}
func applyDevcontainerCLIUpOptions(opts []DevcontainerCLIUpOptions) devcontainerCLIUpConfig {
conf := devcontainerCLIUpConfig{}
conf := devcontainerCLIUpConfig{stdout: io.Discard, stderr: io.Discard}
for _, opt := range opts {
if opt != nil {
opt(&conf)
@@ -150,7 +196,7 @@ func applyDevcontainerCLIUpOptions(opts []DevcontainerCLIUpOptions) devcontainer
}
func applyDevcontainerCLIExecOptions(opts []DevcontainerCLIExecOptions) devcontainerCLIExecConfig {
conf := devcontainerCLIExecConfig{}
conf := devcontainerCLIExecConfig{stdout: io.Discard, stderr: io.Discard}
for _, opt := range opts {
if opt != nil {
opt(&conf)
@@ -160,7 +206,7 @@ func applyDevcontainerCLIExecOptions(opts []DevcontainerCLIExecOptions) devconta
}
func applyDevcontainerCLIReadConfigOptions(opts []DevcontainerCLIReadConfigOptions) devcontainerCLIReadConfigConfig {
conf := devcontainerCLIReadConfigConfig{}
conf := devcontainerCLIReadConfigConfig{stdout: io.Discard, stderr: io.Discard}
for _, opt := range opts {
if opt != nil {
opt(&conf)
@@ -200,17 +246,20 @@ func (d *devcontainerCLI) Up(ctx context.Context, workspaceFolder, configPath st
// Capture stdout for parsing and stream logs for both default and provided writers.
var stdoutBuf bytes.Buffer
stdoutWriters := []io.Writer{&stdoutBuf, &devcontainerCLILogWriter{ctx: ctx, logger: logger.With(slog.F("stdout", true))}}
if conf.stdout != nil {
stdoutWriters = append(stdoutWriters, conf.stdout)
}
cmd.Stdout = io.MultiWriter(stdoutWriters...)
cmd.Stdout = io.MultiWriter(
&stdoutBuf,
&devcontainerCLILogWriter{
ctx: ctx,
logger: logger.With(slog.F("stdout", true)),
writer: conf.stdout,
},
)
// Stream stderr logs and provided writer if any.
stderrWriters := []io.Writer{&devcontainerCLILogWriter{ctx: ctx, logger: logger.With(slog.F("stderr", true))}}
if conf.stderr != nil {
stderrWriters = append(stderrWriters, conf.stderr)
cmd.Stderr = &devcontainerCLILogWriter{
ctx: ctx,
logger: logger.With(slog.F("stderr", true)),
writer: conf.stderr,
}
cmd.Stderr = io.MultiWriter(stderrWriters...)
if err := cmd.Run(); err != nil {
_, err2 := parseDevcontainerCLILastLine[devcontainerCLIResult](ctx, logger, stdoutBuf.Bytes())
@@ -249,16 +298,16 @@ func (d *devcontainerCLI) Exec(ctx context.Context, workspaceFolder, configPath
args = append(args, cmdArgs...)
c := d.execer.CommandContext(ctx, "devcontainer", args...)
stdoutWriters := []io.Writer{&devcontainerCLILogWriter{ctx: ctx, logger: logger.With(slog.F("stdout", true))}}
if conf.stdout != nil {
stdoutWriters = append(stdoutWriters, conf.stdout)
}
c.Stdout = io.MultiWriter(stdoutWriters...)
stderrWriters := []io.Writer{&devcontainerCLILogWriter{ctx: ctx, logger: logger.With(slog.F("stderr", true))}}
if conf.stderr != nil {
stderrWriters = append(stderrWriters, conf.stderr)
}
c.Stderr = io.MultiWriter(stderrWriters...)
c.Stdout = io.MultiWriter(conf.stdout, &devcontainerCLILogWriter{
ctx: ctx,
logger: logger.With(slog.F("stdout", true)),
writer: io.Discard,
})
c.Stderr = io.MultiWriter(conf.stderr, &devcontainerCLILogWriter{
ctx: ctx,
logger: logger.With(slog.F("stderr", true)),
writer: io.Discard,
})
if err := c.Run(); err != nil {
return xerrors.Errorf("devcontainer exec failed: %w", err)
@@ -283,16 +332,19 @@ func (d *devcontainerCLI) ReadConfig(ctx context.Context, workspaceFolder, confi
c.Env = append(c.Env, env...)
var stdoutBuf bytes.Buffer
stdoutWriters := []io.Writer{&stdoutBuf, &devcontainerCLILogWriter{ctx: ctx, logger: logger.With(slog.F("stdout", true))}}
if conf.stdout != nil {
stdoutWriters = append(stdoutWriters, conf.stdout)
c.Stdout = io.MultiWriter(
&stdoutBuf,
&devcontainerCLILogWriter{
ctx: ctx,
logger: logger.With(slog.F("stdout", true)),
writer: conf.stdout,
},
)
c.Stderr = &devcontainerCLILogWriter{
ctx: ctx,
logger: logger.With(slog.F("stderr", true)),
writer: conf.stderr,
}
c.Stdout = io.MultiWriter(stdoutWriters...)
stderrWriters := []io.Writer{&devcontainerCLILogWriter{ctx: ctx, logger: logger.With(slog.F("stderr", true))}}
if conf.stderr != nil {
stderrWriters = append(stderrWriters, conf.stderr)
}
c.Stderr = io.MultiWriter(stderrWriters...)
if err := c.Run(); err != nil {
return DevcontainerConfig{}, xerrors.Errorf("devcontainer read-configuration failed: %w", err)
@@ -385,6 +437,7 @@ type devcontainerCLIJSONLogLine struct {
type devcontainerCLILogWriter struct {
ctx context.Context
logger slog.Logger
writer io.Writer
}
func (l *devcontainerCLILogWriter) Write(p []byte) (n int, err error) {
@@ -405,8 +458,20 @@ func (l *devcontainerCLILogWriter) Write(p []byte) (n int, err error) {
}
if logLine.Level >= 3 {
l.logger.Info(l.ctx, "@devcontainer/cli", slog.F("line", string(line)))
_, _ = l.writer.Write([]byte(strings.TrimSpace(logLine.Text) + "\n"))
continue
}
// If we've successfully parsed the final log line, it will successfully parse
// but will not fill out any of the fields for `logLine`. In this scenario we
// assume it is the final log line, unmarshal it as that, and check if the
// outcome is a non-empty string.
if logLine.Level == 0 {
var lastLine devcontainerCLIResult
if err := json.Unmarshal(line, &lastLine); err == nil && lastLine.Outcome != "" {
_, _ = l.writer.Write(line)
_, _ = l.writer.Write([]byte{'\n'})
}
}
l.logger.Debug(l.ctx, "@devcontainer/cli", slog.F("line", string(line)))
}
if err := s.Err(); err != nil {
+112 -1
View File
@@ -3,6 +3,7 @@ package agentcontainers_test
import (
"bytes"
"context"
"encoding/json"
"errors"
"flag"
"fmt"
@@ -10,9 +11,11 @@ import (
"os"
"os/exec"
"path/filepath"
"runtime"
"strings"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/ory/dockertest/v3"
"github.com/ory/dockertest/v3/docker"
"github.com/stretchr/testify/assert"
@@ -341,6 +344,10 @@ func TestDevcontainerCLI_WithOutput(t *testing.T) {
t.Run("Up", func(t *testing.T) {
t.Parallel()
if runtime.GOOS == "windows" {
t.Skip("Windows uses CRLF line endings, golden file is LF")
}
// Buffers to capture stdout and stderr.
outBuf := &bytes.Buffer{}
errBuf := &bytes.Buffer{}
@@ -363,7 +370,7 @@ func TestDevcontainerCLI_WithOutput(t *testing.T) {
require.NotEmpty(t, containerID, "expected non-empty container ID")
// Read expected log content.
expLog, err := os.ReadFile(filepath.Join("testdata", "devcontainercli", "parse", "up.log"))
expLog, err := os.ReadFile(filepath.Join("testdata", "devcontainercli", "parse", "up.golden"))
require.NoError(t, err, "reading expected log file")
// Verify stdout buffer contains the CLI logs and stderr is empty.
@@ -637,3 +644,107 @@ func removeDevcontainerByID(t *testing.T, pool *dockertest.Pool, id string) {
assert.NoError(t, err, "remove container failed")
}
}
// TestDevcontainerFeatures_OptionsAsEnvs verifies that devcontainer feature
// options are converted into FEATURE_<NAME>_OPTION_<KEY>=<value> environment
// variables: the feature name is the last path segment of the key, the
// version suffix (":N") is stripped, and dashes become underscores.
func TestDevcontainerFeatures_OptionsAsEnvs(t *testing.T) {
	t.Parallel()

	realConfigJSON := `{
		"mergedConfiguration": {
			"features": {
				"./code-server": {
					"port": 9090
				},
				"ghcr.io/devcontainers/features/docker-in-docker:2": {
					"moby": "false"
				}
			}
		}
	}`
	var realConfig agentcontainers.DevcontainerConfig
	err := json.Unmarshal([]byte(realConfigJSON), &realConfig)
	require.NoError(t, err, "unmarshal JSON payload")

	tests := []struct {
		name     string
		features agentcontainers.DevcontainerFeatures
		want     []string
	}{
		{
			name: "code-server feature",
			features: agentcontainers.DevcontainerFeatures{
				"./code-server": map[string]any{
					"port": 9090,
				},
			},
			want: []string{
				"FEATURE_CODE_SERVER_OPTION_PORT=9090",
			},
		},
		{
			name: "docker-in-docker feature",
			features: agentcontainers.DevcontainerFeatures{
				"ghcr.io/devcontainers/features/docker-in-docker:2": map[string]any{
					"moby": "false",
				},
			},
			want: []string{
				"FEATURE_DOCKER_IN_DOCKER_OPTION_MOBY=false",
			},
		},
		{
			name: "multiple features with multiple options",
			features: agentcontainers.DevcontainerFeatures{
				"./code-server": map[string]any{
					"port":     9090,
					"password": "secret",
				},
				"ghcr.io/devcontainers/features/docker-in-docker:2": map[string]any{
					"moby":                        "false",
					"docker-dash-compose-version": "v2",
				},
			},
			want: []string{
				"FEATURE_CODE_SERVER_OPTION_PASSWORD=secret",
				"FEATURE_CODE_SERVER_OPTION_PORT=9090",
				"FEATURE_DOCKER_IN_DOCKER_OPTION_DOCKER_DASH_COMPOSE_VERSION=v2",
				"FEATURE_DOCKER_IN_DOCKER_OPTION_MOBY=false",
			},
		},
		{
			name: "feature with non-map value (should be ignored)",
			features: agentcontainers.DevcontainerFeatures{
				"./code-server": map[string]any{
					"port": 9090,
				},
				"./invalid-feature": "not-a-map",
			},
			want: []string{
				"FEATURE_CODE_SERVER_OPTION_PORT=9090",
			},
		},
		{
			name:     "real config example",
			features: realConfig.MergedConfiguration.Features,
			want: []string{
				"FEATURE_CODE_SERVER_OPTION_PORT=9090",
				"FEATURE_DOCKER_IN_DOCKER_OPTION_MOBY=false",
			},
		},
		{
			name:     "empty features",
			features: agentcontainers.DevcontainerFeatures{},
			want:     nil,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			got := tt.features.OptionsAsEnvs()
			if diff := cmp.Diff(tt.want, got); diff != "" {
				// Fix: the previous require.Failf call passed the format
				// string as testify's failureMessage and the diff as the
				// *format* argument, so %s was never substituted and any
				// '%' in the diff was misinterpreted by fmt.Sprintf.
				// t.Errorf also matches the convention used elsewhere in
				// this file.
				t.Errorf("OptionsAsEnvs() mismatch (-want +got):\n%s", diff)
			}
		})
	}
}
@@ -0,0 +1,64 @@
@devcontainers/cli 0.75.0. Node.js v23.9.0. darwin 24.4.0 arm64.
Resolving Feature dependencies for 'ghcr.io/devcontainers/features/docker-in-docker:2'...
Soft-dependency 'ghcr.io/devcontainers/features/common-utils' is not required. Removing from installation order...
Files to omit: ''
Run: docker buildx build --load --build-context dev_containers_feature_content_source=/var/folders/1y/cm8mblxd7_x9cljwl_jvfprh0000gn/T/devcontainercli/container-features/0.75.0-1744102171193 --build-arg _DEV_CONTAINERS_BASE_IMAGE=mcr.microsoft.com/devcontainers/javascript-node:1-18-bullseye --build-arg _DEV_CONTAINERS_IMAGE_USER=root --build-arg _DEV_CONTAINERS_FEATURE_CONTENT_SOURCE=dev_container_feature_content_temp --target dev_containers_target_stage -f /var/folders/1y/cm8mblxd7_x9cljwl_jvfprh0000gn/T/devcontainercli/container-features/0.75.0-1744102171193/Dockerfile.extended -t vsc-devcontainers-template-starter-81d8f17e32abef6d434cbb5a37fe05e5c8a6f8ccede47a61197f002dcbf60566-features /var/folders/1y/cm8mblxd7_x9cljwl_jvfprh0000gn/T/devcontainercli/empty-folder
#0 building with "orbstack" instance using docker driver
#1 [internal] load build definition from Dockerfile.extended
#1 transferring dockerfile: 3.09kB done
#1 DONE 0.0s
#2 resolve image config for docker-image://docker.io/docker/dockerfile:1.4
#2 DONE 1.3s
#3 docker-image://docker.io/docker/dockerfile:1.4@sha256:9ba7531bd80fb0a858632727cf7a112fbfd19b17e94c4e84ced81e24ef1a0dbc
#3 CACHED
#4 [internal] load .dockerignore
#4 transferring context: 2B done
#4 DONE 0.0s
#5 [internal] load metadata for mcr.microsoft.com/devcontainers/javascript-node:1-18-bullseye
#5 DONE 0.0s
#6 [context dev_containers_feature_content_source] load .dockerignore
#6 transferring dev_containers_feature_content_source: 2B done
#6 DONE 0.0s
#7 [dev_containers_feature_content_normalize 1/3] FROM mcr.microsoft.com/devcontainers/javascript-node:1-18-bullseye
#7 DONE 0.0s
#8 [context dev_containers_feature_content_source] load from client
#8 transferring dev_containers_feature_content_source: 82.11kB 0.0s done
#8 DONE 0.0s
#9 [dev_containers_feature_content_normalize 2/3] COPY --from=dev_containers_feature_content_source devcontainer-features.builtin.env /tmp/build-features/
#9 CACHED
#10 [dev_containers_target_stage 2/5] RUN mkdir -p /tmp/dev-container-features
#10 CACHED
#11 [dev_containers_target_stage 3/5] COPY --from=dev_containers_feature_content_normalize /tmp/build-features/ /tmp/dev-container-features
#11 CACHED
#12 [dev_containers_target_stage 4/5] RUN echo "_CONTAINER_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'root' || grep -E '^root|^[^:]*:[^:]*:root:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env && echo "_REMOTE_USER_HOME=$( (command -v getent >/dev/null 2>&1 && getent passwd 'node' || grep -E '^node|^[^:]*:[^:]*:node:' /etc/passwd || true) | cut -d: -f6)" >> /tmp/dev-container-features/devcontainer-features.builtin.env
#12 CACHED
#13 [dev_containers_feature_content_normalize 3/3] RUN chmod -R 0755 /tmp/build-features/
#13 CACHED
#14 [dev_containers_target_stage 5/5] RUN --mount=type=bind,from=dev_containers_feature_content_source,source=docker-in-docker_0,target=/tmp/build-features-src/docker-in-docker_0 cp -ar /tmp/build-features-src/docker-in-docker_0 /tmp/dev-container-features && chmod -R 0755 /tmp/dev-container-features/docker-in-docker_0 && cd /tmp/dev-container-features/docker-in-docker_0 && chmod +x ./devcontainer-features-install.sh && ./devcontainer-features-install.sh && rm -rf /tmp/dev-container-features/docker-in-docker_0
#14 CACHED
#15 exporting to image
#15 exporting layers done
#15 writing image sha256:275dc193c905d448ef3945e3fc86220cc315fe0cb41013988d6ff9f8d6ef2357 done
#15 naming to docker.io/library/vsc-devcontainers-template-starter-81d8f17e32abef6d434cbb5a37fe05e5c8a6f8ccede47a61197f002dcbf60566-features done
#15 DONE 0.0s
Run: docker buildx build --load --build-context dev_containers_feature_content_source=/var/folders/1y/cm8mblxd7_x9cljwl_jvfprh0000gn/T/devcontainercli/container-features/0.75.0-1744102171193 --build-arg _DEV_CONTAINERS_BASE_IMAGE=mcr.microsoft.com/devcontainers/javascript-node:1-18-bullseye --build-arg _DEV_CONTAINERS_IMAGE_USER=root --build-arg _DEV_CONTAINERS_FEATURE_CONTENT_SOURCE=dev_container_feature_content_temp --target dev_containers_target_stage -f /var/folders/1y/cm8mblxd7_x9cljwl_jvfprh0000gn/T/devcontainercli/container-features/0.75.0-1744102171193/Dockerfile.extended -t vsc-devcontainers-template-starter-81d8f17e32abef6d434cbb5a37fe05e5c8a6f8ccede47a61197f002dcbf60566-features /var/folders/1y/cm8mblxd7_x9cljwl_jvfprh0000gn/T/devcontainercli/empty-folder
Run: docker run --sig-proxy=false -a STDOUT -a STDERR --mount type=bind,source=/code/devcontainers-template-starter,target=/workspaces/devcontainers-template-starter,consistency=cached --mount type=volume,src=dind-var-lib-docker-0pctifo8bbg3pd06g3j5s9ae8j7lp5qfcd67m25kuahurel7v7jm,dst=/var/lib/docker -l devcontainer.local_folder=/code/devcontainers-template-starter -l devcontainer.config_file=/code/devcontainers-template-starter/.devcontainer/devcontainer.json --privileged --entrypoint /bin/sh vsc-devcontainers-template-starter-81d8f17e32abef6d434cbb5a37fe05e5c8a6f8ccede47a61197f002dcbf60566-features -c echo Container started
Container started
Not setting dockerd DNS manually.
Running the postCreateCommand from devcontainer.json...
added 1 package in 784ms
{"outcome":"success","containerId":"bc72db8d0c4c4e941bd9ffc341aee64a18d3397fd45b87cd93d4746150967ba8","remoteUser":"node","remoteWorkspaceFolder":"/workspaces/devcontainers-template-starter"}
+4 -34
View File
@@ -79,21 +79,6 @@ func New(opts Options) *Runner {
type ScriptCompletedFunc func(context.Context, *proto.WorkspaceAgentScriptCompletedRequest) (*proto.WorkspaceAgentScriptCompletedResponse, error)
type runnerScript struct {
runOnPostStart bool
codersdk.WorkspaceAgentScript
}
func toRunnerScript(scripts ...codersdk.WorkspaceAgentScript) []runnerScript {
var rs []runnerScript
for _, s := range scripts {
rs = append(rs, runnerScript{
WorkspaceAgentScript: s,
})
}
return rs
}
type Runner struct {
Options
@@ -103,7 +88,7 @@ type Runner struct {
closed chan struct{}
closeMutex sync.Mutex
cron *cron.Cron
scripts []runnerScript
scripts []codersdk.WorkspaceAgentScript
dataDir string
scriptCompleted ScriptCompletedFunc
@@ -138,19 +123,6 @@ func (r *Runner) RegisterMetrics(reg prometheus.Registerer) {
// InitOption describes an option for the runner initialization.
type InitOption func(*Runner)
// WithPostStartScripts adds scripts that should be run after the workspace
// start scripts but before the workspace is marked as started.
func WithPostStartScripts(scripts ...codersdk.WorkspaceAgentScript) InitOption {
return func(r *Runner) {
for _, s := range scripts {
r.scripts = append(r.scripts, runnerScript{
runOnPostStart: true,
WorkspaceAgentScript: s,
})
}
}
}
// Init initializes the runner with the provided scripts.
// It also schedules any scripts that have a schedule.
// This function must be called before Execute.
@@ -161,7 +133,7 @@ func (r *Runner) Init(scripts []codersdk.WorkspaceAgentScript, scriptCompleted S
return xerrors.New("init: already initialized")
}
r.initialized = true
r.scripts = toRunnerScript(scripts...)
r.scripts = scripts
r.scriptCompleted = scriptCompleted
for _, opt := range opts {
opt(r)
@@ -179,7 +151,7 @@ func (r *Runner) Init(scripts []codersdk.WorkspaceAgentScript, scriptCompleted S
}
script := script
_, err := r.cron.AddFunc(script.Cron, func() {
err := r.trackRun(r.cronCtx, script.WorkspaceAgentScript, ExecuteCronScripts)
err := r.trackRun(r.cronCtx, script, ExecuteCronScripts)
if err != nil {
r.Logger.Warn(context.Background(), "run agent script on schedule", slog.Error(err))
}
@@ -223,7 +195,6 @@ type ExecuteOption int
const (
ExecuteAllScripts ExecuteOption = iota
ExecuteStartScripts
ExecutePostStartScripts
ExecuteStopScripts
ExecuteCronScripts
)
@@ -246,7 +217,6 @@ func (r *Runner) Execute(ctx context.Context, option ExecuteOption) error {
for _, script := range r.scripts {
runScript := (option == ExecuteStartScripts && script.RunOnStart) ||
(option == ExecuteStopScripts && script.RunOnStop) ||
(option == ExecutePostStartScripts && script.runOnPostStart) ||
(option == ExecuteCronScripts && script.Cron != "") ||
option == ExecuteAllScripts
@@ -256,7 +226,7 @@ func (r *Runner) Execute(ctx context.Context, option ExecuteOption) error {
script := script
eg.Go(func() error {
err := r.trackRun(ctx, script.WorkspaceAgentScript, option)
err := r.trackRun(ctx, script, option)
if err != nil {
return xerrors.Errorf("run agent script %q: %w", script.LogSourceID, err)
}
+6 -20
View File
@@ -4,7 +4,6 @@ import (
"context"
"path/filepath"
"runtime"
"slices"
"sync"
"testing"
"time"
@@ -177,11 +176,6 @@ func TestExecuteOptions(t *testing.T) {
Script: "echo stop",
RunOnStop: true,
}
postStartScript := codersdk.WorkspaceAgentScript{
ID: uuid.New(),
LogSourceID: uuid.New(),
Script: "echo poststart",
}
regularScript := codersdk.WorkspaceAgentScript{
ID: uuid.New(),
LogSourceID: uuid.New(),
@@ -193,10 +187,9 @@ func TestExecuteOptions(t *testing.T) {
stopScript,
regularScript,
}
allScripts := append(slices.Clone(scripts), postStartScript)
scriptByID := func(t *testing.T, id uuid.UUID) codersdk.WorkspaceAgentScript {
for _, script := range allScripts {
for _, script := range scripts {
if script.ID == id {
return script
}
@@ -206,10 +199,9 @@ func TestExecuteOptions(t *testing.T) {
}
wantOutput := map[uuid.UUID]string{
startScript.ID: "start",
stopScript.ID: "stop",
postStartScript.ID: "poststart",
regularScript.ID: "regular",
startScript.ID: "start",
stopScript.ID: "stop",
regularScript.ID: "regular",
}
testCases := []struct {
@@ -220,18 +212,13 @@ func TestExecuteOptions(t *testing.T) {
{
name: "ExecuteAllScripts",
option: agentscripts.ExecuteAllScripts,
wantRun: []uuid.UUID{startScript.ID, stopScript.ID, regularScript.ID, postStartScript.ID},
wantRun: []uuid.UUID{startScript.ID, stopScript.ID, regularScript.ID},
},
{
name: "ExecuteStartScripts",
option: agentscripts.ExecuteStartScripts,
wantRun: []uuid.UUID{startScript.ID},
},
{
name: "ExecutePostStartScripts",
option: agentscripts.ExecutePostStartScripts,
wantRun: []uuid.UUID{postStartScript.ID},
},
{
name: "ExecuteStopScripts",
option: agentscripts.ExecuteStopScripts,
@@ -260,7 +247,6 @@ func TestExecuteOptions(t *testing.T) {
err := runner.Init(
scripts,
aAPI.ScriptCompleted,
agentscripts.WithPostStartScripts(postStartScript),
)
require.NoError(t, err)
@@ -274,7 +260,7 @@ func TestExecuteOptions(t *testing.T) {
"script %s should have run when using filter %s", scriptByID(t, id).Script, tc.name)
}
for _, script := range allScripts {
for _, script := range scripts {
if _, ok := gotRun[script.ID]; ok {
continue
}
+41 -1
View File
@@ -125,6 +125,7 @@ type Server struct {
listeners map[net.Listener]struct{}
conns map[net.Conn]struct{}
sessions map[ssh.Session]struct{}
processes map[*os.Process]struct{}
closing chan struct{}
// Wait for goroutines to exit, waited without
// a lock on mu but protected by closing.
@@ -183,6 +184,7 @@ func NewServer(ctx context.Context, logger slog.Logger, prometheusRegistry *prom
fs: fs,
conns: make(map[net.Conn]struct{}),
sessions: make(map[ssh.Session]struct{}),
processes: make(map[*os.Process]struct{}),
logger: logger,
config: config,
@@ -587,7 +589,10 @@ func (s *Server) startNonPTYSession(logger slog.Logger, session ssh.Session, mag
// otherwise context cancellation will not propagate properly
// and SSH server close may be delayed.
cmd.SysProcAttr = cmdSysProcAttr()
cmd.Cancel = cmdCancel(session.Context(), logger, cmd)
// to match OpenSSH, we don't actually tear a non-TTY command down, even if the session ends.
// c.f. https://github.com/coder/coder/issues/18519#issuecomment-3019118271
cmd.Cancel = nil
cmd.Stdout = session
cmd.Stderr = session.Stderr()
@@ -610,6 +615,16 @@ func (s *Server) startNonPTYSession(logger slog.Logger, session ssh.Session, mag
s.metrics.sessionErrors.WithLabelValues(magicTypeLabel, "no", "start_command").Add(1)
return xerrors.Errorf("start: %w", err)
}
// Since we don't cancel the process when the session stops, we still need to tear it down if we are closing. So
// track it here.
if !s.trackProcess(cmd.Process, true) {
// must be closing
err = cmdCancel(logger, cmd.Process)
return xerrors.Errorf("failed to track process: %w", err)
}
defer s.trackProcess(cmd.Process, false)
sigs := make(chan ssh.Signal, 1)
session.Signals(sigs)
defer func() {
@@ -1070,6 +1085,27 @@ func (s *Server) trackSession(ss ssh.Session, add bool) (ok bool) {
return true
}
// trackProcess registers the process with the server. If the server is
// closing, the process is not registered and should be closed by the
// caller. With add=false, the process is deregistered (e.g. after it has
// exited) and the server's wait group is released.
//
//nolint:revive
func (s *Server) trackProcess(p *os.Process, add bool) (ok bool) {
	s.mu.Lock()
	defer s.mu.Unlock()
	if add {
		if s.closing != nil {
			// Server closed.
			return false
		}
		// Pair: the matching wg.Done happens on the add=false call.
		s.wg.Add(1)
		s.processes[p] = struct{}{}
		return true
	}
	s.wg.Done()
	delete(s.processes, p)
	return true
}
// Close the server and all active connections. Server can be re-used
// after Close is done.
func (s *Server) Close() error {
@@ -1109,6 +1145,10 @@ func (s *Server) Close() error {
_ = c.Close()
}
for p := range s.processes {
_ = cmdCancel(s.logger, p)
}
s.logger.Debug(ctx, "closing SSH server")
err := s.srv.Close()
+4 -6
View File
@@ -4,7 +4,7 @@ package agentssh
import (
"context"
"os/exec"
"os"
"syscall"
"cdr.dev/slog"
@@ -16,9 +16,7 @@ func cmdSysProcAttr() *syscall.SysProcAttr {
}
}
func cmdCancel(ctx context.Context, logger slog.Logger, cmd *exec.Cmd) func() error {
return func() error {
logger.Debug(ctx, "cmdCancel: sending SIGHUP to process and children", slog.F("pid", cmd.Process.Pid))
return syscall.Kill(-cmd.Process.Pid, syscall.SIGHUP)
}
func cmdCancel(logger slog.Logger, p *os.Process) error {
logger.Debug(context.Background(), "cmdCancel: sending SIGHUP to process and children", slog.F("pid", p.Pid))
return syscall.Kill(-p.Pid, syscall.SIGHUP)
}
+9 -11
View File
@@ -2,7 +2,7 @@ package agentssh
import (
"context"
"os/exec"
"os"
"syscall"
"cdr.dev/slog"
@@ -12,14 +12,12 @@ func cmdSysProcAttr() *syscall.SysProcAttr {
return &syscall.SysProcAttr{}
}
func cmdCancel(ctx context.Context, logger slog.Logger, cmd *exec.Cmd) func() error {
return func() error {
logger.Debug(ctx, "cmdCancel: killing process", slog.F("pid", cmd.Process.Pid))
// Windows doesn't support sending signals to process groups, so we
// have to kill the process directly. In the future, we may want to
// implement a more sophisticated solution for process groups on
// Windows, but for now, this is a simple way to ensure that the
// process is terminated when the context is cancelled.
return cmd.Process.Kill()
}
func cmdCancel(logger slog.Logger, p *os.Process) error {
logger.Debug(context.Background(), "cmdCancel: killing process", slog.F("pid", p.Pid))
// Windows doesn't support sending signals to process groups, so we
// have to kill the process directly. In the future, we may want to
// implement a more sophisticated solution for process groups on
// Windows, but for now, this is a simple way to ensure that the
// process is terminated when the context is cancelled.
return p.Kill()
}
+4 -40
View File
@@ -7,15 +7,11 @@ import (
"github.com/go-chi/chi/v5"
"github.com/google/uuid"
"github.com/coder/coder/v2/agent/agentcontainers"
"github.com/coder/coder/v2/agent/proto"
"github.com/coder/coder/v2/coderd/httpapi"
"github.com/coder/coder/v2/codersdk"
)
func (a *agent) apiHandler(aAPI proto.DRPCAgentClient26) (http.Handler, func() error) {
func (a *agent) apiHandler() http.Handler {
r := chi.NewRouter()
r.Get("/", func(rw http.ResponseWriter, r *http.Request) {
httpapi.Write(r.Context(), rw, http.StatusOK, codersdk.Response{
@@ -40,35 +36,8 @@ func (a *agent) apiHandler(aAPI proto.DRPCAgentClient26) (http.Handler, func() e
cacheDuration: cacheDuration,
}
if a.devcontainers {
containerAPIOpts := []agentcontainers.Option{
agentcontainers.WithExecer(a.execer),
agentcontainers.WithCommandEnv(a.sshServer.CommandEnv),
agentcontainers.WithScriptLogger(func(logSourceID uuid.UUID) agentcontainers.ScriptLogger {
return a.logSender.GetScriptLogger(logSourceID)
}),
agentcontainers.WithSubAgentClient(agentcontainers.NewSubAgentClientFromAPI(a.logger, aAPI)),
}
manifest := a.manifest.Load()
if manifest != nil {
containerAPIOpts = append(containerAPIOpts,
agentcontainers.WithManifestInfo(manifest.OwnerName, manifest.WorkspaceName),
)
if len(manifest.Devcontainers) > 0 {
containerAPIOpts = append(
containerAPIOpts,
agentcontainers.WithDevcontainers(manifest.Devcontainers, manifest.Scripts),
)
}
}
// Append after to allow the agent options to override the default options.
containerAPIOpts = append(containerAPIOpts, a.containerAPIOptions...)
containerAPI := agentcontainers.NewAPI(a.logger.Named("containers"), containerAPIOpts...)
r.Mount("/api/v0/containers", containerAPI.Routes())
a.containerAPI.Store(containerAPI)
if a.containerAPI != nil {
r.Mount("/api/v0/containers", a.containerAPI.Routes())
} else {
r.HandleFunc("/api/v0/containers", func(w http.ResponseWriter, r *http.Request) {
httpapi.Write(r.Context(), w, http.StatusForbidden, codersdk.Response{
@@ -89,12 +58,7 @@ func (a *agent) apiHandler(aAPI proto.DRPCAgentClient26) (http.Handler, func() e
r.Get("/debug/manifest", a.HandleHTTPDebugManifest)
r.Get("/debug/prometheus", promHandler.ServeHTTP)
return r, func() error {
if containerAPI := a.containerAPI.Load(); containerAPI != nil {
return containerAPI.Close()
}
return nil
}
return r
}
type listeningPortsHandler struct {
+60 -26
View File
@@ -362,11 +362,19 @@ func (*RootCmd) mcpConfigureCursor() *serpent.Command {
}
type taskReport struct {
link string
messageID int64
// link is optional.
link string
// messageID must be set if this update is from a *user* message. A user
// message only happens when interacting via the AI AgentAPI (as opposed to
// interacting with the terminal directly).
messageID *int64
// selfReported must be set if the update is directly from the AI agent
// (as opposed to the screen watcher).
selfReported bool
state codersdk.WorkspaceAppStatusState
summary string
// state must always be set.
state codersdk.WorkspaceAppStatusState
// summary is optional.
summary string
}
type mcpServer struct {
@@ -388,31 +396,48 @@ func (r *RootCmd) mcpServer() *serpent.Command {
return &serpent.Command{
Use: "server",
Handler: func(inv *serpent.Invocation) error {
// lastUserMessageID is the ID of the last *user* message that we saw. A
// user message only happens when interacting via the AI AgentAPI (as
// opposed to interacting with the terminal directly).
var lastUserMessageID int64
var lastReport taskReport
// Create a queue that skips duplicates and preserves summaries.
queue := cliutil.NewQueue[taskReport](512).WithPredicate(func(report taskReport) (taskReport, bool) {
// Use "working" status if this is a new user message. If this is not a
// new user message, and the status is "working" and not self-reported
// (meaning it came from the screen watcher), then it means one of two
// things:
// 1. The AI agent is still working, so there is nothing to update.
// 2. The AI agent stopped working, then the user has interacted with
// the terminal directly. For now, we are ignoring these updates.
// This risks missing cases where the user manually submits a new
// prompt and the AI agent becomes active and does not update itself,
// but it avoids spamming useless status updates as the user is
// typing, so the tradeoff is worth it. In the future, if we can
// reliably distinguish between user and AI agent activity, we can
// change this.
if report.messageID > lastUserMessageID {
report.state = codersdk.WorkspaceAppStatusStateWorking
} else if report.state == codersdk.WorkspaceAppStatusStateWorking && !report.selfReported {
// Avoid queuing empty statuses (this would probably indicate a
// developer error)
if report.state == "" {
return report, false
}
// If this is a user message, discard if it is not new.
if report.messageID != nil && lastReport.messageID != nil &&
*lastReport.messageID >= *report.messageID {
return report, false
}
// If this is not a user message, and the status is "working" and not
// self-reported (meaning it came from the screen watcher), then it
// means one of two things:
//
// 1. The AI agent is not working; the user is interacting with the
// terminal directly.
// 2. The AI agent is working.
//
// At the moment, we have no way to tell the difference between these
// two states. In the future, if we can reliably distinguish between
// user and AI agent activity, we can change this.
//
// If this is our first update, we assume it is the AI agent working and
// accept the update.
//
// Otherwise we discard the update. This risks missing cases where the
// user manually submits a new prompt and the AI agent becomes active
// (and does not update itself), but it avoids spamming useless status
// updates as the user is typing, so the tradeoff is worth it.
if report.messageID == nil &&
report.state == codersdk.WorkspaceAppStatusStateWorking &&
!report.selfReported && lastReport.state != "" {
return report, false
}
// Keep track of the last message ID so we can tell when a message is
// new or if it has been re-emitted.
if report.messageID == nil {
report.messageID = lastReport.messageID
}
// Preserve previous message and URI if there was no message.
if report.summary == "" {
report.summary = lastReport.summary
@@ -600,7 +625,8 @@ func (s *mcpServer) startWatcher(ctx context.Context, inv *serpent.Invocation) {
case agentapi.EventMessageUpdate:
if ev.Role == agentapi.RoleUser {
err := s.queue.Push(taskReport{
messageID: ev.Id,
messageID: &ev.Id,
state: codersdk.WorkspaceAppStatusStateWorking,
})
if err != nil {
cliui.Warnf(inv.Stderr, "Failed to queue update: %s", err)
@@ -650,10 +676,18 @@ func (s *mcpServer) startServer(ctx context.Context, inv *serpent.Invocation, in
// Add tool dependencies.
toolOpts := []func(*toolsdk.Deps){
toolsdk.WithTaskReporter(func(args toolsdk.ReportTaskArgs) error {
// The agent does not reliably report its status correctly. If AgentAPI
// is enabled, we will always set the status to "working" when we get an
// MCP message, and rely on the screen watcher to eventually catch the
// idle state.
state := codersdk.WorkspaceAppStatusStateWorking
if s.aiAgentAPIClient == nil {
state = codersdk.WorkspaceAppStatusState(args.State)
}
return s.queue.Push(taskReport{
link: args.Link,
selfReported: true,
state: codersdk.WorkspaceAppStatusState(args.State),
state: state,
summary: args.Summary,
})
}),
+332 -201
View File
@@ -763,220 +763,351 @@ func TestExpMcpReporter(t *testing.T) {
<-cmdDone
})
t.Run("OK", func(t *testing.T) {
t.Parallel()
makeStatusEvent := func(status agentapi.AgentStatus) *codersdk.ServerSentEvent {
return &codersdk.ServerSentEvent{
Type: ServerSentEventTypeStatusChange,
Data: agentapi.EventStatusChange{
Status: status,
},
}
}
// Create a test deployment and workspace.
client, db := coderdtest.NewWithDatabase(t, nil)
user := coderdtest.CreateFirstUser(t, client)
client, user2 := coderdtest.CreateAnotherUser(t, client, user.OrganizationID)
makeMessageEvent := func(id int64, role agentapi.ConversationRole) *codersdk.ServerSentEvent {
return &codersdk.ServerSentEvent{
Type: ServerSentEventTypeMessageUpdate,
Data: agentapi.EventMessageUpdate{
Id: id,
Role: role,
},
}
}
r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{
OrganizationID: user.OrganizationID,
OwnerID: user2.ID,
}).WithAgent(func(a []*proto.Agent) []*proto.Agent {
a[0].Apps = []*proto.App{
type test struct {
// event simulates an event from the screen watcher.
event *codersdk.ServerSentEvent
// state, summary, and uri simulate a tool call from the AI agent.
state codersdk.WorkspaceAppStatusState
summary string
uri string
expected *codersdk.WorkspaceAppStatus
}
runs := []struct {
name string
tests []test
disableAgentAPI bool
}{
// In this run the AI agent starts with a state change but forgets to update
// that it finished.
{
name: "Active",
tests: []test{
// First the AI agent updates with a state change.
{
Slug: "vscode",
state: codersdk.WorkspaceAppStatusStateWorking,
summary: "doing work",
uri: "https://dev.coder.com",
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateWorking,
Message: "doing work",
URI: "https://dev.coder.com",
},
},
}
return a
}).Do()
makeStatusEvent := func(status agentapi.AgentStatus) *codersdk.ServerSentEvent {
return &codersdk.ServerSentEvent{
Type: ServerSentEventTypeStatusChange,
Data: agentapi.EventStatusChange{
Status: status,
// Terminal goes quiet but the AI agent forgot the update, and it is
// caught by the screen watcher. Message and URI are preserved.
{
event: makeStatusEvent(agentapi.StatusStable),
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateIdle,
Message: "doing work",
URI: "https://dev.coder.com",
},
},
}
}
makeMessageEvent := func(id int64, role agentapi.ConversationRole) *codersdk.ServerSentEvent {
return &codersdk.ServerSentEvent{
Type: ServerSentEventTypeMessageUpdate,
Data: agentapi.EventMessageUpdate{
Id: id,
Role: role,
// A stable update now from the watcher should be discarded, as it is a
// duplicate.
{
event: makeStatusEvent(agentapi.StatusStable),
},
}
}
// Terminal becomes active again according to the screen watcher, but no
// new user message. This could be the AI agent being active again, but
// it could also be the user messing around. We will prefer not updating
// the status so the "working" update here should be skipped.
//
// TODO: How do we test the no-op updates? This update is skipped
// because of the logic mentioned above, but how do we prove this update
// was skipped because of that and not that the next update was skipped
// because it is a duplicate state? We could mock the queue?
{
event: makeStatusEvent(agentapi.StatusRunning),
},
// Agent messages are ignored.
{
event: makeMessageEvent(0, agentapi.RoleAgent),
},
// The watcher reports the screen is active again...
{
event: makeStatusEvent(agentapi.StatusRunning),
},
// ... but this time we have a new user message so we know there is AI
// agent activity. This time the "working" update will not be skipped.
{
event: makeMessageEvent(1, agentapi.RoleUser),
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateWorking,
Message: "doing work",
URI: "https://dev.coder.com",
},
},
// Watcher reports stable again.
{
event: makeStatusEvent(agentapi.StatusStable),
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateIdle,
Message: "doing work",
URI: "https://dev.coder.com",
},
},
},
},
// In this run the AI agent never sends any state changes.
{
name: "Inactive",
tests: []test{
// The "working" status from the watcher should be accepted, even though
// there is no new user message, because it is the first update.
{
event: makeStatusEvent(agentapi.StatusRunning),
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateWorking,
Message: "",
URI: "",
},
},
// Stable update should be accepted.
{
event: makeStatusEvent(agentapi.StatusStable),
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateIdle,
Message: "",
URI: "",
},
},
// Zero ID should be accepted.
{
event: makeMessageEvent(0, agentapi.RoleUser),
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateWorking,
Message: "",
URI: "",
},
},
// Stable again.
{
event: makeStatusEvent(agentapi.StatusStable),
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateIdle,
Message: "",
URI: "",
},
},
// Next ID.
{
event: makeMessageEvent(1, agentapi.RoleUser),
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateWorking,
Message: "",
URI: "",
},
},
},
},
// We ignore the state from the agent and assume "working".
{
name: "IgnoreAgentState",
// AI agent reports that it is finished but the summary says it is doing
// work.
tests: []test{
{
state: codersdk.WorkspaceAppStatusStateIdle,
summary: "doing work",
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateWorking,
Message: "doing work",
},
},
// AI agent reports finished again, with a matching summary. We still
// assume it is working.
{
state: codersdk.WorkspaceAppStatusStateIdle,
summary: "finished",
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateWorking,
Message: "finished",
},
},
// Once the watcher reports stable, then we record idle.
{
event: makeStatusEvent(agentapi.StatusStable),
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateIdle,
Message: "finished",
},
},
},
},
// When AgentAPI is not being used, we accept agent state updates as-is.
{
name: "KeepAgentState",
tests: []test{
{
state: codersdk.WorkspaceAppStatusStateWorking,
summary: "doing work",
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateWorking,
Message: "doing work",
},
},
{
state: codersdk.WorkspaceAppStatusStateIdle,
summary: "finished",
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateIdle,
Message: "finished",
},
},
},
disableAgentAPI: true,
},
}
ctx, cancel := context.WithCancel(testutil.Context(t, testutil.WaitShort))
for _, run := range runs {
run := run
t.Run(run.name, func(t *testing.T) {
t.Parallel()
// Mock the AI AgentAPI server.
listening := make(chan func(sse codersdk.ServerSentEvent) error)
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
send, closed, err := httpapi.ServerSentEventSender(w, r)
if err != nil {
httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
Message: "Internal error setting up server-sent events.",
Detail: err.Error(),
})
return
}
// Send initial message.
send(*makeMessageEvent(0, agentapi.RoleAgent))
listening <- send
<-closed
}))
t.Cleanup(srv.Close)
aiAgentAPIURL := srv.URL
ctx, cancel := context.WithCancel(testutil.Context(t, testutil.WaitShort))
// Watch the workspace for changes.
watcher, err := client.WatchWorkspace(ctx, r.Workspace.ID)
require.NoError(t, err)
var lastAppStatus codersdk.WorkspaceAppStatus
nextUpdate := func() codersdk.WorkspaceAppStatus {
for {
select {
case <-ctx.Done():
require.FailNow(t, "timed out waiting for status update")
case w, ok := <-watcher:
require.True(t, ok, "watch channel closed")
if w.LatestAppStatus != nil && w.LatestAppStatus.ID != lastAppStatus.ID {
lastAppStatus = *w.LatestAppStatus
return lastAppStatus
// Create a test deployment and workspace.
client, db := coderdtest.NewWithDatabase(t, nil)
user := coderdtest.CreateFirstUser(t, client)
client, user2 := coderdtest.CreateAnotherUser(t, client, user.OrganizationID)
r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{
OrganizationID: user.OrganizationID,
OwnerID: user2.ID,
}).WithAgent(func(a []*proto.Agent) []*proto.Agent {
a[0].Apps = []*proto.App{
{
Slug: "vscode",
},
}
return a
}).Do()
// Watch the workspace for changes.
watcher, err := client.WatchWorkspace(ctx, r.Workspace.ID)
require.NoError(t, err)
var lastAppStatus codersdk.WorkspaceAppStatus
nextUpdate := func() codersdk.WorkspaceAppStatus {
for {
select {
case <-ctx.Done():
require.FailNow(t, "timed out waiting for status update")
case w, ok := <-watcher:
require.True(t, ok, "watch channel closed")
if w.LatestAppStatus != nil && w.LatestAppStatus.ID != lastAppStatus.ID {
t.Logf("Got status update: %s > %s", lastAppStatus.State, w.LatestAppStatus.State)
lastAppStatus = *w.LatestAppStatus
return lastAppStatus
}
}
}
}
}
inv, _ := clitest.New(t,
"exp", "mcp", "server",
// We need the agent credentials, AI AgentAPI url, and a slug for reporting.
"--agent-url", client.URL.String(),
"--agent-token", r.AgentToken,
"--app-status-slug", "vscode",
"--ai-agentapi-url", aiAgentAPIURL,
"--allowed-tools=coder_report_task",
)
inv = inv.WithContext(ctx)
pty := ptytest.New(t)
inv.Stdin = pty.Input()
inv.Stdout = pty.Output()
stderr := ptytest.New(t)
inv.Stderr = stderr.Output()
// Run the MCP server.
cmdDone := make(chan struct{})
go func() {
defer close(cmdDone)
err := inv.Run()
assert.NoError(t, err)
}()
// Initialize.
payload := `{"jsonrpc":"2.0","id":1,"method":"initialize"}`
pty.WriteLine(payload)
_ = pty.ReadLine(ctx) // ignore echo
_ = pty.ReadLine(ctx) // ignore init response
sender := <-listening
tests := []struct {
// event simulates an event from the screen watcher.
event *codersdk.ServerSentEvent
// state, summary, and uri simulate a tool call from the AI agent.
state codersdk.WorkspaceAppStatusState
summary string
uri string
expected *codersdk.WorkspaceAppStatus
}{
// First the AI agent updates with a state change.
{
state: codersdk.WorkspaceAppStatusStateWorking,
summary: "doing work",
uri: "https://dev.coder.com",
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateWorking,
Message: "doing work",
URI: "https://dev.coder.com",
},
},
// Terminal goes quiet but the AI agent forgot the update, and it is
// caught by the screen watcher. Message and URI are preserved.
{
event: makeStatusEvent(agentapi.StatusStable),
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateIdle,
Message: "doing work",
URI: "https://dev.coder.com",
},
},
// A completed update at this point from the watcher should be discarded.
{
event: makeStatusEvent(agentapi.StatusStable),
},
// Terminal becomes active again according to the screen watcher, but no
// new user message. This could be the AI agent being active again, but
// it could also be the user messing around. We will prefer not updating
// the status so the "working" update here should be skipped.
{
event: makeStatusEvent(agentapi.StatusRunning),
},
// Agent messages are ignored.
{
event: makeMessageEvent(1, agentapi.RoleAgent),
},
// AI agent reports that it failed and URI is blank.
{
state: codersdk.WorkspaceAppStatusStateFailure,
summary: "oops",
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateFailure,
Message: "oops",
URI: "",
},
},
// The watcher reports the screen is active again...
{
event: makeStatusEvent(agentapi.StatusRunning),
},
// ... but this time we have a new user message so we know there is AI
// agent activity. This time the "working" update will not be skipped.
{
event: makeMessageEvent(2, agentapi.RoleUser),
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateWorking,
Message: "oops",
URI: "",
},
},
// Watcher reports stable again.
{
event: makeStatusEvent(agentapi.StatusStable),
expected: &codersdk.WorkspaceAppStatus{
State: codersdk.WorkspaceAppStatusStateIdle,
Message: "oops",
URI: "",
},
},
}
for _, test := range tests {
if test.event != nil {
err := sender(*test.event)
require.NoError(t, err)
} else {
// Call the tool and ensure it works.
payload := fmt.Sprintf(`{"jsonrpc":"2.0","id":3,"method":"tools/call", "params": {"name": "coder_report_task", "arguments": {"state": %q, "summary": %q, "link": %q}}}`, test.state, test.summary, test.uri)
pty.WriteLine(payload)
_ = pty.ReadLine(ctx) // ignore echo
output := pty.ReadLine(ctx)
require.NotEmpty(t, output, "did not receive a response from coder_report_task")
// Ensure it is valid JSON.
_, err = json.Marshal(output)
require.NoError(t, err, "did not receive valid JSON from coder_report_task")
args := []string{
"exp", "mcp", "server",
// We need the agent credentials, AI AgentAPI url (if not
// disabled), and a slug for reporting.
"--agent-url", client.URL.String(),
"--agent-token", r.AgentToken,
"--app-status-slug", "vscode",
"--allowed-tools=coder_report_task",
}
if test.expected != nil {
got := nextUpdate()
require.Equal(t, got.State, test.expected.State)
require.Equal(t, got.Message, test.expected.Message)
require.Equal(t, got.URI, test.expected.URI)
// Mock the AI AgentAPI server.
listening := make(chan func(sse codersdk.ServerSentEvent) error)
if !run.disableAgentAPI {
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
send, closed, err := httpapi.ServerSentEventSender(w, r)
if err != nil {
httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
Message: "Internal error setting up server-sent events.",
Detail: err.Error(),
})
return
}
// Send initial message.
send(*makeMessageEvent(0, agentapi.RoleAgent))
listening <- send
<-closed
}))
t.Cleanup(srv.Close)
aiAgentAPIURL := srv.URL
args = append(args, "--ai-agentapi-url", aiAgentAPIURL)
}
}
cancel()
<-cmdDone
})
inv, _ := clitest.New(t, args...)
inv = inv.WithContext(ctx)
pty := ptytest.New(t)
inv.Stdin = pty.Input()
inv.Stdout = pty.Output()
stderr := ptytest.New(t)
inv.Stderr = stderr.Output()
// Run the MCP server.
cmdDone := make(chan struct{})
go func() {
defer close(cmdDone)
err := inv.Run()
assert.NoError(t, err)
}()
// Initialize.
payload := `{"jsonrpc":"2.0","id":1,"method":"initialize"}`
pty.WriteLine(payload)
_ = pty.ReadLine(ctx) // ignore echo
_ = pty.ReadLine(ctx) // ignore init response
var sender func(sse codersdk.ServerSentEvent) error
if !run.disableAgentAPI {
sender = <-listening
}
for _, test := range run.tests {
if test.event != nil {
err := sender(*test.event)
require.NoError(t, err)
} else {
// Call the tool and ensure it works.
payload := fmt.Sprintf(`{"jsonrpc":"2.0","id":3,"method":"tools/call", "params": {"name": "coder_report_task", "arguments": {"state": %q, "summary": %q, "link": %q}}}`, test.state, test.summary, test.uri)
pty.WriteLine(payload)
_ = pty.ReadLine(ctx) // ignore echo
output := pty.ReadLine(ctx)
require.NotEmpty(t, output, "did not receive a response from coder_report_task")
// Ensure it is valid JSON.
_, err = json.Marshal(output)
require.NoError(t, err, "did not receive valid JSON from coder_report_task")
}
if test.expected != nil {
got := nextUpdate()
require.Equal(t, got.State, test.expected.State)
require.Equal(t, got.Message, test.expected.Message)
require.Equal(t, got.URI, test.expected.URI)
}
}
cancel()
<-cmdDone
})
}
}
-89
View File
@@ -61,7 +61,6 @@ import (
"github.com/coder/serpent"
"github.com/coder/wgtunnel/tunnelsdk"
"github.com/coder/coder/v2/coderd/ai"
"github.com/coder/coder/v2/coderd/entitlements"
"github.com/coder/coder/v2/coderd/notifications/reports"
"github.com/coder/coder/v2/coderd/runtimeconfig"
@@ -611,22 +610,6 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd.
)
}
aiProviders, err := ReadAIProvidersFromEnv(os.Environ())
if err != nil {
return xerrors.Errorf("read ai providers from env: %w", err)
}
vals.AI.Value.Providers = append(vals.AI.Value.Providers, aiProviders...)
for _, provider := range aiProviders {
logger.Debug(
ctx, "loaded ai provider",
slog.F("type", provider.Type),
)
}
languageModels, err := ai.ModelsFromConfig(ctx, vals.AI.Value.Providers)
if err != nil {
return xerrors.Errorf("create language models: %w", err)
}
realIPConfig, err := httpmw.ParseRealIPConfig(vals.ProxyTrustedHeaders, vals.ProxyTrustedOrigins)
if err != nil {
return xerrors.Errorf("parse real ip config: %w", err)
@@ -657,7 +640,6 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd.
CacheDir: cacheDir,
GoogleTokenValidator: googleTokenValidator,
ExternalAuthConfigs: externalAuthConfigs,
LanguageModels: languageModels,
RealIPConfig: realIPConfig,
SSHKeygenAlgorithm: sshKeygenAlgorithm,
TracerProvider: tracerProvider,
@@ -2642,77 +2624,6 @@ func redirectHTTPToHTTPSDeprecation(ctx context.Context, logger slog.Logger, inv
}
}
// ReadAIProvidersFromEnv assembles AI provider configurations from the
// process environment.
//
// Two sources are combined, in order:
//
//  1. Indexed variables of the form CODER_AI_PROVIDER_<N>_<FIELD>
//     (FIELD one of TYPE, API_KEY, BASE_URL, MODELS; MODELS is
//     comma-separated). Indexes must be contiguous starting at 0 —
//     a skipped index is an error.
//  2. Well-known vendor variables (OPENAI_API_KEY, ANTHROPIC_API_KEY,
//     GOOGLE_API_KEY), each of which appends one additional provider.
func ReadAIProvidersFromEnv(environ []string) ([]codersdk.AIProviderConfig, error) {
	// Sorting guarantees the numeric indexes are visited in ascending
	// order, which the contiguity check below relies on.
	sort.Strings(environ)
	var providers []codersdk.AIProviderConfig
	for _, v := range serpent.ParseEnviron(environ, "CODER_AI_PROVIDER_") {
		parts := strings.SplitN(v.Name, "_", 2)
		if len(parts) != 2 {
			return nil, xerrors.Errorf("invalid env var: %s", v.Name)
		}
		index, err := strconv.Atoi(parts[0])
		if err != nil {
			return nil, xerrors.Errorf("parse number: %s", v.Name)
		}

		var current codersdk.AIProviderConfig
		switch {
		case len(providers) < index:
			// A gap in the index sequence (e.g. 0 then 2).
			return nil, xerrors.Errorf(
				"provider num %v skipped: %s",
				len(providers),
				v.Name,
			)
		case len(providers) == index:
			// First field seen for this index: start a fresh entry.
			providers = append(providers, current)
		case len(providers) == index+1:
			// Another field for the entry we are already filling in.
			current = providers[index]
		}

		switch parts[1] {
		case "TYPE":
			current.Type = v.Value
		case "API_KEY":
			current.APIKey = v.Value
		case "BASE_URL":
			current.BaseURL = v.Value
		case "MODELS":
			current.Models = strings.Split(v.Value, ",")
		}
		providers[index] = current
	}

	// Vendor-specific API key variables each contribute an extra
	// provider entry with only Type and APIKey populated.
	for _, envVar := range environ {
		pair := strings.SplitN(envVar, "=", 2)
		if len(pair) != 2 {
			continue
		}
		var providerType string
		switch pair[0] {
		case "OPENAI_API_KEY":
			providerType = "openai"
		case "ANTHROPIC_API_KEY":
			providerType = "anthropic"
		case "GOOGLE_API_KEY":
			providerType = "google"
		default:
			continue
		}
		providers = append(providers, codersdk.AIProviderConfig{
			Type:   providerType,
			APIKey: pair[1],
		})
	}
	return providers, nil
}
// ReadExternalAuthProvidersFromEnv is provided for compatibility purposes with
// the viper CLI.
func ReadExternalAuthProvidersFromEnv(environ []string) ([]codersdk.ExternalAuthConfig, error) {
+6
View File
@@ -677,6 +677,12 @@ workspaces stopping during the day due to template scheduling.
must be *. Only one hour and minute can be specified (ranges or comma
separated values are not supported).
WORKSPACE PREBUILDS OPTIONS:
Configure how workspace prebuilds behave.
--workspace-prebuilds-reconciliation-interval duration, $CODER_WORKSPACE_PREBUILDS_RECONCILIATION_INTERVAL (default: 15s)
How often to reconcile workspace prebuilds state.
⚠️ DANGEROUS OPTIONS:
--dangerous-allow-path-app-sharing bool, $CODER_DANGEROUS_ALLOW_PATH_APP_SHARING
Allow workspace apps that are not served from subdomains to be shared.
-3
View File
@@ -526,9 +526,6 @@ client:
# Support links to display in the top right drop down menu.
# (default: <unset>, type: struct[[]codersdk.LinkConfig])
supportLinks: []
# Configure AI providers.
# (default: <unset>, type: struct[codersdk.AIConfig])
ai: {}
# External Authentication providers.
# (default: <unset>, type: struct[[]codersdk.ExternalAuthConfig])
externalAuthProviders: []
+12 -1
View File
@@ -2,7 +2,9 @@ package agentapi
import (
"context"
"crypto/sha256"
"database/sql"
"encoding/base32"
"errors"
"fmt"
"strings"
@@ -165,11 +167,20 @@ func (a *SubAgentAPI) CreateSubAgent(ctx context.Context, req *agentproto.Create
}
}
// NOTE(DanielleMaywood):
// Slugs must be unique PER workspace/template. As of 2025-06-25,
// there is no database-layer enforcement of this constraint.
// We can get around this by creating a slug that *should* be
// unique (at least highly probable).
slugHash := sha256.Sum256([]byte(subAgent.Name + "/" + app.Slug))
slugHashEnc := base32.HexEncoding.WithPadding(base32.NoPadding).EncodeToString(slugHash[:])
computedSlug := strings.ToLower(slugHashEnc[:8]) + "-" + app.Slug
_, err := a.Database.UpsertWorkspaceApp(ctx, database.UpsertWorkspaceAppParams{
ID: uuid.New(), // NOTE: we may need to maintain the app's ID here for stability, but for now we'll leave this as-is.
CreatedAt: createdAt,
AgentID: subAgent.ID,
Slug: app.Slug,
Slug: computedSlug,
DisplayName: app.GetDisplayName(),
Icon: app.GetIcon(),
Command: sql.NullString{
+15 -15
View File
@@ -216,7 +216,7 @@ func TestSubAgentAPI(t *testing.T) {
},
expectApps: []database.WorkspaceApp{
{
Slug: "code-server",
Slug: "fdqf0lpd-code-server",
DisplayName: "VS Code",
Icon: "/icon/code.svg",
Command: sql.NullString{},
@@ -234,7 +234,7 @@ func TestSubAgentAPI(t *testing.T) {
DisplayGroup: sql.NullString{},
},
{
Slug: "vim",
Slug: "547knu0f-vim",
DisplayName: "Vim",
Icon: "/icon/vim.svg",
Command: sql.NullString{Valid: true, String: "vim"},
@@ -377,7 +377,7 @@ func TestSubAgentAPI(t *testing.T) {
},
expectApps: []database.WorkspaceApp{
{
Slug: "valid-app",
Slug: "511ctirn-valid-app",
DisplayName: "Valid App",
SharingLevel: database.AppSharingLevelOwner,
Health: database.WorkspaceAppHealthDisabled,
@@ -410,19 +410,19 @@ func TestSubAgentAPI(t *testing.T) {
},
expectApps: []database.WorkspaceApp{
{
Slug: "authenticated-app",
Slug: "atpt261l-authenticated-app",
SharingLevel: database.AppSharingLevelAuthenticated,
Health: database.WorkspaceAppHealthDisabled,
OpenIn: database.WorkspaceAppOpenInSlimWindow,
},
{
Slug: "owner-app",
Slug: "eh5gp1he-owner-app",
SharingLevel: database.AppSharingLevelOwner,
Health: database.WorkspaceAppHealthDisabled,
OpenIn: database.WorkspaceAppOpenInSlimWindow,
},
{
Slug: "public-app",
Slug: "oopjevf1-public-app",
SharingLevel: database.AppSharingLevelPublic,
Health: database.WorkspaceAppHealthDisabled,
OpenIn: database.WorkspaceAppOpenInSlimWindow,
@@ -443,13 +443,13 @@ func TestSubAgentAPI(t *testing.T) {
},
expectApps: []database.WorkspaceApp{
{
Slug: "tab-app",
Slug: "ci9500rm-tab-app",
SharingLevel: database.AppSharingLevelOwner,
Health: database.WorkspaceAppHealthDisabled,
OpenIn: database.WorkspaceAppOpenInTab,
},
{
Slug: "window-app",
Slug: "p17s76re-window-app",
SharingLevel: database.AppSharingLevelOwner,
Health: database.WorkspaceAppHealthDisabled,
OpenIn: database.WorkspaceAppOpenInSlimWindow,
@@ -479,7 +479,7 @@ func TestSubAgentAPI(t *testing.T) {
},
expectApps: []database.WorkspaceApp{
{
Slug: "full-app",
Slug: "0ccdbg39-full-app",
Command: sql.NullString{Valid: true, String: "echo hello"},
DisplayName: "Full Featured App",
External: true,
@@ -507,7 +507,7 @@ func TestSubAgentAPI(t *testing.T) {
},
expectApps: []database.WorkspaceApp{
{
Slug: "no-health-app",
Slug: "nphrhbh6-no-health-app",
Health: database.WorkspaceAppHealthDisabled,
SharingLevel: database.AppSharingLevelOwner,
OpenIn: database.WorkspaceAppOpenInSlimWindow,
@@ -531,7 +531,7 @@ func TestSubAgentAPI(t *testing.T) {
},
expectApps: []database.WorkspaceApp{
{
Slug: "duplicate-app",
Slug: "uiklfckv-duplicate-app",
DisplayName: "First App",
SharingLevel: database.AppSharingLevelOwner,
Health: database.WorkspaceAppHealthDisabled,
@@ -568,14 +568,14 @@ func TestSubAgentAPI(t *testing.T) {
},
expectApps: []database.WorkspaceApp{
{
Slug: "duplicate-app",
Slug: "uiklfckv-duplicate-app",
DisplayName: "First Duplicate",
SharingLevel: database.AppSharingLevelOwner,
Health: database.WorkspaceAppHealthDisabled,
OpenIn: database.WorkspaceAppOpenInSlimWindow,
},
{
Slug: "valid-app",
Slug: "511ctirn-valid-app",
DisplayName: "Valid App",
SharingLevel: database.AppSharingLevelOwner,
Health: database.WorkspaceAppHealthDisabled,
@@ -754,7 +754,7 @@ func TestSubAgentAPI(t *testing.T) {
apps, err := db.GetWorkspaceAppsByAgentID(dbauthz.AsSystemRestricted(ctx), agentID) //nolint:gocritic // this is a test.
require.NoError(t, err)
require.Len(t, apps, 1)
require.Equal(t, "duplicate-slug", apps[0].Slug)
require.Equal(t, "k5jd7a99-duplicate-slug", apps[0].Slug)
require.Equal(t, "First Duplicate", apps[0].DisplayName)
})
})
@@ -1128,7 +1128,7 @@ func TestSubAgentAPI(t *testing.T) {
apps, err := api.Database.GetWorkspaceAppsByAgentID(dbauthz.AsSystemRestricted(ctx), agentID) //nolint:gocritic // this is a test.
require.NoError(t, err)
require.Len(t, apps, 1)
require.Equal(t, "custom-app", apps[0].Slug)
require.Equal(t, "v4qhkq17-custom-app", apps[0].Slug)
require.Equal(t, "Custom App", apps[0].DisplayName)
})
-167
View File
@@ -1,167 +0,0 @@
package ai
import (
"context"
"github.com/anthropics/anthropic-sdk-go"
anthropicoption "github.com/anthropics/anthropic-sdk-go/option"
"github.com/kylecarbs/aisdk-go"
"github.com/openai/openai-go"
openaioption "github.com/openai/openai-go/option"
"golang.org/x/xerrors"
"google.golang.org/genai"
"github.com/coder/coder/v2/codersdk"
)
// LanguageModel pairs the codersdk language model metadata with the
// provider-specific function used to stream a completion for it.
type LanguageModel struct {
	codersdk.LanguageModel
	StreamFunc StreamFunc
}

// StreamOptions are the per-request inputs for a streaming completion.
type StreamOptions struct {
	// SystemPrompt, when non-empty, is injected as the system
	// message/instruction for the request.
	SystemPrompt string
	// Model is the provider model ID to invoke.
	Model    string
	Messages []aisdk.Message
	// Thinking is not consulted by ModelsFromConfig's stream
	// functions in this file — NOTE(review): confirm where it is used.
	Thinking bool
	Tools    []aisdk.Tool
}

// StreamFunc starts a streaming completion and returns the resulting
// data stream.
type StreamFunc func(ctx context.Context, options StreamOptions) (aisdk.DataStream, error)

// LanguageModels is a map of language model ID to language model.
type LanguageModels map[string]LanguageModel
// ModelsFromConfig builds the set of usable language models from the
// configured AI providers. For each provider it constructs a client,
// wires up a StreamFunc that adapts aisdk messages/tools to that
// provider's API, and — when the config does not pin an explicit model
// list — queries the provider for its available models. The returned
// map is keyed by model ID, so if two providers expose the same model
// ID the later provider in configs overwrites the earlier one.
//
// Returns an error for an unsupported provider type or if a provider's
// model listing fails.
func ModelsFromConfig(ctx context.Context, configs []codersdk.AIProviderConfig) (LanguageModels, error) {
	models := make(LanguageModels)

	for _, config := range configs {
		var streamFunc StreamFunc

		switch config.Type {
		case "openai":
			opts := []openaioption.RequestOption{
				openaioption.WithAPIKey(config.APIKey),
			}
			if config.BaseURL != "" {
				opts = append(opts, openaioption.WithBaseURL(config.BaseURL))
			}
			client := openai.NewClient(opts...)
			streamFunc = func(ctx context.Context, options StreamOptions) (aisdk.DataStream, error) {
				openaiMessages, err := aisdk.MessagesToOpenAI(options.Messages)
				if err != nil {
					return nil, err
				}
				tools := aisdk.ToolsToOpenAI(options.Tools)
				if options.SystemPrompt != "" {
					// Prepend the system prompt as the first message.
					openaiMessages = append([]openai.ChatCompletionMessageParamUnion{
						openai.SystemMessage(options.SystemPrompt),
					}, openaiMessages...)
				}
				return aisdk.OpenAIToDataStream(client.Chat.Completions.NewStreaming(ctx, openai.ChatCompletionNewParams{
					Messages:  openaiMessages,
					Model:     options.Model,
					Tools:     tools,
					MaxTokens: openai.Int(8192),
				})), nil
			}
			if config.Models == nil {
				// No explicit model list configured: ask the provider.
				// Note: this local `models` shadows the outer map.
				models, err := client.Models.List(ctx)
				if err != nil {
					return nil, err
				}
				config.Models = make([]string, len(models.Data))
				for i, model := range models.Data {
					config.Models[i] = model.ID
				}
			}
		case "anthropic":
			client := anthropic.NewClient(anthropicoption.WithAPIKey(config.APIKey))
			streamFunc = func(ctx context.Context, options StreamOptions) (aisdk.DataStream, error) {
				anthropicMessages, systemMessage, err := aisdk.MessagesToAnthropic(options.Messages)
				if err != nil {
					return nil, err
				}
				if options.SystemPrompt != "" {
					// An explicit SystemPrompt replaces any system
					// message derived from the conversation.
					systemMessage = []anthropic.TextBlockParam{
						*anthropic.NewTextBlock(options.SystemPrompt).OfRequestTextBlock,
					}
				}
				return aisdk.AnthropicToDataStream(client.Messages.NewStreaming(ctx, anthropic.MessageNewParams{
					Messages:  anthropicMessages,
					Model:     options.Model,
					System:    systemMessage,
					Tools:     aisdk.ToolsToAnthropic(options.Tools),
					MaxTokens: 8192,
				})), nil
			}
			if config.Models == nil {
				// No explicit model list configured: ask the provider.
				models, err := client.Models.List(ctx, anthropic.ModelListParams{})
				if err != nil {
					return nil, err
				}
				config.Models = make([]string, len(models.Data))
				for i, model := range models.Data {
					config.Models[i] = model.ID
				}
			}
		case "google":
			client, err := genai.NewClient(ctx, &genai.ClientConfig{
				APIKey:  config.APIKey,
				Backend: genai.BackendGeminiAPI,
			})
			if err != nil {
				return nil, err
			}
			streamFunc = func(ctx context.Context, options StreamOptions) (aisdk.DataStream, error) {
				googleMessages, err := aisdk.MessagesToGoogle(options.Messages)
				if err != nil {
					return nil, err
				}
				tools, err := aisdk.ToolsToGoogle(options.Tools)
				if err != nil {
					return nil, err
				}
				var systemInstruction *genai.Content
				if options.SystemPrompt != "" {
					// NOTE(review): the system instruction is sent with
					// Role "model" here — confirm that is intentional.
					systemInstruction = &genai.Content{
						Parts: []*genai.Part{
							genai.NewPartFromText(options.SystemPrompt),
						},
						Role: "model",
					}
				}
				return aisdk.GoogleToDataStream(client.Models.GenerateContentStream(ctx, options.Model, googleMessages, &genai.GenerateContentConfig{
					SystemInstruction: systemInstruction,
					Tools:             tools,
				})), nil
			}
			if config.Models == nil {
				// No explicit model list configured: ask the provider.
				models, err := client.Models.List(ctx, &genai.ListModelsConfig{})
				if err != nil {
					return nil, err
				}
				config.Models = make([]string, len(models.Items))
				for i, model := range models.Items {
					config.Models[i] = model.Name
				}
			}
		default:
			return nil, xerrors.Errorf("unsupported model type: %s", config.Type)
		}

		// Register every model of this provider under its ID. All
		// models of one provider share the same streamFunc closure.
		for _, model := range config.Models {
			models[model] = LanguageModel{
				LanguageModel: codersdk.LanguageModel{
					ID:          model,
					DisplayName: model,
					Provider:    config.Type,
				},
				StreamFunc: streamFunc,
			}
		}
	}

	return models, nil
}
+10 -597
View File
@@ -343,173 +343,6 @@ const docTemplate = `{
}
}
},
"/chats": {
"get": {
"security": [
{
"CoderSessionToken": []
}
],
"produces": [
"application/json"
],
"tags": [
"Chat"
],
"summary": "List chats",
"operationId": "list-chats",
"responses": {
"200": {
"description": "OK",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/codersdk.Chat"
}
}
}
}
},
"post": {
"security": [
{
"CoderSessionToken": []
}
],
"produces": [
"application/json"
],
"tags": [
"Chat"
],
"summary": "Create a chat",
"operationId": "create-a-chat",
"responses": {
"201": {
"description": "Created",
"schema": {
"$ref": "#/definitions/codersdk.Chat"
}
}
}
}
},
"/chats/{chat}": {
"get": {
"security": [
{
"CoderSessionToken": []
}
],
"produces": [
"application/json"
],
"tags": [
"Chat"
],
"summary": "Get a chat",
"operationId": "get-a-chat",
"parameters": [
{
"type": "string",
"description": "Chat ID",
"name": "chat",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/codersdk.Chat"
}
}
}
}
},
"/chats/{chat}/messages": {
"get": {
"security": [
{
"CoderSessionToken": []
}
],
"produces": [
"application/json"
],
"tags": [
"Chat"
],
"summary": "Get chat messages",
"operationId": "get-chat-messages",
"parameters": [
{
"type": "string",
"description": "Chat ID",
"name": "chat",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/aisdk.Message"
}
}
}
}
},
"post": {
"security": [
{
"CoderSessionToken": []
}
],
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"tags": [
"Chat"
],
"summary": "Create a chat message",
"operationId": "create-a-chat-message",
"parameters": [
{
"type": "string",
"description": "Chat ID",
"name": "chat",
"in": "path",
"required": true
},
{
"description": "Request body",
"name": "request",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/codersdk.CreateChatMessageRequest"
}
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"type": "array",
"items": {}
}
}
}
}
},
"/csp/reports": {
"post": {
"security": [
@@ -826,31 +659,6 @@ const docTemplate = `{
}
}
},
"/deployment/llms": {
"get": {
"security": [
{
"CoderSessionToken": []
}
],
"produces": [
"application/json"
],
"tags": [
"General"
],
"summary": "Get language models",
"operationId": "get-language-models",
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/codersdk.LanguageModelConfig"
}
}
}
}
},
"/deployment/ssh": {
"get": {
"security": [
@@ -8645,7 +8453,7 @@ const docTemplate = `{
}
}
},
"/workspaceagents/{workspaceagent}/containers/devcontainers/container/{container}/recreate": {
"/workspaceagents/{workspaceagent}/containers/devcontainers/{devcontainer}/recreate": {
"post": {
"security": [
{
@@ -8671,8 +8479,8 @@ const docTemplate = `{
},
{
"type": "string",
"description": "Container ID or name",
"name": "container",
"description": "Devcontainer ID",
"name": "devcontainer",
"in": "path",
"required": true
}
@@ -10617,190 +10425,6 @@ const docTemplate = `{
"ReinitializeReasonPrebuildClaimed"
]
},
"aisdk.Attachment": {
"type": "object",
"properties": {
"contentType": {
"type": "string"
},
"name": {
"type": "string"
},
"url": {
"type": "string"
}
}
},
"aisdk.Message": {
"type": "object",
"properties": {
"annotations": {
"type": "array",
"items": {}
},
"content": {
"type": "string"
},
"createdAt": {
"type": "array",
"items": {
"type": "integer"
}
},
"experimental_attachments": {
"type": "array",
"items": {
"$ref": "#/definitions/aisdk.Attachment"
}
},
"id": {
"type": "string"
},
"parts": {
"type": "array",
"items": {
"$ref": "#/definitions/aisdk.Part"
}
},
"role": {
"type": "string"
}
}
},
"aisdk.Part": {
"type": "object",
"properties": {
"data": {
"type": "array",
"items": {
"type": "integer"
}
},
"details": {
"type": "array",
"items": {
"$ref": "#/definitions/aisdk.ReasoningDetail"
}
},
"mimeType": {
"description": "Type: \"file\"",
"type": "string"
},
"reasoning": {
"description": "Type: \"reasoning\"",
"type": "string"
},
"source": {
"description": "Type: \"source\"",
"allOf": [
{
"$ref": "#/definitions/aisdk.SourceInfo"
}
]
},
"text": {
"description": "Type: \"text\"",
"type": "string"
},
"toolInvocation": {
"description": "Type: \"tool-invocation\"",
"allOf": [
{
"$ref": "#/definitions/aisdk.ToolInvocation"
}
]
},
"type": {
"$ref": "#/definitions/aisdk.PartType"
}
}
},
"aisdk.PartType": {
"type": "string",
"enum": [
"text",
"reasoning",
"tool-invocation",
"source",
"file",
"step-start"
],
"x-enum-varnames": [
"PartTypeText",
"PartTypeReasoning",
"PartTypeToolInvocation",
"PartTypeSource",
"PartTypeFile",
"PartTypeStepStart"
]
},
"aisdk.ReasoningDetail": {
"type": "object",
"properties": {
"data": {
"type": "string"
},
"signature": {
"type": "string"
},
"text": {
"type": "string"
},
"type": {
"type": "string"
}
}
},
"aisdk.SourceInfo": {
"type": "object",
"properties": {
"contentType": {
"type": "string"
},
"data": {
"type": "string"
},
"metadata": {
"type": "object",
"additionalProperties": {}
},
"uri": {
"type": "string"
}
}
},
"aisdk.ToolInvocation": {
"type": "object",
"properties": {
"args": {},
"result": {},
"state": {
"$ref": "#/definitions/aisdk.ToolInvocationState"
},
"step": {
"type": "integer"
},
"toolCallId": {
"type": "string"
},
"toolName": {
"type": "string"
}
}
},
"aisdk.ToolInvocationState": {
"type": "string",
"enum": [
"call",
"partial-call",
"result"
],
"x-enum-varnames": [
"ToolInvocationStateCall",
"ToolInvocationStatePartialCall",
"ToolInvocationStateResult"
]
},
"coderd.SCIMUser": {
"type": "object",
"properties": {
@@ -10892,37 +10516,6 @@ const docTemplate = `{
}
}
},
"codersdk.AIConfig": {
"type": "object",
"properties": {
"providers": {
"type": "array",
"items": {
"$ref": "#/definitions/codersdk.AIProviderConfig"
}
}
}
},
"codersdk.AIProviderConfig": {
"type": "object",
"properties": {
"base_url": {
"description": "BaseURL is the base URL to use for the API provider.",
"type": "string"
},
"models": {
"description": "Models is the list of models to use for the API provider.",
"type": "array",
"items": {
"type": "string"
}
},
"type": {
"description": "Type is the type of the API provider.",
"type": "string"
}
}
},
"codersdk.APIKey": {
"type": "object",
"required": [
@@ -11480,12 +11073,14 @@ const docTemplate = `{
"enum": [
"initiator",
"autostart",
"autostop"
"autostop",
"dormancy"
],
"x-enum-varnames": [
"BuildReasonInitiator",
"BuildReasonAutostart",
"BuildReasonAutostop"
"BuildReasonAutostop",
"BuildReasonDormancy"
]
},
"codersdk.ChangePasswordWithOneTimePasscodeRequest": {
@@ -11508,62 +11103,6 @@ const docTemplate = `{
}
}
},
"codersdk.Chat": {
"type": "object",
"properties": {
"created_at": {
"type": "string",
"format": "date-time"
},
"id": {
"type": "string",
"format": "uuid"
},
"title": {
"type": "string"
},
"updated_at": {
"type": "string",
"format": "date-time"
}
}
},
"codersdk.ChatMessage": {
"type": "object",
"properties": {
"annotations": {
"type": "array",
"items": {}
},
"content": {
"type": "string"
},
"createdAt": {
"type": "array",
"items": {
"type": "integer"
}
},
"experimental_attachments": {
"type": "array",
"items": {
"$ref": "#/definitions/aisdk.Attachment"
}
},
"id": {
"type": "string"
},
"parts": {
"type": "array",
"items": {
"$ref": "#/definitions/aisdk.Part"
}
},
"role": {
"type": "string"
}
}
},
"codersdk.ConnectionLatency": {
"type": "object",
"properties": {
@@ -11597,20 +11136,6 @@ const docTemplate = `{
}
}
},
"codersdk.CreateChatMessageRequest": {
"type": "object",
"properties": {
"message": {
"$ref": "#/definitions/codersdk.ChatMessage"
},
"model": {
"type": "string"
},
"thinking": {
"type": "boolean"
}
}
},
"codersdk.CreateFirstUserRequest": {
"type": "object",
"required": [
@@ -11898,73 +11423,7 @@ const docTemplate = `{
}
},
"codersdk.CreateTestAuditLogRequest": {
"type": "object",
"properties": {
"action": {
"enum": [
"create",
"write",
"delete",
"start",
"stop"
],
"allOf": [
{
"$ref": "#/definitions/codersdk.AuditAction"
}
]
},
"additional_fields": {
"type": "array",
"items": {
"type": "integer"
}
},
"build_reason": {
"enum": [
"autostart",
"autostop",
"initiator"
],
"allOf": [
{
"$ref": "#/definitions/codersdk.BuildReason"
}
]
},
"organization_id": {
"type": "string",
"format": "uuid"
},
"request_id": {
"type": "string",
"format": "uuid"
},
"resource_id": {
"type": "string",
"format": "uuid"
},
"resource_type": {
"enum": [
"template",
"template_version",
"user",
"workspace",
"workspace_build",
"git_ssh_key",
"auditable_group"
],
"allOf": [
{
"$ref": "#/definitions/codersdk.ResourceType"
}
]
},
"time": {
"type": "string",
"format": "date-time"
}
}
"type": "object"
},
"codersdk.CreateTokenRequest": {
"type": "object",
@@ -12410,9 +11869,6 @@ const docTemplate = `{
"agent_stat_refresh_interval": {
"type": "integer"
},
"ai": {
"$ref": "#/definitions/serpent.Struct-codersdk_AIConfig"
},
"allow_workspace_renames": {
"type": "boolean"
},
@@ -12740,17 +12196,13 @@ const docTemplate = `{
"auto-fill-parameters",
"notifications",
"workspace-usage",
"web-push",
"workspace-prebuilds",
"agentic-chat"
"web-push"
],
"x-enum-comments": {
"ExperimentAgenticChat": "Enables the new agentic AI chat feature.",
"ExperimentAutoFillParameters": "This should not be taken out of experiments until we have redesigned the feature.",
"ExperimentExample": "This isn't used for anything.",
"ExperimentNotifications": "Sends notifications via SMTP and webhooks following certain events.",
"ExperimentWebPush": "Enables web push notifications through the browser.",
"ExperimentWorkspacePrebuilds": "Enables the new workspace prebuilds feature.",
"ExperimentWorkspaceUsage": "Enables the new workspace usage tracking."
},
"x-enum-varnames": [
@@ -12758,9 +12210,7 @@ const docTemplate = `{
"ExperimentAutoFillParameters",
"ExperimentNotifications",
"ExperimentWorkspaceUsage",
"ExperimentWebPush",
"ExperimentWorkspacePrebuilds",
"ExperimentAgenticChat"
"ExperimentWebPush"
]
},
"codersdk.ExternalAuth": {
@@ -13288,33 +12738,6 @@ const docTemplate = `{
"RequiredTemplateVariables"
]
},
"codersdk.LanguageModel": {
"type": "object",
"properties": {
"display_name": {
"type": "string"
},
"id": {
"description": "ID is used by the provider to identify the LLM.",
"type": "string"
},
"provider": {
"description": "Provider is the provider of the LLM. e.g. openai, anthropic, etc.",
"type": "string"
}
}
},
"codersdk.LanguageModelConfig": {
"type": "object",
"properties": {
"models": {
"type": "array",
"items": {
"$ref": "#/definitions/codersdk.LanguageModel"
}
}
}
},
"codersdk.License": {
"type": "object",
"properties": {
@@ -15233,7 +14656,6 @@ const docTemplate = `{
"assign_org_role",
"assign_role",
"audit_log",
"chat",
"crypto_key",
"debug_info",
"deployment_config",
@@ -15273,7 +14695,6 @@ const docTemplate = `{
"ResourceAssignOrgRole",
"ResourceAssignRole",
"ResourceAuditLog",
"ResourceChat",
"ResourceCryptoKey",
"ResourceDebugInfo",
"ResourceDeploymentConfig",
@@ -19342,14 +18763,6 @@ const docTemplate = `{
}
}
},
"serpent.Struct-codersdk_AIConfig": {
"type": "object",
"properties": {
"value": {
"$ref": "#/definitions/codersdk.AIConfig"
}
}
},
"serpent.URL": {
"type": "object",
"properties": {
+9 -557
View File
@@ -291,151 +291,6 @@
}
}
},
"/chats": {
"get": {
"security": [
{
"CoderSessionToken": []
}
],
"produces": ["application/json"],
"tags": ["Chat"],
"summary": "List chats",
"operationId": "list-chats",
"responses": {
"200": {
"description": "OK",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/codersdk.Chat"
}
}
}
}
},
"post": {
"security": [
{
"CoderSessionToken": []
}
],
"produces": ["application/json"],
"tags": ["Chat"],
"summary": "Create a chat",
"operationId": "create-a-chat",
"responses": {
"201": {
"description": "Created",
"schema": {
"$ref": "#/definitions/codersdk.Chat"
}
}
}
}
},
"/chats/{chat}": {
"get": {
"security": [
{
"CoderSessionToken": []
}
],
"produces": ["application/json"],
"tags": ["Chat"],
"summary": "Get a chat",
"operationId": "get-a-chat",
"parameters": [
{
"type": "string",
"description": "Chat ID",
"name": "chat",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/codersdk.Chat"
}
}
}
}
},
"/chats/{chat}/messages": {
"get": {
"security": [
{
"CoderSessionToken": []
}
],
"produces": ["application/json"],
"tags": ["Chat"],
"summary": "Get chat messages",
"operationId": "get-chat-messages",
"parameters": [
{
"type": "string",
"description": "Chat ID",
"name": "chat",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/aisdk.Message"
}
}
}
}
},
"post": {
"security": [
{
"CoderSessionToken": []
}
],
"consumes": ["application/json"],
"produces": ["application/json"],
"tags": ["Chat"],
"summary": "Create a chat message",
"operationId": "create-a-chat-message",
"parameters": [
{
"type": "string",
"description": "Chat ID",
"name": "chat",
"in": "path",
"required": true
},
{
"description": "Request body",
"name": "request",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/codersdk.CreateChatMessageRequest"
}
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"type": "array",
"items": {}
}
}
}
}
},
"/csp/reports": {
"post": {
"security": [
@@ -708,27 +563,6 @@
}
}
},
"/deployment/llms": {
"get": {
"security": [
{
"CoderSessionToken": []
}
],
"produces": ["application/json"],
"tags": ["General"],
"summary": "Get language models",
"operationId": "get-language-models",
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/codersdk.LanguageModelConfig"
}
}
}
}
},
"/deployment/ssh": {
"get": {
"security": [
@@ -7638,7 +7472,7 @@
}
}
},
"/workspaceagents/{workspaceagent}/containers/devcontainers/container/{container}/recreate": {
"/workspaceagents/{workspaceagent}/containers/devcontainers/{devcontainer}/recreate": {
"post": {
"security": [
{
@@ -7660,8 +7494,8 @@
},
{
"type": "string",
"description": "Container ID or name",
"name": "container",
"description": "Devcontainer ID",
"name": "devcontainer",
"in": "path",
"required": true
}
@@ -9410,186 +9244,6 @@
"enum": ["prebuild_claimed"],
"x-enum-varnames": ["ReinitializeReasonPrebuildClaimed"]
},
"aisdk.Attachment": {
"type": "object",
"properties": {
"contentType": {
"type": "string"
},
"name": {
"type": "string"
},
"url": {
"type": "string"
}
}
},
"aisdk.Message": {
"type": "object",
"properties": {
"annotations": {
"type": "array",
"items": {}
},
"content": {
"type": "string"
},
"createdAt": {
"type": "array",
"items": {
"type": "integer"
}
},
"experimental_attachments": {
"type": "array",
"items": {
"$ref": "#/definitions/aisdk.Attachment"
}
},
"id": {
"type": "string"
},
"parts": {
"type": "array",
"items": {
"$ref": "#/definitions/aisdk.Part"
}
},
"role": {
"type": "string"
}
}
},
"aisdk.Part": {
"type": "object",
"properties": {
"data": {
"type": "array",
"items": {
"type": "integer"
}
},
"details": {
"type": "array",
"items": {
"$ref": "#/definitions/aisdk.ReasoningDetail"
}
},
"mimeType": {
"description": "Type: \"file\"",
"type": "string"
},
"reasoning": {
"description": "Type: \"reasoning\"",
"type": "string"
},
"source": {
"description": "Type: \"source\"",
"allOf": [
{
"$ref": "#/definitions/aisdk.SourceInfo"
}
]
},
"text": {
"description": "Type: \"text\"",
"type": "string"
},
"toolInvocation": {
"description": "Type: \"tool-invocation\"",
"allOf": [
{
"$ref": "#/definitions/aisdk.ToolInvocation"
}
]
},
"type": {
"$ref": "#/definitions/aisdk.PartType"
}
}
},
"aisdk.PartType": {
"type": "string",
"enum": [
"text",
"reasoning",
"tool-invocation",
"source",
"file",
"step-start"
],
"x-enum-varnames": [
"PartTypeText",
"PartTypeReasoning",
"PartTypeToolInvocation",
"PartTypeSource",
"PartTypeFile",
"PartTypeStepStart"
]
},
"aisdk.ReasoningDetail": {
"type": "object",
"properties": {
"data": {
"type": "string"
},
"signature": {
"type": "string"
},
"text": {
"type": "string"
},
"type": {
"type": "string"
}
}
},
"aisdk.SourceInfo": {
"type": "object",
"properties": {
"contentType": {
"type": "string"
},
"data": {
"type": "string"
},
"metadata": {
"type": "object",
"additionalProperties": {}
},
"uri": {
"type": "string"
}
}
},
"aisdk.ToolInvocation": {
"type": "object",
"properties": {
"args": {},
"result": {},
"state": {
"$ref": "#/definitions/aisdk.ToolInvocationState"
},
"step": {
"type": "integer"
},
"toolCallId": {
"type": "string"
},
"toolName": {
"type": "string"
}
}
},
"aisdk.ToolInvocationState": {
"type": "string",
"enum": ["call", "partial-call", "result"],
"x-enum-varnames": [
"ToolInvocationStateCall",
"ToolInvocationStatePartialCall",
"ToolInvocationStateResult"
]
},
"coderd.SCIMUser": {
"type": "object",
"properties": {
@@ -9681,37 +9335,6 @@
}
}
},
"codersdk.AIConfig": {
"type": "object",
"properties": {
"providers": {
"type": "array",
"items": {
"$ref": "#/definitions/codersdk.AIProviderConfig"
}
}
}
},
"codersdk.AIProviderConfig": {
"type": "object",
"properties": {
"base_url": {
"description": "BaseURL is the base URL to use for the API provider.",
"type": "string"
},
"models": {
"description": "Models is the list of models to use for the API provider.",
"type": "array",
"items": {
"type": "string"
}
},
"type": {
"description": "Type is the type of the API provider.",
"type": "string"
}
}
},
"codersdk.APIKey": {
"type": "object",
"required": [
@@ -10235,11 +9858,12 @@
},
"codersdk.BuildReason": {
"type": "string",
"enum": ["initiator", "autostart", "autostop"],
"enum": ["initiator", "autostart", "autostop", "dormancy"],
"x-enum-varnames": [
"BuildReasonInitiator",
"BuildReasonAutostart",
"BuildReasonAutostop"
"BuildReasonAutostop",
"BuildReasonDormancy"
]
},
"codersdk.ChangePasswordWithOneTimePasscodeRequest": {
@@ -10258,62 +9882,6 @@
}
}
},
"codersdk.Chat": {
"type": "object",
"properties": {
"created_at": {
"type": "string",
"format": "date-time"
},
"id": {
"type": "string",
"format": "uuid"
},
"title": {
"type": "string"
},
"updated_at": {
"type": "string",
"format": "date-time"
}
}
},
"codersdk.ChatMessage": {
"type": "object",
"properties": {
"annotations": {
"type": "array",
"items": {}
},
"content": {
"type": "string"
},
"createdAt": {
"type": "array",
"items": {
"type": "integer"
}
},
"experimental_attachments": {
"type": "array",
"items": {
"$ref": "#/definitions/aisdk.Attachment"
}
},
"id": {
"type": "string"
},
"parts": {
"type": "array",
"items": {
"$ref": "#/definitions/aisdk.Part"
}
},
"role": {
"type": "string"
}
}
},
"codersdk.ConnectionLatency": {
"type": "object",
"properties": {
@@ -10344,20 +9912,6 @@
}
}
},
"codersdk.CreateChatMessageRequest": {
"type": "object",
"properties": {
"message": {
"$ref": "#/definitions/codersdk.ChatMessage"
},
"model": {
"type": "string"
},
"thinking": {
"type": "boolean"
}
}
},
"codersdk.CreateFirstUserRequest": {
"type": "object",
"required": ["email", "password", "username"],
@@ -10626,63 +10180,7 @@
}
},
"codersdk.CreateTestAuditLogRequest": {
"type": "object",
"properties": {
"action": {
"enum": ["create", "write", "delete", "start", "stop"],
"allOf": [
{
"$ref": "#/definitions/codersdk.AuditAction"
}
]
},
"additional_fields": {
"type": "array",
"items": {
"type": "integer"
}
},
"build_reason": {
"enum": ["autostart", "autostop", "initiator"],
"allOf": [
{
"$ref": "#/definitions/codersdk.BuildReason"
}
]
},
"organization_id": {
"type": "string",
"format": "uuid"
},
"request_id": {
"type": "string",
"format": "uuid"
},
"resource_id": {
"type": "string",
"format": "uuid"
},
"resource_type": {
"enum": [
"template",
"template_version",
"user",
"workspace",
"workspace_build",
"git_ssh_key",
"auditable_group"
],
"allOf": [
{
"$ref": "#/definitions/codersdk.ResourceType"
}
]
},
"time": {
"type": "string",
"format": "date-time"
}
}
"type": "object"
},
"codersdk.CreateTokenRequest": {
"type": "object",
@@ -11110,9 +10608,6 @@
"agent_stat_refresh_interval": {
"type": "integer"
},
"ai": {
"$ref": "#/definitions/serpent.Struct-codersdk_AIConfig"
},
"allow_workspace_renames": {
"type": "boolean"
},
@@ -11433,17 +10928,13 @@
"auto-fill-parameters",
"notifications",
"workspace-usage",
"web-push",
"workspace-prebuilds",
"agentic-chat"
"web-push"
],
"x-enum-comments": {
"ExperimentAgenticChat": "Enables the new agentic AI chat feature.",
"ExperimentAutoFillParameters": "This should not be taken out of experiments until we have redesigned the feature.",
"ExperimentExample": "This isn't used for anything.",
"ExperimentNotifications": "Sends notifications via SMTP and webhooks following certain events.",
"ExperimentWebPush": "Enables web push notifications through the browser.",
"ExperimentWorkspacePrebuilds": "Enables the new workspace prebuilds feature.",
"ExperimentWorkspaceUsage": "Enables the new workspace usage tracking."
},
"x-enum-varnames": [
@@ -11451,9 +10942,7 @@
"ExperimentAutoFillParameters",
"ExperimentNotifications",
"ExperimentWorkspaceUsage",
"ExperimentWebPush",
"ExperimentWorkspacePrebuilds",
"ExperimentAgenticChat"
"ExperimentWebPush"
]
},
"codersdk.ExternalAuth": {
@@ -11965,33 +11454,6 @@
"enum": ["REQUIRED_TEMPLATE_VARIABLES"],
"x-enum-varnames": ["RequiredTemplateVariables"]
},
"codersdk.LanguageModel": {
"type": "object",
"properties": {
"display_name": {
"type": "string"
},
"id": {
"description": "ID is used by the provider to identify the LLM.",
"type": "string"
},
"provider": {
"description": "Provider is the provider of the LLM. e.g. openai, anthropic, etc.",
"type": "string"
}
}
},
"codersdk.LanguageModelConfig": {
"type": "object",
"properties": {
"models": {
"type": "array",
"items": {
"$ref": "#/definitions/codersdk.LanguageModel"
}
}
}
},
"codersdk.License": {
"type": "object",
"properties": {
@@ -13825,7 +13287,6 @@
"assign_org_role",
"assign_role",
"audit_log",
"chat",
"crypto_key",
"debug_info",
"deployment_config",
@@ -13865,7 +13326,6 @@
"ResourceAssignOrgRole",
"ResourceAssignRole",
"ResourceAuditLog",
"ResourceChat",
"ResourceCryptoKey",
"ResourceDebugInfo",
"ResourceDeploymentConfig",
@@ -17720,14 +17180,6 @@
}
}
},
"serpent.Struct-codersdk_AIConfig": {
"type": "object",
"properties": {
"value": {
"$ref": "#/definitions/codersdk.AIConfig"
}
}
},
"serpent.URL": {
"type": "object",
"properties": {
+2
View File
@@ -520,6 +520,8 @@ func isEligibleForAutostart(user database.User, ws database.Workspace, build dat
return false
}
// Get the next allowed autostart time after the build's creation time,
// based on the workspace's schedule and the template's allowed days.
nextTransition, err := schedule.NextAllowedAutostart(build.CreatedAt, ws.AutostartSchedule.String, templateSchedule)
if err != nil {
return false
+349
View File
@@ -2,9 +2,16 @@ package autobuild_test
import (
"context"
"database/sql"
"errors"
"testing"
"time"
"github.com/coder/coder/v2/coderd/database/dbgen"
"github.com/coder/coder/v2/coderd/database/pubsub"
"github.com/coder/coder/v2/coderd/rbac"
"github.com/coder/quartz"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -1183,6 +1190,348 @@ func TestNotifications(t *testing.T) {
})
}
// TestExecutorPrebuilds verifies AGPL behavior for prebuilt workspaces.
// It ensures that workspace schedules do not trigger while the workspace
// is still in a prebuilt state. Scheduling behavior only applies after the
// workspace has been claimed and becomes a regular user workspace.
// For enterprise-related functionality, see enterprise/coderd/workspaces_test.go.
func TestExecutorPrebuilds(t *testing.T) {
	t.Parallel()

	// Prebuild claiming relies on SQL behavior that only the real database
	// implements, so skip unless the suite is running against Postgres.
	if !dbtestutil.WillUsePostgres() {
		t.Skip("this test requires postgres")
	}

	// Prebuild workspaces should not be autostopped when the deadline is reached.
	// After being claimed, the workspace should stop at the deadline.
	t.Run("OnlyStopsAfterClaimed", func(t *testing.T) {
		t.Parallel()

		// Setup: an in-process coderd wired to a manual autobuild ticker so the
		// test controls exactly when the lifecycle executor runs.
		ctx := testutil.Context(t, testutil.WaitShort)
		clock := quartz.NewMock(t)
		db, pb := dbtestutil.NewDB(t, dbtestutil.WithDumpOnFailure())
		var (
			tickCh  = make(chan time.Time)
			statsCh = make(chan autobuild.Stats)
			client  = coderdtest.New(t, &coderdtest.Options{
				Database:                 db,
				Pubsub:                   pb,
				AutobuildTicker:          tickCh,
				IncludeProvisionerDaemon: true,
				AutobuildStats:           statsCh,
			})
		)

		// Setup user, template and template version
		owner := coderdtest.CreateFirstUser(t, client)
		_, user := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleMember())
		version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, nil)
		coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
		template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID)

		// Database setup of a preset with a prebuild instance
		preset := setupTestDBPreset(t, db, version.ID, int32(1))

		// Given: a running prebuilt workspace with a deadline, ready to be claimed
		dbPrebuild := setupTestDBPrebuiltWorkspace(
			ctx, t, clock, db, pb,
			owner.OrganizationID,
			template.ID,
			version.ID,
			preset.ID,
		)
		prebuild := coderdtest.MustWorkspace(t, client, dbPrebuild.ID)
		require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition)
		require.NotZero(t, prebuild.LatestBuild.Deadline)

		// When: the autobuild executor ticks *after* the deadline:
		go func() {
			tickCh <- prebuild.LatestBuild.Deadline.Time.Add(time.Minute)
		}()

		// Then: the prebuilt workspace should remain in a start transition
		// (no transitions, no errors — the executor must skip unclaimed prebuilds).
		prebuildStats := <-statsCh
		require.Len(t, prebuildStats.Errors, 0)
		require.Len(t, prebuildStats.Transitions, 0)
		require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition)
		prebuild = coderdtest.MustWorkspace(t, client, prebuild.ID)
		require.Equal(t, codersdk.BuildReasonInitiator, prebuild.LatestBuild.Reason)

		// Given: a user claims the prebuilt workspace
		dbWorkspace := dbgen.ClaimPrebuild(t, db, user.ID, "claimedWorkspace-autostop", preset.ID)
		workspace := coderdtest.MustWorkspace(t, client, dbWorkspace.ID)

		// When: the autobuild executor ticks *after* the deadline:
		go func() {
			tickCh <- workspace.LatestBuild.Deadline.Time.Add(time.Minute)
			close(tickCh)
		}()

		// Then: the claimed workspace should be stopped by the executor.
		workspaceStats := <-statsCh
		require.Len(t, workspaceStats.Errors, 0)
		require.Len(t, workspaceStats.Transitions, 1)
		require.Contains(t, workspaceStats.Transitions, workspace.ID)
		require.Equal(t, database.WorkspaceTransitionStop, workspaceStats.Transitions[workspace.ID])
		workspace = coderdtest.MustWorkspace(t, client, workspace.ID)
		require.Equal(t, codersdk.BuildReasonAutostop, workspace.LatestBuild.Reason)
	})

	// Prebuild workspaces should not be autostarted when the autostart schedule is reached.
	// After being claimed, the workspace should autostart at the schedule.
	t.Run("OnlyStartsAfterClaimed", func(t *testing.T) {
		t.Parallel()

		// Setup: same manual-tick arrangement as above.
		ctx := testutil.Context(t, testutil.WaitShort)
		clock := quartz.NewMock(t)
		db, pb := dbtestutil.NewDB(t, dbtestutil.WithDumpOnFailure())
		var (
			tickCh  = make(chan time.Time)
			statsCh = make(chan autobuild.Stats)
			client  = coderdtest.New(t, &coderdtest.Options{
				Database:                 db,
				Pubsub:                   pb,
				AutobuildTicker:          tickCh,
				IncludeProvisionerDaemon: true,
				AutobuildStats:           statsCh,
			})
		)

		// Setup user, template and template version
		owner := coderdtest.CreateFirstUser(t, client)
		_, user := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleMember())
		version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, nil)
		coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
		template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID)

		// Database setup of a preset with a prebuild instance
		preset := setupTestDBPreset(t, db, version.ID, int32(1))

		// Given: prebuilt workspace is stopped and set to autostart daily at midnight
		sched := mustSchedule(t, "CRON_TZ=UTC 0 0 * * *")
		autostartSched := sql.NullString{
			String: sched.String(),
			Valid:  true,
		}
		dbPrebuild := setupTestDBPrebuiltWorkspace(
			ctx, t, clock, db, pb,
			owner.OrganizationID,
			template.ID,
			version.ID,
			preset.ID,
			WithAutostartSchedule(autostartSched),
			WithIsStopped(true),
		)
		prebuild := coderdtest.MustWorkspace(t, client, dbPrebuild.ID)
		require.Equal(t, codersdk.WorkspaceTransitionStop, prebuild.LatestBuild.Transition)
		require.NotNil(t, prebuild.AutostartSchedule)

		// Tick at the next scheduled time after the prebuild's LatestBuild.CreatedAt,
		// since the next allowed autostart is calculated starting from that point.
		// When: the autobuild executor ticks after the scheduled time
		go func() {
			tickCh <- sched.Next(prebuild.LatestBuild.CreatedAt).Add(time.Minute)
		}()

		// Then: the prebuilt workspace should remain in a stop transition
		// (unclaimed prebuilds must not be autostarted).
		prebuildStats := <-statsCh
		require.Len(t, prebuildStats.Errors, 0)
		require.Len(t, prebuildStats.Transitions, 0)
		require.Equal(t, codersdk.WorkspaceTransitionStop, prebuild.LatestBuild.Transition)
		prebuild = coderdtest.MustWorkspace(t, client, prebuild.ID)
		require.Equal(t, codersdk.BuildReasonInitiator, prebuild.LatestBuild.Reason)

		// Given: prebuilt workspace is in a start status
		setupTestDBWorkspaceBuild(
			ctx, t, clock, db, pb,
			owner.OrganizationID,
			prebuild.ID,
			version.ID,
			preset.ID,
			database.WorkspaceTransitionStart)

		// Given: a user claims the prebuilt workspace
		dbWorkspace := dbgen.ClaimPrebuild(t, db, user.ID, "claimedWorkspace-autostart", preset.ID)
		workspace := coderdtest.MustWorkspace(t, client, dbWorkspace.ID)

		// Given: the prebuilt workspace goes to a stop status
		setupTestDBWorkspaceBuild(
			ctx, t, clock, db, pb,
			owner.OrganizationID,
			prebuild.ID,
			version.ID,
			preset.ID,
			database.WorkspaceTransitionStop)

		// Tick at the next scheduled time after the workspace's LatestBuild.CreatedAt,
		// since the next allowed autostart is calculated starting from that point.
		// When: the autobuild executor ticks after the scheduled time
		go func() {
			tickCh <- sched.Next(workspace.LatestBuild.CreatedAt).Add(time.Minute)
			close(tickCh)
		}()

		// Then: the claimed workspace should eventually be started
		workspaceStats := <-statsCh
		require.Len(t, workspaceStats.Errors, 0)
		require.Len(t, workspaceStats.Transitions, 1)
		require.Contains(t, workspaceStats.Transitions, workspace.ID)
		require.Equal(t, database.WorkspaceTransitionStart, workspaceStats.Transitions[workspace.ID])
		workspace = coderdtest.MustWorkspace(t, client, workspace.ID)
		require.Equal(t, codersdk.BuildReasonAutostart, workspace.LatestBuild.Reason)
	})
}
// setupTestDBPreset inserts a template-version preset named "preset-test"
// with the requested number of desired prebuild instances, along with a
// single preset parameter, directly into the database.
func setupTestDBPreset(
	t *testing.T,
	db database.Store,
	templateVersionID uuid.UUID,
	desiredInstances int32,
) database.TemplateVersionPreset {
	t.Helper()

	// Create the preset first; its ID is needed for the parameter row.
	insertParams := database.InsertPresetParams{
		TemplateVersionID: templateVersionID,
		Name:              "preset-test",
		DesiredInstances: sql.NullInt32{
			Int32: desiredInstances,
			Valid: true,
		},
	}
	created := dbgen.Preset(t, db, insertParams)

	dbgen.PresetParameter(t, db, database.InsertPresetParametersParams{
		TemplateVersionPresetID: created.ID,
		Names:                   []string{"test-name"},
		Values:                  []string{"test-value"},
	})

	return created
}
// SetupPrebuiltOptions holds optional knobs for setupTestDBPrebuiltWorkspace,
// set via the With* functional options below.
type SetupPrebuiltOptions struct {
	// AutostartSchedule, when valid, seeds the prebuilt workspace with an
	// autostart cron schedule (see WithAutostartSchedule).
	AutostartSchedule sql.NullString
	// IsStopped, when true, seeds the prebuilt workspace in a stopped state
	// instead of a running one (see WithIsStopped).
	IsStopped bool
}
// WithAutostartSchedule returns an option that seeds the prebuilt workspace
// with the provided autostart cron schedule.
func WithAutostartSchedule(sched sql.NullString) func(*SetupPrebuiltOptions) {
	return func(opts *SetupPrebuiltOptions) {
		opts.AutostartSchedule = sched
	}
}
// WithIsStopped returns an option that controls whether the prebuilt
// workspace is seeded in a stopped state.
func WithIsStopped(isStopped bool) func(*SetupPrebuiltOptions) {
	return func(opts *SetupPrebuiltOptions) {
		opts.IsStopped = isStopped
	}
}
// setupTestDBWorkspaceBuild inserts a completed, prebuilds-initiated workspace
// build for the given workspace: a succeeded provisioner job, the build itself
// (with the given transition and preset), one build parameter, a compute
// resource, and a "ready" agent. The build number continues from the
// workspace's latest build, starting at 1 when none exists.
func setupTestDBWorkspaceBuild(
	ctx context.Context,
	t *testing.T,
	clock quartz.Clock,
	db database.Store,
	ps pubsub.Pubsub,
	orgID uuid.UUID,
	workspaceID uuid.UUID,
	templateVersionID uuid.UUID,
	presetID uuid.UUID,
	transition database.WorkspaceTransition,
) (database.ProvisionerJob, database.WorkspaceBuild) {
	t.Helper()

	var buildNumber int32 = 1
	latestWorkspaceBuild, err := db.GetLatestWorkspaceBuildByWorkspaceID(ctx, workspaceID)
	switch {
	case err == nil:
		buildNumber = latestWorkspaceBuild.BuildNumber + 1
	case errors.Is(err, sql.ErrNoRows):
		// No previous build: this is build number 1.
	default:
		// Previously any unexpected error was silently treated like "no rows";
		// fail fast instead so broken DB state can't masquerade as build #1.
		require.NoError(t, err)
	}

	// Backdate the job so it reads as having started and completed in the
	// past relative to the injected (mock) clock.
	job := dbgen.ProvisionerJob(t, db, ps, database.ProvisionerJob{
		InitiatorID:    database.PrebuildsSystemUserID,
		CreatedAt:      clock.Now().Add(-time.Hour * 2),
		StartedAt:      sql.NullTime{Time: clock.Now().Add(-time.Hour * 2), Valid: true},
		CompletedAt:    sql.NullTime{Time: clock.Now().Add(-time.Hour), Valid: true},
		OrganizationID: orgID,
		JobStatus:      database.ProvisionerJobStatusSucceeded,
	})
	workspaceBuild := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{
		WorkspaceID:             workspaceID,
		InitiatorID:             database.PrebuildsSystemUserID,
		TemplateVersionID:       templateVersionID,
		BuildNumber:             buildNumber,
		JobID:                   job.ID,
		TemplateVersionPresetID: uuid.NullUUID{UUID: presetID, Valid: true},
		Transition:              transition,
		CreatedAt:               clock.Now(),
	})
	dbgen.WorkspaceBuildParameters(t, db, []database.WorkspaceBuildParameter{
		{
			WorkspaceBuildID: workspaceBuild.ID,
			Name:             "test",
			Value:            "test",
		},
	})
	workspaceResource := dbgen.WorkspaceResource(t, db, database.WorkspaceResource{
		JobID:      job.ID,
		Transition: database.WorkspaceTransitionStart,
		Type:       "compute",
		Name:       "main",
	})

	// Workspaces are eligible to be claimed once their agent is marked "ready".
	// Use the injected mock clock for the agent timestamps too (the original
	// used time.Now() here, inconsistent with the rest of this helper).
	// NOTE(review): StartedAt is one hour in the future while ReadyAt is one
	// hour in the past — preserved as-is; confirm this asymmetry is intended.
	dbgen.WorkspaceAgent(t, db, database.WorkspaceAgent{
		Name:            "test",
		ResourceID:      workspaceResource.ID,
		Architecture:    "i386",
		OperatingSystem: "linux",
		LifecycleState:  database.WorkspaceAgentLifecycleStateReady,
		StartedAt:       sql.NullTime{Time: clock.Now().Add(time.Hour), Valid: true},
		ReadyAt:         sql.NullTime{Time: clock.Now().Add(-1 * time.Hour), Valid: true},
		APIKeyScope:     database.AgentKeyScopeEnumAll,
	})
	return job, workspaceBuild
}
// setupTestDBPrebuiltWorkspace seeds a workspace owned by the prebuilds
// system user, together with its latest build (a start transition by
// default, or a stop transition when WithIsStopped is supplied) via
// setupTestDBWorkspaceBuild. Optional behavior is configured through
// functional options (WithAutostartSchedule, WithIsStopped).
func setupTestDBPrebuiltWorkspace(
	ctx context.Context,
	t *testing.T,
	clock quartz.Clock,
	db database.Store,
	ps pubsub.Pubsub,
	orgID uuid.UUID,
	templateID uuid.UUID,
	templateVersionID uuid.UUID,
	presetID uuid.UUID,
	opts ...func(*SetupPrebuiltOptions),
) database.WorkspaceTable {
	t.Helper()

	// Apply optional parameters.
	options := &SetupPrebuiltOptions{}
	for _, opt := range opts {
		opt(options)
	}

	buildTransition := database.WorkspaceTransitionStart
	if options.IsStopped {
		buildTransition = database.WorkspaceTransitionStop
	}

	workspace := dbgen.Workspace(t, db, database.WorkspaceTable{
		TemplateID:     templateID,
		OrganizationID: orgID,
		OwnerID:        database.PrebuildsSystemUserID,
		Deleted:        false,
		// Use the injected clock (not time.Now) so tests driving a mocked
		// clock see timestamps consistent with setupTestDBWorkspaceBuild.
		CreatedAt:         clock.Now().Add(-time.Hour * 2),
		AutostartSchedule: options.AutostartSchedule,
	})
	setupTestDBWorkspaceBuild(ctx, t, clock, db, ps, orgID, workspace.ID, templateVersionID, presetID, buildTransition)

	return workspace
}
func mustProvisionWorkspace(t *testing.T, client *codersdk.Client, mut ...func(*codersdk.CreateWorkspaceRequest)) codersdk.Workspace {
t.Helper()
user := coderdtest.CreateFirstUser(t, client)
-366
View File
@@ -1,366 +0,0 @@
package coderd
import (
"encoding/json"
"io"
"net/http"
"time"
"github.com/kylecarbs/aisdk-go"
"github.com/coder/coder/v2/coderd/ai"
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/database/db2sdk"
"github.com/coder/coder/v2/coderd/database/dbtime"
"github.com/coder/coder/v2/coderd/httpapi"
"github.com/coder/coder/v2/coderd/httpmw"
"github.com/coder/coder/v2/coderd/util/strings"
"github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/codersdk/toolsdk"
)
// postChats creates a new chat owned by the authenticated user.
//
// @Summary Create a chat
// @ID create-a-chat
// @Security CoderSessionToken
// @Produce json
// @Tags Chat
// @Success 201 {object} codersdk.Chat
// @Router /chats [post]
func (api *API) postChats(w http.ResponseWriter, r *http.Request) {
	apiKey := httpmw.APIKey(r)
	ctx := r.Context()
	// New chats start with a placeholder title; it is replaced when the
	// first message is posted (see postChatMessages).
	// NOTE(review): uses time.Now rather than dbtime.Now, unlike the chat
	// update path — confirm whether that is intentional.
	chat, err := api.Database.InsertChat(ctx, database.InsertChatParams{
		OwnerID:   apiKey.UserID,
		CreatedAt: time.Now(),
		UpdatedAt: time.Now(),
		Title:     "New Chat",
	})
	if err != nil {
		httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
			Message: "Failed to create chat",
			Detail:  err.Error(),
		})
		return
	}
	httpapi.Write(ctx, w, http.StatusCreated, db2sdk.Chat(chat))
}
// listChats lists all chats owned by the authenticated user.
//
// @Summary List chats
// @ID list-chats
// @Security CoderSessionToken
// @Produce json
// @Tags Chat
// @Success 200 {array} codersdk.Chat
// @Router /chats [get]
func (api *API) listChats(w http.ResponseWriter, r *http.Request) {
	apiKey := httpmw.APIKey(r)
	ctx := r.Context()
	// Scoped to the caller: only chats whose OwnerID matches the API key's
	// user are fetched.
	chats, err := api.Database.GetChatsByOwnerID(ctx, apiKey.UserID)
	if err != nil {
		httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
			Message: "Failed to list chats",
			Detail:  err.Error(),
		})
		return
	}
	httpapi.Write(ctx, w, http.StatusOK, db2sdk.Chats(chats))
}
// chat returns a chat by ID.
//
// @Summary Get a chat
// @ID get-a-chat
// @Security CoderSessionToken
// @Produce json
// @Tags Chat
// @Param chat path string true "Chat ID"
// @Success 200 {object} codersdk.Chat
// @Router /chats/{chat} [get]
func (*API) chat(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	// The chat row is loaded by the ExtractChatParam middleware on this
	// route; the handler only converts it to the SDK shape.
	chat := httpmw.ChatParam(r)
	httpapi.Write(ctx, w, http.StatusOK, db2sdk.Chat(chat))
}
// chatMessages returns the messages of a chat, decoded from their stored
// JSON form into aisdk.Message values.
//
// @Summary Get chat messages
// @ID get-chat-messages
// @Security CoderSessionToken
// @Produce json
// @Tags Chat
// @Param chat path string true "Chat ID"
// @Success 200 {array} aisdk.Message
// @Router /chats/{chat}/messages [get]
func (api *API) chatMessages(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	chat := httpmw.ChatParam(r)
	rawMessages, err := api.Database.GetChatMessagesByChatID(ctx, chat.ID)
	if err != nil {
		httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
			Message: "Failed to get chat messages",
			Detail:  err.Error(),
		})
		return
	}
	// Each row's Content column holds a JSON-encoded aisdk.Message; a
	// single undecodable row fails the whole request.
	messages := make([]aisdk.Message, len(rawMessages))
	for i, message := range rawMessages {
		var msg aisdk.Message
		err = json.Unmarshal(message.Content, &msg)
		if err != nil {
			httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
				Message: "Failed to unmarshal chat message",
				Detail:  err.Error(),
			})
			return
		}
		messages[i] = msg
	}
	httpapi.Write(ctx, w, http.StatusOK, messages)
}
// postChatMessages creates a new chat message and streams the model's
// response back to the client as aisdk data-stream parts. The handler:
// rebuilds the conversation from the database, appends the incoming
// message, optionally generates a chat title on the first message, persists
// the user message, then loops streaming model output (executing tool calls
// between rounds) until the model finishes for a reason other than
// tool calls.
//
// @Summary Create a chat message
// @ID create-a-chat-message
// @Security CoderSessionToken
// @Accept json
// @Produce json
// @Tags Chat
// @Param chat path string true "Chat ID"
// @Param request body codersdk.CreateChatMessageRequest true "Request body"
// @Success 200 {array} aisdk.DataStreamPart
// @Router /chats/{chat}/messages [post]
func (api *API) postChatMessages(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	chat := httpmw.ChatParam(r)
	var req codersdk.CreateChatMessageRequest
	err := json.NewDecoder(r.Body).Decode(&req)
	if err != nil {
		httpapi.Write(ctx, w, http.StatusBadRequest, codersdk.Response{
			Message: "Failed to decode chat message",
			Detail:  err.Error(),
		})
		return
	}
	// Rebuild the stored conversation so the model sees full context.
	dbMessages, err := api.Database.GetChatMessagesByChatID(ctx, chat.ID)
	if err != nil {
		httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
			Message: "Failed to get chat messages",
			Detail:  err.Error(),
		})
		return
	}
	messages := make([]codersdk.ChatMessage, 0)
	for _, dbMsg := range dbMessages {
		var msg codersdk.ChatMessage
		err = json.Unmarshal(dbMsg.Content, &msg)
		if err != nil {
			httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
				Message: "Failed to unmarshal chat message",
				Detail:  err.Error(),
			})
			return
		}
		messages = append(messages, msg)
	}
	messages = append(messages, req.Message)
	// Tool handlers act on behalf of the caller: the client is
	// authenticated with the requesting user's own session token.
	client := codersdk.New(api.AccessURL)
	client.SetSessionToken(httpmw.APITokenFromRequest(r))
	tools := make([]aisdk.Tool, 0)
	handlers := map[string]toolsdk.GenericHandlerFunc{}
	for _, tool := range toolsdk.All {
		if tool.Name == "coder_report_task" {
			continue // This tool requires an agent to run.
		}
		tools = append(tools, tool.Tool)
		handlers[tool.Tool.Name] = tool.Handler
	}
	provider, ok := api.LanguageModels[req.Model]
	if !ok {
		httpapi.Write(ctx, w, http.StatusBadRequest, codersdk.Response{
			Message: "Model not found",
		})
		return
	}
	// If it's the user's first message, generate a title for the chat.
	if len(messages) == 1 {
		var acc aisdk.DataStreamAccumulator
		stream, err := provider.StreamFunc(ctx, ai.StreamOptions{
			Model: req.Model,
			SystemPrompt: `- You will generate a short title based on the user's message.
- It should be maximum of 40 characters.
- Do not use quotes, colons, special characters, or emojis.`,
			Messages: messages,
			Tools:    []aisdk.Tool{}, // This initial stream doesn't use tools.
		})
		if err != nil {
			httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
				Message: "Failed to create stream",
				Detail:  err.Error(),
			})
			return
		}
		// Drain the title stream; we only need the accumulated text.
		stream = stream.WithAccumulator(&acc)
		err = stream.Pipe(io.Discard)
		if err != nil {
			httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
				Message: "Failed to pipe stream",
				Detail:  err.Error(),
			})
			return
		}
		var newTitle string
		accMessages := acc.Messages()
		// If for some reason the stream didn't return any messages, use the
		// original message as the title.
		if len(accMessages) == 0 {
			newTitle = strings.Truncate(messages[0].Content, 40)
		} else {
			newTitle = strings.Truncate(accMessages[0].Content, 40)
		}
		err = api.Database.UpdateChatByID(ctx, database.UpdateChatByIDParams{
			ID:        chat.ID,
			Title:     newTitle,
			UpdatedAt: dbtime.Now(),
		})
		if err != nil {
			httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
				Message: "Failed to update chat title",
				Detail:  err.Error(),
			})
			return
		}
	}
	// Write headers for the data stream!
	aisdk.WriteDataStreamHeaders(w)
	// Insert the user-requested message into the database!
	raw, err := json.Marshal([]aisdk.Message{req.Message})
	if err != nil {
		httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
			Message: "Failed to marshal chat message",
			Detail:  err.Error(),
		})
		return
	}
	_, err = api.Database.InsertChatMessages(ctx, database.InsertChatMessagesParams{
		ChatID:    chat.ID,
		CreatedAt: dbtime.Now(),
		Model:     req.Model,
		Provider:  provider.Provider,
		Content:   raw,
	})
	if err != nil {
		httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
			Message: "Failed to insert chat messages",
			Detail:  err.Error(),
		})
		return
	}
	deps, err := toolsdk.NewDeps(client)
	if err != nil {
		httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
			Message: "Failed to create tool dependencies",
			Detail:  err.Error(),
		})
		return
	}
	// Stream rounds until the model stops asking for tool calls. Each
	// round's output is persisted before the next begins.
	for {
		var acc aisdk.DataStreamAccumulator
		stream, err := provider.StreamFunc(ctx, ai.StreamOptions{
			Model:    req.Model,
			Messages: messages,
			Tools:    tools,
			SystemPrompt: `You are a chat assistant for Coder - an open-source platform for creating and managing cloud development environments on any infrastructure. You are expected to be precise, concise, and helpful.
You are running as an agent - please keep going until the user's query is completely resolved, before ending your turn and yielding back to the user. Only terminate your turn when you are sure that the problem is solved. Do NOT guess or make up an answer.`,
		})
		if err != nil {
			httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
				Message: "Failed to create stream",
				Detail:  err.Error(),
			})
			return
		}
		// Tool-call errors are reported back to the model as a result
		// payload rather than failing the HTTP request.
		stream = stream.WithToolCalling(func(toolCall aisdk.ToolCall) aisdk.ToolCallResult {
			tool, ok := handlers[toolCall.Name]
			if !ok {
				return nil
			}
			toolArgs, err := json.Marshal(toolCall.Args)
			if err != nil {
				return nil
			}
			result, err := tool(ctx, deps, toolArgs)
			if err != nil {
				return map[string]any{
					"error": err.Error(),
				}
			}
			return result
		}).WithAccumulator(&acc)
		err = stream.Pipe(w)
		if err != nil {
			// The client disappeared! Headers are already written, so all
			// we can do is log and bail.
			api.Logger.Error(ctx, "stream pipe error", "error", err)
			return
		}
		// acc.Messages() may sometimes return nil. Serializing this
		// will cause a pq error: "cannot extract elements from a scalar".
		newMessages := append([]aisdk.Message{}, acc.Messages()...)
		if len(newMessages) > 0 {
			raw, err := json.Marshal(newMessages)
			if err != nil {
				httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
					Message: "Failed to marshal chat message",
					Detail:  err.Error(),
				})
				return
			}
			messages = append(messages, newMessages...)
			// Insert these messages into the database!
			_, err = api.Database.InsertChatMessages(ctx, database.InsertChatMessagesParams{
				ChatID:    chat.ID,
				CreatedAt: dbtime.Now(),
				Model:     req.Model,
				Provider:  provider.Provider,
				Content:   raw,
			})
			if err != nil {
				httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
					Message: "Failed to insert chat messages",
					Detail:  err.Error(),
				})
				return
			}
		}
		if acc.FinishReason() == aisdk.FinishReasonToolCalls {
			continue
		}
		break
	}
}
-125
View File
@@ -1,125 +0,0 @@
package coderd_test
import (
"net/http"
"strings"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/coder/coder/v2/coderd/coderdtest"
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/database/dbgen"
"github.com/coder/coder/v2/coderd/database/dbtime"
"github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/testutil"
)
// TestChat exercises the experimental chat API: gating behind the
// agentic-chat experiment, and basic CRUD over chats and messages.
func TestChat(t *testing.T) {
	t.Parallel()
	t.Run("ExperimentAgenticChatDisabled", func(t *testing.T) {
		t.Parallel()
		client, _ := coderdtest.NewWithDatabase(t, nil)
		owner := coderdtest.CreateFirstUser(t, client)
		memberClient, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID)
		// Hit the endpoint to list chats. Without the experiment enabled
		// the route is rejected (the assertion below expects a 403).
		ctx := testutil.Context(t, testutil.WaitShort)
		_, err := memberClient.ListChats(ctx)
		require.Error(t, err, "list chats should fail")
		var sdkErr *codersdk.Error
		require.ErrorAs(t, err, &sdkErr, "request should fail with an SDK error")
		require.Equal(t, http.StatusForbidden, sdkErr.StatusCode())
	})
	t.Run("ChatCRUD", func(t *testing.T) {
		t.Parallel()
		// Enable the experiment and register a fake AI provider so the
		// chat routes are mounted.
		dv := coderdtest.DeploymentValues(t)
		dv.Experiments = []string{string(codersdk.ExperimentAgenticChat)}
		dv.AI.Value = codersdk.AIConfig{
			Providers: []codersdk.AIProviderConfig{
				{
					Type:    "fake",
					APIKey:  "",
					BaseURL: "http://localhost",
					Models:  []string{"fake-model"},
				},
			},
		}
		client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{
			DeploymentValues: dv,
		})
		owner := coderdtest.CreateFirstUser(t, client)
		memberClient, memberUser := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID)
		// Seed the database with some data.
		dbChat := dbgen.Chat(t, db, database.Chat{
			OwnerID:   memberUser.ID,
			CreatedAt: dbtime.Now().Add(-time.Hour),
			UpdatedAt: dbtime.Now().Add(-time.Hour),
			Title:     "This is a test chat",
		})
		_ = dbgen.ChatMessage(t, db, database.ChatMessage{
			ChatID:    dbChat.ID,
			CreatedAt: dbtime.Now().Add(-time.Hour),
			Content:   []byte(`[{"content": "Hello world"}]`),
			Model:     "fake model",
			Provider:  "fake",
		})
		ctx := testutil.Context(t, testutil.WaitShort)
		// Listing chats should return the chat we just inserted.
		chats, err := memberClient.ListChats(ctx)
		require.NoError(t, err, "list chats should succeed")
		require.Len(t, chats, 1, "response should have one chat")
		require.Equal(t, dbChat.ID, chats[0].ID, "unexpected chat ID")
		require.Equal(t, dbChat.Title, chats[0].Title, "unexpected chat title")
		require.Equal(t, dbChat.CreatedAt.UTC(), chats[0].CreatedAt.UTC(), "unexpected chat created at")
		require.Equal(t, dbChat.UpdatedAt.UTC(), chats[0].UpdatedAt.UTC(), "unexpected chat updated at")
		// Fetching a single chat by ID should return the same chat.
		chat, err := memberClient.Chat(ctx, dbChat.ID)
		require.NoError(t, err, "get chat should succeed")
		require.Equal(t, chats[0], chat, "get chat should return the same chat")
		// Listing chat messages should return the message we just inserted.
		messages, err := memberClient.ChatMessages(ctx, dbChat.ID)
		require.NoError(t, err, "list chat messages should succeed")
		require.Len(t, messages, 1, "response should have one message")
		require.Equal(t, "Hello world", messages[0].Content, "response should have the correct message content")
		// Creating a new chat will fail because the model does not exist.
		// TODO: Test the message streaming functionality with a mock model.
		// Inserting a chat message will fail due to the model not existing.
		_, err = memberClient.CreateChatMessage(ctx, dbChat.ID, codersdk.CreateChatMessageRequest{
			Model: "echo",
			Message: codersdk.ChatMessage{
				Role:    "user",
				Content: "Hello world",
			},
			Thinking: false,
		})
		require.Error(t, err, "create chat message should fail")
		var sdkErr *codersdk.Error
		require.ErrorAs(t, err, &sdkErr, "create chat should fail with an SDK error")
		require.Equal(t, http.StatusBadRequest, sdkErr.StatusCode(), "create chat should fail with a 400 when model does not exist")
		// Creating a new chat message with malformed content should fail.
		res, err := memberClient.Request(ctx, http.MethodPost, "/api/v2/chats/"+dbChat.ID.String()+"/messages", strings.NewReader(`{malformed json}`))
		require.NoError(t, err)
		defer res.Body.Close()
		apiErr := codersdk.ReadBodyAsError(res)
		require.Contains(t, apiErr.Error(), "Failed to decode chat message")
		_, err = memberClient.CreateChat(ctx)
		require.NoError(t, err, "create chat should succeed")
		chats, err = memberClient.ListChats(ctx)
		require.NoError(t, err, "list chats should succeed")
		require.Len(t, chats, 2, "response should have two chats")
	})
}
+1 -19
View File
@@ -45,7 +45,6 @@ import (
"github.com/coder/coder/v2/codersdk/drpcsdk"
"github.com/coder/coder/v2/coderd/ai"
"github.com/coder/coder/v2/coderd/cryptokeys"
"github.com/coder/coder/v2/coderd/entitlements"
"github.com/coder/coder/v2/coderd/files"
@@ -160,7 +159,6 @@ type Options struct {
Authorizer rbac.Authorizer
AzureCertificates x509.VerifyOptions
GoogleTokenValidator *idtoken.Validator
LanguageModels ai.LanguageModels
GithubOAuth2Config *GithubOAuth2Config
OIDCConfig *OIDCConfig
PrometheusRegistry *prometheus.Registry
@@ -976,7 +974,6 @@ func New(options *Options) *API {
r.Get("/config", api.deploymentValues)
r.Get("/stats", api.deploymentStats)
r.Get("/ssh", api.sshConfig)
r.Get("/llms", api.deploymentLLMs)
})
r.Route("/experiments", func(r chi.Router) {
r.Use(apiKeyMiddleware)
@@ -1019,21 +1016,6 @@ func New(options *Options) *API {
r.Get("/{fileID}", api.fileByID)
r.Post("/", api.postFile)
})
// Chats are an experimental feature
r.Route("/chats", func(r chi.Router) {
r.Use(
apiKeyMiddleware,
httpmw.RequireExperiment(api.Experiments, codersdk.ExperimentAgenticChat),
)
r.Get("/", api.listChats)
r.Post("/", api.postChats)
r.Route("/{chat}", func(r chi.Router) {
r.Use(httpmw.ExtractChatParam(options.Database))
r.Get("/", api.chat)
r.Get("/messages", api.chatMessages)
r.Post("/messages", api.postChatMessages)
})
})
r.Route("/external-auth", func(r chi.Router) {
r.Use(
apiKeyMiddleware,
@@ -1332,7 +1314,7 @@ func New(options *Options) *API {
r.Get("/listening-ports", api.workspaceAgentListeningPorts)
r.Get("/connection", api.workspaceAgentConnection)
r.Get("/containers", api.workspaceAgentListContainers)
r.Post("/containers/devcontainers/container/{container}/recreate", api.workspaceAgentRecreateDevcontainer)
r.Post("/containers/devcontainers/{devcontainer}/recreate", api.workspaceAgentRecreateDevcontainer)
r.Get("/coordinate", api.workspaceAgentClientCoordinate)
// PTY is part of workspaceAppServer.
+20 -3
View File
@@ -22,6 +22,9 @@ type DynamicParameterTemplateParams struct {
// StaticParams is used if the provisioner daemon version does not support dynamic parameters.
StaticParams []*proto.RichParameter
// TemplateID is used to update an existing template instead of creating a new one.
TemplateID uuid.UUID
}
func DynamicParameterTemplate(t *testing.T, client *codersdk.Client, org uuid.UUID, args DynamicParameterTemplateParams) (codersdk.Template, codersdk.TemplateVersion) {
@@ -40,16 +43,30 @@ func DynamicParameterTemplate(t *testing.T, client *codersdk.Client, org uuid.UU
},
}}
version := CreateTemplateVersion(t, client, org, files)
version := CreateTemplateVersion(t, client, org, files, func(request *codersdk.CreateTemplateVersionRequest) {
if args.TemplateID != uuid.Nil {
request.TemplateID = args.TemplateID
}
})
AwaitTemplateVersionJobCompleted(t, client, version.ID)
tpl := CreateTemplate(t, client, org, version.ID)
tplID := args.TemplateID
if args.TemplateID == uuid.Nil {
tpl := CreateTemplate(t, client, org, version.ID)
tplID = tpl.ID
}
var err error
tpl, err = client.UpdateTemplateMeta(t.Context(), tpl.ID, codersdk.UpdateTemplateMeta{
tpl, err := client.UpdateTemplateMeta(t.Context(), tplID, codersdk.UpdateTemplateMeta{
UseClassicParameterFlow: ptr.Ref(false),
})
require.NoError(t, err)
err = client.UpdateActiveTemplateVersion(t.Context(), tpl.ID, codersdk.UpdateActiveTemplateVersion{
ID: version.ID,
})
require.NoError(t, err)
return tpl, version
}
+2 -14
View File
@@ -16,6 +16,8 @@ import (
"golang.org/x/xerrors"
"tailscale.com/tailcfg"
previewtypes "github.com/coder/preview/types"
agentproto "github.com/coder/coder/v2/agent/proto"
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/rbac"
@@ -26,7 +28,6 @@ import (
"github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/provisionersdk/proto"
"github.com/coder/coder/v2/tailnet"
previewtypes "github.com/coder/preview/types"
)
// List is a helper function to reduce boilerplate when converting slices of
@@ -803,19 +804,6 @@ func AgentProtoConnectionActionToAuditAction(action database.AuditAction) (agent
}
}
// Chat converts a database chat row into its SDK representation.
func Chat(chat database.Chat) codersdk.Chat {
	return codersdk.Chat{
		ID:        chat.ID,
		Title:     chat.Title,
		CreatedAt: chat.CreatedAt,
		UpdatedAt: chat.UpdatedAt,
	}
}
// Chats converts a slice of database chat rows using Chat on each element.
func Chats(chats []database.Chat) []codersdk.Chat {
	return List(chats, Chat)
}
func PreviewParameter(param previewtypes.Parameter) codersdk.PreviewParameter {
return codersdk.PreviewParameter{
PreviewParameterData: codersdk.PreviewParameterData{
-42
View File
@@ -1373,10 +1373,6 @@ func (q *querier) DeleteApplicationConnectAPIKeysByUserID(ctx context.Context, u
return q.db.DeleteApplicationConnectAPIKeysByUserID(ctx, userID)
}
// DeleteChat fetches the chat (for its RBAC object) and authorizes the
// delete action before delegating to the underlying store.
func (q *querier) DeleteChat(ctx context.Context, id uuid.UUID) error {
	return deleteQ(q.log, q.auth, q.db.GetChatByID, q.db.DeleteChat)(ctx, id)
}
func (q *querier) DeleteCoordinator(ctx context.Context, id uuid.UUID) error {
if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil {
return err
@@ -1814,22 +1810,6 @@ func (q *querier) GetAuthorizationUserRoles(ctx context.Context, userID uuid.UUI
return q.db.GetAuthorizationUserRoles(ctx, userID)
}
// GetChatByID fetches the chat and authorizes a read against it.
func (q *querier) GetChatByID(ctx context.Context, id uuid.UUID) (database.Chat, error) {
	return fetch(q.log, q.auth, q.db.GetChatByID)(ctx, id)
}
// GetChatMessagesByChatID authorizes access by going through the querier's
// own GetChatByID (which performs the read check), then lists messages.
func (q *querier) GetChatMessagesByChatID(ctx context.Context, chatID uuid.UUID) ([]database.ChatMessage, error) {
	c, err := q.GetChatByID(ctx, chatID)
	if err != nil {
		return nil, err
	}
	return q.db.GetChatMessagesByChatID(ctx, c.ID)
}
// GetChatsByOwnerID lists an owner's chats, post-filtering out any the
// caller is not authorized to read.
func (q *querier) GetChatsByOwnerID(ctx context.Context, ownerID uuid.UUID) ([]database.Chat, error) {
	return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetChatsByOwnerID)(ctx, ownerID)
}
func (q *querier) GetCoordinatorResumeTokenSigningKey(ctx context.Context) (string, error) {
if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil {
return "", err
@@ -3525,21 +3505,6 @@ func (q *querier) InsertAuditLog(ctx context.Context, arg database.InsertAuditLo
return insert(q.log, q.auth, rbac.ResourceAuditLog, q.db.InsertAuditLog)(ctx, arg)
}
// InsertChat authorizes creation of a chat resource owned by arg.OwnerID
// before inserting.
func (q *querier) InsertChat(ctx context.Context, arg database.InsertChatParams) (database.Chat, error) {
	return insert(q.log, q.auth, rbac.ResourceChat.WithOwner(arg.OwnerID.String()), q.db.InsertChat)(ctx, arg)
}
// InsertChatMessages fetches the parent chat via the raw store (no read
// check) and authorizes an update on it — appending messages is modeled as
// updating the chat — before inserting.
func (q *querier) InsertChatMessages(ctx context.Context, arg database.InsertChatMessagesParams) ([]database.ChatMessage, error) {
	c, err := q.db.GetChatByID(ctx, arg.ChatID)
	if err != nil {
		return nil, err
	}
	if err := q.authorizeContext(ctx, policy.ActionUpdate, c); err != nil {
		return nil, err
	}
	return q.db.InsertChatMessages(ctx, arg)
}
func (q *querier) InsertCryptoKey(ctx context.Context, arg database.InsertCryptoKeyParams) (database.CryptoKey, error) {
if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceCryptoKey); err != nil {
return database.CryptoKey{}, err
@@ -4201,13 +4166,6 @@ func (q *querier) UpdateAPIKeyByID(ctx context.Context, arg database.UpdateAPIKe
return update(q.log, q.auth, fetch, q.db.UpdateAPIKeyByID)(ctx, arg)
}
// UpdateChatByID authorizes an update against the existing chat row
// (fetched without a read check) before applying the update.
func (q *querier) UpdateChatByID(ctx context.Context, arg database.UpdateChatByIDParams) error {
	fetch := func(ctx context.Context, arg database.UpdateChatByIDParams) (database.Chat, error) {
		return q.db.GetChatByID(ctx, arg.ID)
	}
	return update(q.log, q.auth, fetch, q.db.UpdateChatByID)(ctx, arg)
}
func (q *querier) UpdateCryptoKeyDeletesAt(ctx context.Context, arg database.UpdateCryptoKeyDeletesAtParams) (database.CryptoKey, error) {
if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceCryptoKey); err != nil {
return database.CryptoKey{}, err
+1 -76
View File
@@ -5059,8 +5059,7 @@ func (s *MethodTestSuite) TestPrebuilds() {
}))
s.Run("GetPrebuildMetrics", s.Subtest(func(_ database.Store, check *expects) {
check.Args().
Asserts(rbac.ResourceWorkspace.All(), policy.ActionRead).
ErrorsWithInMemDB(dbmem.ErrUnimplemented)
Asserts(rbac.ResourceWorkspace.All(), policy.ActionRead)
}))
s.Run("CountInProgressPrebuilds", s.Subtest(func(_ database.Store, check *expects) {
check.Args().
@@ -5549,80 +5548,6 @@ func (s *MethodTestSuite) TestResourcesProvisionerdserver() {
}))
}
// TestChat verifies the RBAC assertions made by every chat-related querier
// method (delete, reads, inserts, update).
func (s *MethodTestSuite) TestChat() {
	// createChat seeds one user with one chat containing one message.
	// NOTE(review): the t parameter is used only for the first dbgen call;
	// the rest use s.T() — functionally the same *testing.T here, but the
	// mix looks accidental.
	createChat := func(t *testing.T, db database.Store) (database.User, database.Chat, database.ChatMessage) {
		t.Helper()
		usr := dbgen.User(t, db, database.User{})
		chat := dbgen.Chat(s.T(), db, database.Chat{
			OwnerID: usr.ID,
		})
		msg := dbgen.ChatMessage(s.T(), db, database.ChatMessage{
			ChatID: chat.ID,
		})
		return usr, chat, msg
	}
	s.Run("DeleteChat", s.Subtest(func(db database.Store, check *expects) {
		_, c, _ := createChat(s.T(), db)
		check.Args(c.ID).Asserts(c, policy.ActionDelete)
	}))
	s.Run("GetChatByID", s.Subtest(func(db database.Store, check *expects) {
		_, c, _ := createChat(s.T(), db)
		check.Args(c.ID).Asserts(c, policy.ActionRead).Returns(c)
	}))
	s.Run("GetChatMessagesByChatID", s.Subtest(func(db database.Store, check *expects) {
		_, c, m := createChat(s.T(), db)
		check.Args(c.ID).Asserts(c, policy.ActionRead).Returns([]database.ChatMessage{m})
	}))
	s.Run("GetChatsByOwnerID", s.Subtest(func(db database.Store, check *expects) {
		// Expect newest-first ordering: u1c2 (created an hour later) before u1c1.
		u1, u1c1, _ := createChat(s.T(), db)
		u1c2 := dbgen.Chat(s.T(), db, database.Chat{
			OwnerID:   u1.ID,
			CreatedAt: u1c1.CreatedAt.Add(time.Hour),
		})
		_, _, _ = createChat(s.T(), db) // other user's chat
		check.Args(u1.ID).Asserts(u1c2, policy.ActionRead, u1c1, policy.ActionRead).Returns([]database.Chat{u1c2, u1c1})
	}))
	s.Run("InsertChat", s.Subtest(func(db database.Store, check *expects) {
		usr := dbgen.User(s.T(), db, database.User{})
		check.Args(database.InsertChatParams{
			OwnerID:   usr.ID,
			Title:     "test chat",
			CreatedAt: dbtime.Now(),
			UpdatedAt: dbtime.Now(),
		}).Asserts(rbac.ResourceChat.WithOwner(usr.ID.String()), policy.ActionCreate)
	}))
	s.Run("InsertChatMessages", s.Subtest(func(db database.Store, check *expects) {
		usr := dbgen.User(s.T(), db, database.User{})
		chat := dbgen.Chat(s.T(), db, database.Chat{
			OwnerID: usr.ID,
		})
		check.Args(database.InsertChatMessagesParams{
			ChatID:    chat.ID,
			CreatedAt: dbtime.Now(),
			Model:     "test-model",
			Provider:  "test-provider",
			Content:   []byte(`[]`),
		}).Asserts(chat, policy.ActionUpdate)
	}))
	s.Run("UpdateChatByID", s.Subtest(func(db database.Store, check *expects) {
		_, c, _ := createChat(s.T(), db)
		check.Args(database.UpdateChatByIDParams{
			ID:        c.ID,
			Title:     "new title",
			UpdatedAt: dbtime.Now(),
		}).Asserts(c, policy.ActionUpdate)
	}))
}
func (s *MethodTestSuite) TestAuthorizePrebuiltWorkspace() {
s.Run("PrebuildDelete/InsertWorkspaceBuild", s.Subtest(func(db database.Store, check *expects) {
u := dbgen.User(s.T(), db, database.User{})
+22 -24
View File
@@ -143,30 +143,6 @@ func APIKey(t testing.TB, db database.Store, seed database.APIKey) (key database
return key, fmt.Sprintf("%s-%s", key.ID, secret)
}
// Chat inserts a chat row, filling any unset seed fields with defaults
// (fresh owner ID, current time, "Test Chat" title).
func Chat(t testing.TB, db database.Store, seed database.Chat) database.Chat {
	chat, err := db.InsertChat(genCtx, database.InsertChatParams{
		OwnerID:   takeFirst(seed.OwnerID, uuid.New()),
		CreatedAt: takeFirst(seed.CreatedAt, dbtime.Now()),
		UpdatedAt: takeFirst(seed.UpdatedAt, dbtime.Now()),
		Title:     takeFirst(seed.Title, "Test Chat"),
	})
	require.NoError(t, err, "insert chat")
	return chat
}
// ChatMessage inserts a single chat message, filling unset seed fields
// with defaults. InsertChatMessages is a batch API; exactly one row is
// expected back and returned.
func ChatMessage(t testing.TB, db database.Store, seed database.ChatMessage) database.ChatMessage {
	msg, err := db.InsertChatMessages(genCtx, database.InsertChatMessagesParams{
		CreatedAt: takeFirst(seed.CreatedAt, dbtime.Now()),
		ChatID:    takeFirst(seed.ChatID, uuid.New()),
		Model:     takeFirst(seed.Model, "train"),
		Provider:  takeFirst(seed.Provider, "thomas"),
		Content:   takeFirstSlice(seed.Content, []byte(`[{"text": "Choo choo!"}]`)),
	})
	require.NoError(t, err, "insert chat message")
	require.Len(t, msg, 1, "insert one chat message did not return exactly one message")
	return msg[0]
}
func WorkspaceAgentPortShare(t testing.TB, db database.Store, orig database.WorkspaceAgentPortShare) database.WorkspaceAgentPortShare {
ps, err := db.UpsertWorkspaceAgentPortShare(genCtx, database.UpsertWorkspaceAgentPortShareParams{
WorkspaceID: takeFirst(orig.WorkspaceID, uuid.New()),
@@ -227,6 +203,17 @@ func WorkspaceAgent(t testing.TB, db database.Store, orig database.WorkspaceAgen
require.NoError(t, err, "update workspace agent first connected at")
}
// If the lifecycle state is "ready", update the agent with the corresponding timestamps
if orig.LifecycleState == database.WorkspaceAgentLifecycleStateReady && orig.StartedAt.Valid && orig.ReadyAt.Valid {
err := db.UpdateWorkspaceAgentLifecycleStateByID(genCtx, database.UpdateWorkspaceAgentLifecycleStateByIDParams{
ID: agt.ID,
LifecycleState: orig.LifecycleState,
StartedAt: orig.StartedAt,
ReadyAt: orig.ReadyAt,
})
require.NoError(t, err, "update workspace agent lifecycle state")
}
if orig.ParentID.UUID == uuid.Nil {
// Add a test antagonist. For every agent we add a deleted sub agent
// to discover cases where deletion should be handled.
@@ -1350,6 +1337,17 @@ func PresetParameter(t testing.TB, db database.Store, seed database.InsertPreset
return parameters
}
// ClaimPrebuild claims an available prebuilt workspace for the given preset,
// reassigning it to newUserID under newName, and returns the claimed row.
func ClaimPrebuild(t testing.TB, db database.Store, newUserID uuid.UUID, newName string, presetID uuid.UUID) database.ClaimPrebuiltWorkspaceRow {
	claimedWorkspace, err := db.ClaimPrebuiltWorkspace(genCtx, database.ClaimPrebuiltWorkspaceParams{
		NewUserID: newUserID,
		NewName:   newName,
		PresetID:  presetID,
	})
	require.NoError(t, err, "claim prebuilt workspace")
	return claimedWorkspace
}
func provisionerJobTiming(t testing.TB, db database.Store, seed database.ProvisionerJobTiming) database.ProvisionerJobTiming {
timing, err := db.InsertProvisionerJobTimings(genCtx, database.InsertProvisionerJobTimingsParams{
JobID: takeFirst(seed.JobID, uuid.New()),
+1 -138
View File
@@ -215,8 +215,6 @@ type data struct {
// New tables
auditLogs []database.AuditLog
chats []database.Chat
chatMessages []database.ChatMessage
cryptoKeys []database.CryptoKey
dbcryptKeys []database.DBCryptKey
files []database.File
@@ -1909,19 +1907,6 @@ func (q *FakeQuerier) DeleteApplicationConnectAPIKeysByUserID(_ context.Context,
return nil
}
// DeleteChat removes the chat with the given ID from the in-memory store,
// returning sql.ErrNoRows when no such chat exists (matching the real DB).
func (q *FakeQuerier) DeleteChat(ctx context.Context, id uuid.UUID) error {
	q.mutex.Lock()
	defer q.mutex.Unlock()
	for i, chat := range q.chats {
		if chat.ID == id {
			q.chats = append(q.chats[:i], q.chats[i+1:]...)
			return nil
		}
	}
	return sql.ErrNoRows
}
func (*FakeQuerier) DeleteCoordinator(context.Context, uuid.UUID) error {
return ErrUnimplemented
}
@@ -2955,47 +2940,6 @@ func (q *FakeQuerier) GetAuthorizationUserRoles(_ context.Context, userID uuid.U
}, nil
}
// GetChatByID returns the chat with the given ID, or sql.ErrNoRows when
// absent (matching the real DB).
func (q *FakeQuerier) GetChatByID(ctx context.Context, id uuid.UUID) (database.Chat, error) {
	q.mutex.RLock()
	defer q.mutex.RUnlock()
	for _, chat := range q.chats {
		if chat.ID == id {
			return chat, nil
		}
	}
	return database.Chat{}, sql.ErrNoRows
}
func (q *FakeQuerier) GetChatMessagesByChatID(ctx context.Context, chatID uuid.UUID) ([]database.ChatMessage, error) {
q.mutex.RLock()
defer q.mutex.RUnlock()
messages := []database.ChatMessage{}
for _, chatMessage := range q.chatMessages {
if chatMessage.ChatID == chatID {
messages = append(messages, chatMessage)
}
}
return messages, nil
}
func (q *FakeQuerier) GetChatsByOwnerID(ctx context.Context, ownerID uuid.UUID) ([]database.Chat, error) {
q.mutex.RLock()
defer q.mutex.RUnlock()
chats := []database.Chat{}
for _, chat := range q.chats {
if chat.OwnerID == ownerID {
chats = append(chats, chat)
}
}
sort.Slice(chats, func(i, j int) bool {
return chats[i].CreatedAt.After(chats[j].CreatedAt)
})
return chats, nil
}
func (q *FakeQuerier) GetCoordinatorResumeTokenSigningKey(_ context.Context) (string, error) {
q.mutex.RLock()
defer q.mutex.RUnlock()
@@ -4326,7 +4270,7 @@ func (q *FakeQuerier) GetParameterSchemasByJobID(_ context.Context, jobID uuid.U
}
func (*FakeQuerier) GetPrebuildMetrics(_ context.Context) ([]database.GetPrebuildMetricsRow, error) {
return nil, ErrUnimplemented
return make([]database.GetPrebuildMetricsRow, 0), nil
}
func (q *FakeQuerier) GetPresetByID(ctx context.Context, presetID uuid.UUID) (database.GetPresetByIDRow, error) {
@@ -8630,66 +8574,6 @@ func (q *FakeQuerier) InsertAuditLog(_ context.Context, arg database.InsertAudit
return alog, nil
}
func (q *FakeQuerier) InsertChat(ctx context.Context, arg database.InsertChatParams) (database.Chat, error) {
err := validateDatabaseType(arg)
if err != nil {
return database.Chat{}, err
}
q.mutex.Lock()
defer q.mutex.Unlock()
chat := database.Chat{
ID: uuid.New(),
CreatedAt: arg.CreatedAt,
UpdatedAt: arg.UpdatedAt,
OwnerID: arg.OwnerID,
Title: arg.Title,
}
q.chats = append(q.chats, chat)
return chat, nil
}
func (q *FakeQuerier) InsertChatMessages(ctx context.Context, arg database.InsertChatMessagesParams) ([]database.ChatMessage, error) {
err := validateDatabaseType(arg)
if err != nil {
return nil, err
}
q.mutex.Lock()
defer q.mutex.Unlock()
id := int64(0)
if len(q.chatMessages) > 0 {
id = q.chatMessages[len(q.chatMessages)-1].ID
}
messages := make([]database.ChatMessage, 0)
rawMessages := make([]json.RawMessage, 0)
err = json.Unmarshal(arg.Content, &rawMessages)
if err != nil {
return nil, err
}
for _, content := range rawMessages {
id++
_ = content
messages = append(messages, database.ChatMessage{
ID: id,
ChatID: arg.ChatID,
CreatedAt: arg.CreatedAt,
Model: arg.Model,
Provider: arg.Provider,
Content: content,
})
}
q.chatMessages = append(q.chatMessages, messages...)
return messages, nil
}
func (q *FakeQuerier) InsertCryptoKey(_ context.Context, arg database.InsertCryptoKeyParams) (database.CryptoKey, error) {
err := validateDatabaseType(arg)
if err != nil {
@@ -10638,27 +10522,6 @@ func (q *FakeQuerier) UpdateAPIKeyByID(_ context.Context, arg database.UpdateAPI
return sql.ErrNoRows
}
func (q *FakeQuerier) UpdateChatByID(ctx context.Context, arg database.UpdateChatByIDParams) error {
err := validateDatabaseType(arg)
if err != nil {
return err
}
q.mutex.Lock()
defer q.mutex.Unlock()
for i, chat := range q.chats {
if chat.ID == arg.ID {
q.chats[i].Title = arg.Title
q.chats[i].UpdatedAt = arg.UpdatedAt
q.chats[i] = chat
return nil
}
}
return sql.ErrNoRows
}
func (q *FakeQuerier) UpdateCryptoKeyDeletesAt(_ context.Context, arg database.UpdateCryptoKeyDeletesAtParams) (database.CryptoKey, error) {
err := validateDatabaseType(arg)
if err != nil {
-49
View File
@@ -249,13 +249,6 @@ func (m queryMetricsStore) DeleteApplicationConnectAPIKeysByUserID(ctx context.C
return err
}
func (m queryMetricsStore) DeleteChat(ctx context.Context, id uuid.UUID) error {
start := time.Now()
r0 := m.s.DeleteChat(ctx, id)
m.queryLatencies.WithLabelValues("DeleteChat").Observe(time.Since(start).Seconds())
return r0
}
func (m queryMetricsStore) DeleteCoordinator(ctx context.Context, id uuid.UUID) error {
start := time.Now()
r0 := m.s.DeleteCoordinator(ctx, id)
@@ -648,27 +641,6 @@ func (m queryMetricsStore) GetAuthorizationUserRoles(ctx context.Context, userID
return row, err
}
func (m queryMetricsStore) GetChatByID(ctx context.Context, id uuid.UUID) (database.Chat, error) {
start := time.Now()
r0, r1 := m.s.GetChatByID(ctx, id)
m.queryLatencies.WithLabelValues("GetChatByID").Observe(time.Since(start).Seconds())
return r0, r1
}
func (m queryMetricsStore) GetChatMessagesByChatID(ctx context.Context, chatID uuid.UUID) ([]database.ChatMessage, error) {
start := time.Now()
r0, r1 := m.s.GetChatMessagesByChatID(ctx, chatID)
m.queryLatencies.WithLabelValues("GetChatMessagesByChatID").Observe(time.Since(start).Seconds())
return r0, r1
}
func (m queryMetricsStore) GetChatsByOwnerID(ctx context.Context, ownerID uuid.UUID) ([]database.Chat, error) {
start := time.Now()
r0, r1 := m.s.GetChatsByOwnerID(ctx, ownerID)
m.queryLatencies.WithLabelValues("GetChatsByOwnerID").Observe(time.Since(start).Seconds())
return r0, r1
}
func (m queryMetricsStore) GetCoordinatorResumeTokenSigningKey(ctx context.Context) (string, error) {
start := time.Now()
r0, r1 := m.s.GetCoordinatorResumeTokenSigningKey(ctx)
@@ -2083,20 +2055,6 @@ func (m queryMetricsStore) InsertAuditLog(ctx context.Context, arg database.Inse
return log, err
}
func (m queryMetricsStore) InsertChat(ctx context.Context, arg database.InsertChatParams) (database.Chat, error) {
start := time.Now()
r0, r1 := m.s.InsertChat(ctx, arg)
m.queryLatencies.WithLabelValues("InsertChat").Observe(time.Since(start).Seconds())
return r0, r1
}
func (m queryMetricsStore) InsertChatMessages(ctx context.Context, arg database.InsertChatMessagesParams) ([]database.ChatMessage, error) {
start := time.Now()
r0, r1 := m.s.InsertChatMessages(ctx, arg)
m.queryLatencies.WithLabelValues("InsertChatMessages").Observe(time.Since(start).Seconds())
return r0, r1
}
func (m queryMetricsStore) InsertCryptoKey(ctx context.Context, arg database.InsertCryptoKeyParams) (database.CryptoKey, error) {
start := time.Now()
key, err := m.s.InsertCryptoKey(ctx, arg)
@@ -2622,13 +2580,6 @@ func (m queryMetricsStore) UpdateAPIKeyByID(ctx context.Context, arg database.Up
return err
}
func (m queryMetricsStore) UpdateChatByID(ctx context.Context, arg database.UpdateChatByIDParams) error {
start := time.Now()
r0 := m.s.UpdateChatByID(ctx, arg)
m.queryLatencies.WithLabelValues("UpdateChatByID").Observe(time.Since(start).Seconds())
return r0
}
func (m queryMetricsStore) UpdateCryptoKeyDeletesAt(ctx context.Context, arg database.UpdateCryptoKeyDeletesAtParams) (database.CryptoKey, error) {
start := time.Now()
key, err := m.s.UpdateCryptoKeyDeletesAt(ctx, arg)
-103
View File
@@ -376,20 +376,6 @@ func (mr *MockStoreMockRecorder) DeleteApplicationConnectAPIKeysByUserID(ctx, us
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteApplicationConnectAPIKeysByUserID", reflect.TypeOf((*MockStore)(nil).DeleteApplicationConnectAPIKeysByUserID), ctx, userID)
}
// DeleteChat mocks base method.
func (m *MockStore) DeleteChat(ctx context.Context, id uuid.UUID) error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "DeleteChat", ctx, id)
ret0, _ := ret[0].(error)
return ret0
}
// DeleteChat indicates an expected call of DeleteChat.
func (mr *MockStoreMockRecorder) DeleteChat(ctx, id any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteChat", reflect.TypeOf((*MockStore)(nil).DeleteChat), ctx, id)
}
// DeleteCoordinator mocks base method.
func (m *MockStore) DeleteCoordinator(ctx context.Context, id uuid.UUID) error {
m.ctrl.T.Helper()
@@ -1292,51 +1278,6 @@ func (mr *MockStoreMockRecorder) GetAuthorizedWorkspacesAndAgentsByOwnerID(ctx,
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAuthorizedWorkspacesAndAgentsByOwnerID", reflect.TypeOf((*MockStore)(nil).GetAuthorizedWorkspacesAndAgentsByOwnerID), ctx, ownerID, prepared)
}
// GetChatByID mocks base method.
func (m *MockStore) GetChatByID(ctx context.Context, id uuid.UUID) (database.Chat, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetChatByID", ctx, id)
ret0, _ := ret[0].(database.Chat)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetChatByID indicates an expected call of GetChatByID.
func (mr *MockStoreMockRecorder) GetChatByID(ctx, id any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetChatByID", reflect.TypeOf((*MockStore)(nil).GetChatByID), ctx, id)
}
// GetChatMessagesByChatID mocks base method.
func (m *MockStore) GetChatMessagesByChatID(ctx context.Context, chatID uuid.UUID) ([]database.ChatMessage, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetChatMessagesByChatID", ctx, chatID)
ret0, _ := ret[0].([]database.ChatMessage)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetChatMessagesByChatID indicates an expected call of GetChatMessagesByChatID.
func (mr *MockStoreMockRecorder) GetChatMessagesByChatID(ctx, chatID any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetChatMessagesByChatID", reflect.TypeOf((*MockStore)(nil).GetChatMessagesByChatID), ctx, chatID)
}
// GetChatsByOwnerID mocks base method.
func (m *MockStore) GetChatsByOwnerID(ctx context.Context, ownerID uuid.UUID) ([]database.Chat, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetChatsByOwnerID", ctx, ownerID)
ret0, _ := ret[0].([]database.Chat)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetChatsByOwnerID indicates an expected call of GetChatsByOwnerID.
func (mr *MockStoreMockRecorder) GetChatsByOwnerID(ctx, ownerID any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetChatsByOwnerID", reflect.TypeOf((*MockStore)(nil).GetChatsByOwnerID), ctx, ownerID)
}
// GetCoordinatorResumeTokenSigningKey mocks base method.
func (m *MockStore) GetCoordinatorResumeTokenSigningKey(ctx context.Context) (string, error) {
m.ctrl.T.Helper()
@@ -4411,36 +4352,6 @@ func (mr *MockStoreMockRecorder) InsertAuditLog(ctx, arg any) *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertAuditLog", reflect.TypeOf((*MockStore)(nil).InsertAuditLog), ctx, arg)
}
// InsertChat mocks base method.
func (m *MockStore) InsertChat(ctx context.Context, arg database.InsertChatParams) (database.Chat, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "InsertChat", ctx, arg)
ret0, _ := ret[0].(database.Chat)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// InsertChat indicates an expected call of InsertChat.
func (mr *MockStoreMockRecorder) InsertChat(ctx, arg any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertChat", reflect.TypeOf((*MockStore)(nil).InsertChat), ctx, arg)
}
// InsertChatMessages mocks base method.
func (m *MockStore) InsertChatMessages(ctx context.Context, arg database.InsertChatMessagesParams) ([]database.ChatMessage, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "InsertChatMessages", ctx, arg)
ret0, _ := ret[0].([]database.ChatMessage)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// InsertChatMessages indicates an expected call of InsertChatMessages.
func (mr *MockStoreMockRecorder) InsertChatMessages(ctx, arg any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertChatMessages", reflect.TypeOf((*MockStore)(nil).InsertChatMessages), ctx, arg)
}
// InsertCryptoKey mocks base method.
func (m *MockStore) InsertCryptoKey(ctx context.Context, arg database.InsertCryptoKeyParams) (database.CryptoKey, error) {
m.ctrl.T.Helper()
@@ -5575,20 +5486,6 @@ func (mr *MockStoreMockRecorder) UpdateAPIKeyByID(ctx, arg any) *gomock.Call {
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateAPIKeyByID", reflect.TypeOf((*MockStore)(nil).UpdateAPIKeyByID), ctx, arg)
}
// UpdateChatByID mocks base method.
func (m *MockStore) UpdateChatByID(ctx context.Context, arg database.UpdateChatByIDParams) error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "UpdateChatByID", ctx, arg)
ret0, _ := ret[0].(error)
return ret0
}
// UpdateChatByID indicates an expected call of UpdateChatByID.
func (mr *MockStoreMockRecorder) UpdateChatByID(ctx, arg any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateChatByID", reflect.TypeOf((*MockStore)(nil).UpdateChatByID), ctx, arg)
}
// UpdateCryptoKeyDeletesAt mocks base method.
func (m *MockStore) UpdateCryptoKeyDeletesAt(ctx context.Context, arg database.UpdateCryptoKeyDeletesAtParams) (database.CryptoKey, error) {
m.ctrl.T.Helper()
-40
View File
@@ -822,32 +822,6 @@ CREATE TABLE audit_logs (
resource_icon text NOT NULL
);
CREATE TABLE chat_messages (
id bigint NOT NULL,
chat_id uuid NOT NULL,
created_at timestamp with time zone DEFAULT now() NOT NULL,
model text NOT NULL,
provider text NOT NULL,
content jsonb NOT NULL
);
CREATE SEQUENCE chat_messages_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE chat_messages_id_seq OWNED BY chat_messages.id;
CREATE TABLE chats (
id uuid DEFAULT gen_random_uuid() NOT NULL,
owner_id uuid NOT NULL,
created_at timestamp with time zone DEFAULT now() NOT NULL,
updated_at timestamp with time zone DEFAULT now() NOT NULL,
title text NOT NULL
);
CREATE TABLE crypto_keys (
feature crypto_key_feature NOT NULL,
sequence integer NOT NULL,
@@ -2342,8 +2316,6 @@ CREATE VIEW workspaces_expanded AS
COMMENT ON VIEW workspaces_expanded IS 'Joins in the display name information such as username, avatar, and organization name.';
ALTER TABLE ONLY chat_messages ALTER COLUMN id SET DEFAULT nextval('chat_messages_id_seq'::regclass);
ALTER TABLE ONLY licenses ALTER COLUMN id SET DEFAULT nextval('licenses_id_seq'::regclass);
ALTER TABLE ONLY provisioner_job_logs ALTER COLUMN id SET DEFAULT nextval('provisioner_job_logs_id_seq'::regclass);
@@ -2365,12 +2337,6 @@ ALTER TABLE ONLY api_keys
ALTER TABLE ONLY audit_logs
ADD CONSTRAINT audit_logs_pkey PRIMARY KEY (id);
ALTER TABLE ONLY chat_messages
ADD CONSTRAINT chat_messages_pkey PRIMARY KEY (id);
ALTER TABLE ONLY chats
ADD CONSTRAINT chats_pkey PRIMARY KEY (id);
ALTER TABLE ONLY crypto_keys
ADD CONSTRAINT crypto_keys_pkey PRIMARY KEY (feature, sequence);
@@ -2867,12 +2833,6 @@ forward without requiring a migration to clean up historical data.';
ALTER TABLE ONLY api_keys
ADD CONSTRAINT api_keys_user_id_uuid_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ALTER TABLE ONLY chat_messages
ADD CONSTRAINT chat_messages_chat_id_fkey FOREIGN KEY (chat_id) REFERENCES chats(id) ON DELETE CASCADE;
ALTER TABLE ONLY chats
ADD CONSTRAINT chats_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES users(id) ON DELETE CASCADE;
ALTER TABLE ONLY crypto_keys
ADD CONSTRAINT crypto_keys_secret_key_id_fkey FOREIGN KEY (secret_key_id) REFERENCES dbcrypt_keys(active_key_digest);
@@ -7,8 +7,6 @@ type ForeignKeyConstraint string
// ForeignKeyConstraint enums.
const (
ForeignKeyAPIKeysUserIDUUID ForeignKeyConstraint = "api_keys_user_id_uuid_fkey" // ALTER TABLE ONLY api_keys ADD CONSTRAINT api_keys_user_id_uuid_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE;
ForeignKeyChatMessagesChatID ForeignKeyConstraint = "chat_messages_chat_id_fkey" // ALTER TABLE ONLY chat_messages ADD CONSTRAINT chat_messages_chat_id_fkey FOREIGN KEY (chat_id) REFERENCES chats(id) ON DELETE CASCADE;
ForeignKeyChatsOwnerID ForeignKeyConstraint = "chats_owner_id_fkey" // ALTER TABLE ONLY chats ADD CONSTRAINT chats_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES users(id) ON DELETE CASCADE;
ForeignKeyCryptoKeysSecretKeyID ForeignKeyConstraint = "crypto_keys_secret_key_id_fkey" // ALTER TABLE ONLY crypto_keys ADD CONSTRAINT crypto_keys_secret_key_id_fkey FOREIGN KEY (secret_key_id) REFERENCES dbcrypt_keys(active_key_digest);
ForeignKeyGitAuthLinksOauthAccessTokenKeyID ForeignKeyConstraint = "git_auth_links_oauth_access_token_key_id_fkey" // ALTER TABLE ONLY external_auth_links ADD CONSTRAINT git_auth_links_oauth_access_token_key_id_fkey FOREIGN KEY (oauth_access_token_key_id) REFERENCES dbcrypt_keys(active_key_digest);
ForeignKeyGitAuthLinksOauthRefreshTokenKeyID ForeignKeyConstraint = "git_auth_links_oauth_refresh_token_key_id_fkey" // ALTER TABLE ONLY external_auth_links ADD CONSTRAINT git_auth_links_oauth_refresh_token_key_id_fkey FOREIGN KEY (oauth_refresh_token_key_id) REFERENCES dbcrypt_keys(active_key_digest);
@@ -0,0 +1 @@
-- noop
@@ -0,0 +1,2 @@
DROP TABLE IF EXISTS chat_messages;
DROP TABLE IF EXISTS chats;
-5
View File
@@ -611,8 +611,3 @@ func (m WorkspaceAgentVolumeResourceMonitor) Debounce(
return m.DebouncedUntil, false
}
func (c Chat) RBACObject() rbac.Object {
return rbac.ResourceChat.WithID(c.ID).
WithOwner(c.OwnerID.String())
}
-17
View File
@@ -2781,23 +2781,6 @@ type AuditLog struct {
ResourceIcon string `db:"resource_icon" json:"resource_icon"`
}
type Chat struct {
ID uuid.UUID `db:"id" json:"id"`
OwnerID uuid.UUID `db:"owner_id" json:"owner_id"`
CreatedAt time.Time `db:"created_at" json:"created_at"`
UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
Title string `db:"title" json:"title"`
}
type ChatMessage struct {
ID int64 `db:"id" json:"id"`
ChatID uuid.UUID `db:"chat_id" json:"chat_id"`
CreatedAt time.Time `db:"created_at" json:"created_at"`
Model string `db:"model" json:"model"`
Provider string `db:"provider" json:"provider"`
Content json.RawMessage `db:"content" json:"content"`
}
type CryptoKey struct {
Feature CryptoKeyFeature `db:"feature" json:"feature"`
Sequence int32 `db:"sequence" json:"sequence"`
-7
View File
@@ -79,7 +79,6 @@ type sqlcQuerier interface {
// be recreated.
DeleteAllWebpushSubscriptions(ctx context.Context) error
DeleteApplicationConnectAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error
DeleteChat(ctx context.Context, id uuid.UUID) error
DeleteCoordinator(ctx context.Context, id uuid.UUID) error
DeleteCryptoKey(ctx context.Context, arg DeleteCryptoKeyParams) (CryptoKey, error)
DeleteCustomRole(ctx context.Context, arg DeleteCustomRoleParams) error
@@ -154,9 +153,6 @@ type sqlcQuerier interface {
// This function returns roles for authorization purposes. Implied member roles
// are included.
GetAuthorizationUserRoles(ctx context.Context, userID uuid.UUID) (GetAuthorizationUserRolesRow, error)
GetChatByID(ctx context.Context, id uuid.UUID) (Chat, error)
GetChatMessagesByChatID(ctx context.Context, chatID uuid.UUID) ([]ChatMessage, error)
GetChatsByOwnerID(ctx context.Context, ownerID uuid.UUID) ([]Chat, error)
GetCoordinatorResumeTokenSigningKey(ctx context.Context) (string, error)
GetCryptoKeyByFeatureAndSequence(ctx context.Context, arg GetCryptoKeyByFeatureAndSequenceParams) (CryptoKey, error)
GetCryptoKeys(ctx context.Context) ([]CryptoKey, error)
@@ -472,8 +468,6 @@ type sqlcQuerier interface {
// every member of the org.
InsertAllUsersGroup(ctx context.Context, organizationID uuid.UUID) (Group, error)
InsertAuditLog(ctx context.Context, arg InsertAuditLogParams) (AuditLog, error)
InsertChat(ctx context.Context, arg InsertChatParams) (Chat, error)
InsertChatMessages(ctx context.Context, arg InsertChatMessagesParams) ([]ChatMessage, error)
InsertCryptoKey(ctx context.Context, arg InsertCryptoKeyParams) (CryptoKey, error)
InsertCustomRole(ctx context.Context, arg InsertCustomRoleParams) (CustomRole, error)
InsertDBCryptKey(ctx context.Context, arg InsertDBCryptKeyParams) error
@@ -567,7 +561,6 @@ type sqlcQuerier interface {
UnarchiveTemplateVersion(ctx context.Context, arg UnarchiveTemplateVersionParams) error
UnfavoriteWorkspace(ctx context.Context, id uuid.UUID) error
UpdateAPIKeyByID(ctx context.Context, arg UpdateAPIKeyByIDParams) error
UpdateChatByID(ctx context.Context, arg UpdateChatByIDParams) error
UpdateCryptoKeyDeletesAt(ctx context.Context, arg UpdateCryptoKeyDeletesAtParams) (CryptoKey, error)
UpdateCustomRole(ctx context.Context, arg UpdateCustomRoleParams) (CustomRole, error)
UpdateExternalAuthLink(ctx context.Context, arg UpdateExternalAuthLinkParams) (ExternalAuthLink, error)
+6 -202
View File
@@ -766,207 +766,6 @@ func (q *sqlQuerier) InsertAuditLog(ctx context.Context, arg InsertAuditLogParam
return i, err
}
const deleteChat = `-- name: DeleteChat :exec
DELETE FROM chats WHERE id = $1
`
func (q *sqlQuerier) DeleteChat(ctx context.Context, id uuid.UUID) error {
_, err := q.db.ExecContext(ctx, deleteChat, id)
return err
}
const getChatByID = `-- name: GetChatByID :one
SELECT id, owner_id, created_at, updated_at, title FROM chats
WHERE id = $1
`
func (q *sqlQuerier) GetChatByID(ctx context.Context, id uuid.UUID) (Chat, error) {
row := q.db.QueryRowContext(ctx, getChatByID, id)
var i Chat
err := row.Scan(
&i.ID,
&i.OwnerID,
&i.CreatedAt,
&i.UpdatedAt,
&i.Title,
)
return i, err
}
const getChatMessagesByChatID = `-- name: GetChatMessagesByChatID :many
SELECT id, chat_id, created_at, model, provider, content FROM chat_messages
WHERE chat_id = $1
ORDER BY created_at ASC
`
func (q *sqlQuerier) GetChatMessagesByChatID(ctx context.Context, chatID uuid.UUID) ([]ChatMessage, error) {
rows, err := q.db.QueryContext(ctx, getChatMessagesByChatID, chatID)
if err != nil {
return nil, err
}
defer rows.Close()
var items []ChatMessage
for rows.Next() {
var i ChatMessage
if err := rows.Scan(
&i.ID,
&i.ChatID,
&i.CreatedAt,
&i.Model,
&i.Provider,
&i.Content,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const getChatsByOwnerID = `-- name: GetChatsByOwnerID :many
SELECT id, owner_id, created_at, updated_at, title FROM chats
WHERE owner_id = $1
ORDER BY created_at DESC
`
func (q *sqlQuerier) GetChatsByOwnerID(ctx context.Context, ownerID uuid.UUID) ([]Chat, error) {
rows, err := q.db.QueryContext(ctx, getChatsByOwnerID, ownerID)
if err != nil {
return nil, err
}
defer rows.Close()
var items []Chat
for rows.Next() {
var i Chat
if err := rows.Scan(
&i.ID,
&i.OwnerID,
&i.CreatedAt,
&i.UpdatedAt,
&i.Title,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const insertChat = `-- name: InsertChat :one
INSERT INTO chats (owner_id, created_at, updated_at, title)
VALUES ($1, $2, $3, $4)
RETURNING id, owner_id, created_at, updated_at, title
`
type InsertChatParams struct {
OwnerID uuid.UUID `db:"owner_id" json:"owner_id"`
CreatedAt time.Time `db:"created_at" json:"created_at"`
UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
Title string `db:"title" json:"title"`
}
func (q *sqlQuerier) InsertChat(ctx context.Context, arg InsertChatParams) (Chat, error) {
row := q.db.QueryRowContext(ctx, insertChat,
arg.OwnerID,
arg.CreatedAt,
arg.UpdatedAt,
arg.Title,
)
var i Chat
err := row.Scan(
&i.ID,
&i.OwnerID,
&i.CreatedAt,
&i.UpdatedAt,
&i.Title,
)
return i, err
}
const insertChatMessages = `-- name: InsertChatMessages :many
INSERT INTO chat_messages (chat_id, created_at, model, provider, content)
SELECT
$1 :: uuid AS chat_id,
$2 :: timestamptz AS created_at,
$3 :: VARCHAR(127) AS model,
$4 :: VARCHAR(127) AS provider,
jsonb_array_elements($5 :: jsonb) AS content
RETURNING chat_messages.id, chat_messages.chat_id, chat_messages.created_at, chat_messages.model, chat_messages.provider, chat_messages.content
`
type InsertChatMessagesParams struct {
ChatID uuid.UUID `db:"chat_id" json:"chat_id"`
CreatedAt time.Time `db:"created_at" json:"created_at"`
Model string `db:"model" json:"model"`
Provider string `db:"provider" json:"provider"`
Content json.RawMessage `db:"content" json:"content"`
}
func (q *sqlQuerier) InsertChatMessages(ctx context.Context, arg InsertChatMessagesParams) ([]ChatMessage, error) {
rows, err := q.db.QueryContext(ctx, insertChatMessages,
arg.ChatID,
arg.CreatedAt,
arg.Model,
arg.Provider,
arg.Content,
)
if err != nil {
return nil, err
}
defer rows.Close()
var items []ChatMessage
for rows.Next() {
var i ChatMessage
if err := rows.Scan(
&i.ID,
&i.ChatID,
&i.CreatedAt,
&i.Model,
&i.Provider,
&i.Content,
); err != nil {
return nil, err
}
items = append(items, i)
}
if err := rows.Close(); err != nil {
return nil, err
}
if err := rows.Err(); err != nil {
return nil, err
}
return items, nil
}
const updateChatByID = `-- name: UpdateChatByID :exec
UPDATE chats
SET title = $2, updated_at = $3
WHERE id = $1
`
type UpdateChatByIDParams struct {
ID uuid.UUID `db:"id" json:"id"`
Title string `db:"title" json:"title"`
UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
}
func (q *sqlQuerier) UpdateChatByID(ctx context.Context, arg UpdateChatByIDParams) error {
_, err := q.db.ExecContext(ctx, updateChatByID, arg.ID, arg.Title, arg.UpdatedAt)
return err
}
const deleteCryptoKey = `-- name: DeleteCryptoKey :one
UPDATE crypto_keys
SET secret = NULL, secret_key_id = NULL
@@ -19559,7 +19358,12 @@ WHERE
provisioner_jobs.completed_at IS NOT NULL AND
($1 :: timestamptz) - provisioner_jobs.completed_at > (INTERVAL '1 millisecond' * (templates.failure_ttl / 1000000))
)
) AND workspaces.deleted = 'false'
)
AND workspaces.deleted = 'false'
-- Prebuilt workspaces (identified by having the prebuilds system user as owner_id)
-- should not be considered by the lifecycle executor, as they are handled by the
-- prebuilds reconciliation loop.
AND workspaces.owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID
`
type GetWorkspacesEligibleForTransitionRow struct {
-36
View File
@@ -1,36 +0,0 @@
-- name: InsertChat :one
INSERT INTO chats (owner_id, created_at, updated_at, title)
VALUES ($1, $2, $3, $4)
RETURNING *;
-- name: UpdateChatByID :exec
UPDATE chats
SET title = $2, updated_at = $3
WHERE id = $1;
-- name: GetChatsByOwnerID :many
SELECT * FROM chats
WHERE owner_id = $1
ORDER BY created_at DESC;
-- name: GetChatByID :one
SELECT * FROM chats
WHERE id = $1;
-- name: InsertChatMessages :many
INSERT INTO chat_messages (chat_id, created_at, model, provider, content)
SELECT
@chat_id :: uuid AS chat_id,
@created_at :: timestamptz AS created_at,
@model :: VARCHAR(127) AS model,
@provider :: VARCHAR(127) AS provider,
jsonb_array_elements(@content :: jsonb) AS content
RETURNING chat_messages.*;
-- name: GetChatMessagesByChatID :many
SELECT * FROM chat_messages
WHERE chat_id = $1
ORDER BY created_at ASC;
-- name: DeleteChat :exec
DELETE FROM chats WHERE id = $1;
+6 -1
View File
@@ -758,7 +758,12 @@ WHERE
provisioner_jobs.completed_at IS NOT NULL AND
(@now :: timestamptz) - provisioner_jobs.completed_at > (INTERVAL '1 millisecond' * (templates.failure_ttl / 1000000))
)
) AND workspaces.deleted = 'false';
)
AND workspaces.deleted = 'false'
-- Prebuilt workspaces (identified by having the prebuilds system user as owner_id)
-- should not be considered by the lifecycle executor, as they are handled by the
-- prebuilds reconciliation loop.
AND workspaces.owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID;
-- name: UpdateWorkspaceDormantDeletingAt :one
UPDATE
-2
View File
@@ -9,8 +9,6 @@ const (
UniqueAgentStatsPkey UniqueConstraint = "agent_stats_pkey" // ALTER TABLE ONLY workspace_agent_stats ADD CONSTRAINT agent_stats_pkey PRIMARY KEY (id);
UniqueAPIKeysPkey UniqueConstraint = "api_keys_pkey" // ALTER TABLE ONLY api_keys ADD CONSTRAINT api_keys_pkey PRIMARY KEY (id);
UniqueAuditLogsPkey UniqueConstraint = "audit_logs_pkey" // ALTER TABLE ONLY audit_logs ADD CONSTRAINT audit_logs_pkey PRIMARY KEY (id);
UniqueChatMessagesPkey UniqueConstraint = "chat_messages_pkey" // ALTER TABLE ONLY chat_messages ADD CONSTRAINT chat_messages_pkey PRIMARY KEY (id);
UniqueChatsPkey UniqueConstraint = "chats_pkey" // ALTER TABLE ONLY chats ADD CONSTRAINT chats_pkey PRIMARY KEY (id);
UniqueCryptoKeysPkey UniqueConstraint = "crypto_keys_pkey" // ALTER TABLE ONLY crypto_keys ADD CONSTRAINT crypto_keys_pkey PRIMARY KEY (feature, sequence);
UniqueCustomRolesUniqueKey UniqueConstraint = "custom_roles_unique_key" // ALTER TABLE ONLY custom_roles ADD CONSTRAINT custom_roles_unique_key UNIQUE (name, organization_id);
UniqueDbcryptKeysActiveKeyDigestKey UniqueConstraint = "dbcrypt_keys_active_key_digest_key" // ALTER TABLE ONLY dbcrypt_keys ADD CONSTRAINT dbcrypt_keys_active_key_digest_key UNIQUE (active_key_digest);
-25
View File
@@ -1,11 +1,8 @@
package coderd
import (
"context"
"net/http"
"github.com/kylecarbs/aisdk-go"
"github.com/coder/coder/v2/coderd/httpapi"
"github.com/coder/coder/v2/coderd/rbac"
"github.com/coder/coder/v2/coderd/rbac/policy"
@@ -87,25 +84,3 @@ func buildInfoHandler(resp codersdk.BuildInfoResponse) http.HandlerFunc {
func (api *API) sshConfig(rw http.ResponseWriter, r *http.Request) {
httpapi.Write(r.Context(), rw, http.StatusOK, api.SSHConfig)
}
type LanguageModel struct {
codersdk.LanguageModel
Provider func(ctx context.Context, messages []aisdk.Message, thinking bool) (aisdk.DataStream, error)
}
// @Summary Get language models
// @ID get-language-models
// @Security CoderSessionToken
// @Produce json
// @Tags General
// @Success 200 {object} codersdk.LanguageModelConfig
// @Router /deployment/llms [get]
func (api *API) deploymentLLMs(rw http.ResponseWriter, r *http.Request) {
models := make([]codersdk.LanguageModel, 0, len(api.LanguageModels))
for _, model := range api.LanguageModels {
models = append(models, model.LanguageModel)
}
httpapi.Write(r.Context(), rw, http.StatusOK, codersdk.LanguageModelConfig{
Models: models,
})
}
@@ -0,0 +1,2 @@
//go:generate mockgen -destination ./rendermock.go -package rendermock github.com/coder/coder/v2/coderd/dynamicparameters Renderer
package rendermock
@@ -0,0 +1,71 @@
// Code generated by MockGen. DO NOT EDIT.
// Source: github.com/coder/coder/v2/coderd/dynamicparameters (interfaces: Renderer)
//
// Generated by this command:
//
// mockgen -destination ./rendermock.go -package rendermock github.com/coder/coder/v2/coderd/dynamicparameters Renderer
//
// Package rendermock is a generated GoMock package.
package rendermock
import (
context "context"
reflect "reflect"
preview "github.com/coder/preview"
uuid "github.com/google/uuid"
hcl "github.com/hashicorp/hcl/v2"
gomock "go.uber.org/mock/gomock"
)
// MockRenderer is a mock of Renderer interface.
type MockRenderer struct {
ctrl *gomock.Controller
recorder *MockRendererMockRecorder
isgomock struct{}
}
// MockRendererMockRecorder is the mock recorder for MockRenderer.
type MockRendererMockRecorder struct {
mock *MockRenderer
}
// NewMockRenderer creates a new mock instance.
func NewMockRenderer(ctrl *gomock.Controller) *MockRenderer {
mock := &MockRenderer{ctrl: ctrl}
mock.recorder = &MockRendererMockRecorder{mock}
return mock
}
// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockRenderer) EXPECT() *MockRendererMockRecorder {
return m.recorder
}
// Close mocks base method.
func (m *MockRenderer) Close() {
m.ctrl.T.Helper()
m.ctrl.Call(m, "Close")
}
// Close indicates an expected call of Close.
func (mr *MockRendererMockRecorder) Close() *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockRenderer)(nil).Close))
}
// Render mocks base method.
func (m *MockRenderer) Render(ctx context.Context, ownerID uuid.UUID, values map[string]string) (*preview.Output, hcl.Diagnostics) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "Render", ctx, ownerID, values)
ret0, _ := ret[0].(*preview.Output)
ret1, _ := ret[1].(hcl.Diagnostics)
return ret0, ret1
}
// Render indicates an expected call of Render.
func (mr *MockRendererMockRecorder) Render(ctx, ownerID, values any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Render", reflect.TypeOf((*MockRenderer)(nil).Render), ctx, ownerID, values)
}
+8 -2
View File
@@ -169,9 +169,15 @@ func ResolveParameters(
parameterNames[parameter.Name] = struct{}{}
if !firstBuild && !parameter.Mutable {
originalValue, ok := originalValues[parameter.Name]
// Immutable parameters should not be changed after the first build.
// They can match the original value though!
if parameter.Value.AsString() != originalValues[parameter.Name].Value {
// If the value matches the original value, that is fine.
//
// If the original value is not set, that means this is a new parameter. New
// immutable parameters are allowed. This is an opinionated choice to prevent
// workspaces failing to update or delete. Ideally we would block this, as
// immutable parameters should only be able to be set at creation time.
if ok && parameter.Value.AsString() != originalValue.Value {
var src *hcl.Range
if parameter.Source != nil {
src = &parameter.Source.HCLBlock().TypeRange
+59
View File
@@ -0,0 +1,59 @@
package dynamicparameters_test
import (
"testing"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
"go.uber.org/mock/gomock"
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/dynamicparameters"
"github.com/coder/coder/v2/coderd/dynamicparameters/rendermock"
"github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/testutil"
"github.com/coder/preview"
previewtypes "github.com/coder/preview/types"
"github.com/coder/terraform-provider-coder/v2/provider"
)
func TestResolveParameters(t *testing.T) {
t.Parallel()
t.Run("NewImmutable", func(t *testing.T) {
t.Parallel()
ctrl := gomock.NewController(t)
render := rendermock.NewMockRenderer(ctrl)
// A single immutable parameter with no previous value.
render.EXPECT().
Render(gomock.Any(), gomock.Any(), gomock.Any()).
AnyTimes().
Return(&preview.Output{
Parameters: []previewtypes.Parameter{
{
ParameterData: previewtypes.ParameterData{
Name: "immutable",
Type: previewtypes.ParameterTypeString,
FormType: provider.ParameterFormTypeInput,
Mutable: false,
DefaultValue: previewtypes.StringLiteral("foo"),
Required: true,
},
Value: previewtypes.StringLiteral("foo"),
Diagnostics: nil,
},
},
}, nil)
ctx := testutil.Context(t, testutil.WaitShort)
values, err := dynamicparameters.ResolveParameters(ctx, uuid.New(), render, false,
[]database.WorkspaceBuildParameter{}, // No previous values
[]codersdk.WorkspaceBuildParameter{}, // No new build values
[]database.TemplateVersionPresetParameter{}, // No preset values
)
require.NoError(t, err)
require.Equal(t, map[string]string{"immutable": "foo"}, values)
})
}
-59
View File
@@ -1,59 +0,0 @@
package httpmw
import (
"context"
"net/http"
"github.com/go-chi/chi/v5"
"github.com/google/uuid"
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/httpapi"
"github.com/coder/coder/v2/codersdk"
)
type chatContextKey struct{}
func ChatParam(r *http.Request) database.Chat {
chat, ok := r.Context().Value(chatContextKey{}).(database.Chat)
if !ok {
panic("developer error: chat param middleware not provided")
}
return chat
}
func ExtractChatParam(db database.Store) func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
arg := chi.URLParam(r, "chat")
if arg == "" {
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
Message: "\"chat\" must be provided.",
})
return
}
chatID, err := uuid.Parse(arg)
if err != nil {
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
Message: "Invalid chat ID.",
})
return
}
chat, err := db.GetChatByID(ctx, chatID)
if httpapi.Is404Error(err) {
httpapi.ResourceNotFound(rw)
return
}
if err != nil {
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
Message: "Failed to get chat.",
Detail: err.Error(),
})
return
}
ctx = context.WithValue(ctx, chatContextKey{}, chat)
next.ServeHTTP(rw, r.WithContext(ctx))
})
}
}
-150
View File
@@ -1,150 +0,0 @@
package httpmw_test
import (
"context"
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/go-chi/chi/v5"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/database/dbgen"
"github.com/coder/coder/v2/coderd/database/dbtestutil"
"github.com/coder/coder/v2/coderd/database/dbtime"
"github.com/coder/coder/v2/coderd/httpmw"
"github.com/coder/coder/v2/codersdk"
)
func TestExtractChat(t *testing.T) {
t.Parallel()
setupAuthentication := func(db database.Store) (*http.Request, database.User) {
r := httptest.NewRequest("GET", "/", nil)
user := dbgen.User(t, db, database.User{
ID: uuid.New(),
})
_, token := dbgen.APIKey(t, db, database.APIKey{
UserID: user.ID,
})
r.Header.Set(codersdk.SessionTokenHeader, token)
r = r.WithContext(context.WithValue(r.Context(), chi.RouteCtxKey, chi.NewRouteContext()))
return r, user
}
t.Run("None", func(t *testing.T) {
t.Parallel()
var (
db, _ = dbtestutil.NewDB(t)
rw = httptest.NewRecorder()
r, _ = setupAuthentication(db)
rtr = chi.NewRouter()
)
rtr.Use(
httpmw.ExtractAPIKeyMW(httpmw.ExtractAPIKeyConfig{
DB: db,
RedirectToLogin: false,
}),
httpmw.ExtractChatParam(db),
)
rtr.Get("/", nil)
rtr.ServeHTTP(rw, r)
res := rw.Result()
defer res.Body.Close()
require.Equal(t, http.StatusBadRequest, res.StatusCode)
})
t.Run("InvalidUUID", func(t *testing.T) {
t.Parallel()
var (
db, _ = dbtestutil.NewDB(t)
rw = httptest.NewRecorder()
r, _ = setupAuthentication(db)
rtr = chi.NewRouter()
)
chi.RouteContext(r.Context()).URLParams.Add("chat", "not-a-uuid")
rtr.Use(
httpmw.ExtractAPIKeyMW(httpmw.ExtractAPIKeyConfig{
DB: db,
RedirectToLogin: false,
}),
httpmw.ExtractChatParam(db),
)
rtr.Get("/", nil)
rtr.ServeHTTP(rw, r)
res := rw.Result()
defer res.Body.Close()
require.Equal(t, http.StatusBadRequest, res.StatusCode) // Changed from NotFound in org test to BadRequest as per chat.go
})
t.Run("NotFound", func(t *testing.T) {
t.Parallel()
var (
db, _ = dbtestutil.NewDB(t)
rw = httptest.NewRecorder()
r, _ = setupAuthentication(db)
rtr = chi.NewRouter()
)
chi.RouteContext(r.Context()).URLParams.Add("chat", uuid.NewString())
rtr.Use(
httpmw.ExtractAPIKeyMW(httpmw.ExtractAPIKeyConfig{
DB: db,
RedirectToLogin: false,
}),
httpmw.ExtractChatParam(db),
)
rtr.Get("/", nil)
rtr.ServeHTTP(rw, r)
res := rw.Result()
defer res.Body.Close()
require.Equal(t, http.StatusNotFound, res.StatusCode)
})
t.Run("Success", func(t *testing.T) {
t.Parallel()
var (
db, _ = dbtestutil.NewDB(t)
rw = httptest.NewRecorder()
r, user = setupAuthentication(db)
rtr = chi.NewRouter()
)
// Create a test chat
testChat := dbgen.Chat(t, db, database.Chat{
ID: uuid.New(),
OwnerID: user.ID,
CreatedAt: dbtime.Now(),
UpdatedAt: dbtime.Now(),
Title: "Test Chat",
})
rtr.Use(
httpmw.ExtractAPIKeyMW(httpmw.ExtractAPIKeyConfig{
DB: db,
RedirectToLogin: false,
}),
httpmw.ExtractChatParam(db),
)
rtr.Get("/", func(rw http.ResponseWriter, r *http.Request) {
chat := httpmw.ChatParam(r)
require.NotZero(t, chat)
assert.Equal(t, testChat.ID, chat.ID)
assert.WithinDuration(t, testChat.CreatedAt, chat.CreatedAt, time.Second)
assert.WithinDuration(t, testChat.UpdatedAt, chat.UpdatedAt, time.Second)
assert.Equal(t, testChat.Title, chat.Title)
rw.WriteHeader(http.StatusOK)
})
// Try by ID
chi.RouteContext(r.Context()).URLParams.Add("chat", testChat.ID.String())
rtr.ServeHTTP(rw, r)
res := rw.Result()
defer res.Body.Close()
require.Equal(t, http.StatusOK, res.StatusCode, "by id")
})
}
-11
View File
@@ -54,16 +54,6 @@ var (
Type: "audit_log",
}
// ResourceChat
// Valid Actions
// - "ActionCreate" :: create a chat
// - "ActionDelete" :: delete a chat
// - "ActionRead" :: read a chat
// - "ActionUpdate" :: update a chat
ResourceChat = Object{
Type: "chat",
}
// ResourceCryptoKey
// Valid Actions
// - "ActionCreate" :: create crypto keys
@@ -378,7 +368,6 @@ func AllResources() []Objecter {
ResourceAssignOrgRole,
ResourceAssignRole,
ResourceAuditLog,
ResourceChat,
ResourceCryptoKey,
ResourceDebugInfo,
ResourceDeploymentConfig,
-8
View File
@@ -124,14 +124,6 @@ var RBACPermissions = map[string]PermissionDefinition{
ActionRead: actDef("read and use a workspace proxy"),
},
},
"chat": {
Actions: map[Action]ActionDefinition{
ActionCreate: actDef("create a chat"),
ActionRead: actDef("read a chat"),
ActionDelete: actDef("delete a chat"),
ActionUpdate: actDef("update a chat"),
},
},
"license": {
Actions: map[Action]ActionDefinition{
ActionCreate: actDef("create a license"),
-2
View File
@@ -305,8 +305,6 @@ func ReloadBuiltinRoles(opts *RoleOptions) {
ResourceOrganizationMember.Type: {policy.ActionRead},
// Users can create provisioner daemons scoped to themselves.
ResourceProvisionerDaemon.Type: {policy.ActionRead, policy.ActionCreate, policy.ActionRead, policy.ActionUpdate},
// Users can create, read, update, and delete their own agentic chat messages.
ResourceChat.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete},
})...,
),
}.withCachedRegoValue()
-31
View File
@@ -849,37 +849,6 @@ func TestRolePermissions(t *testing.T) {
},
},
},
// Members may read their own chats.
{
Name: "CreateReadUpdateDeleteMyChats",
Actions: []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete},
Resource: rbac.ResourceChat.WithOwner(currentUser.String()),
AuthorizeMap: map[bool][]hasAuthSubjects{
true: {memberMe, orgMemberMe, owner},
false: {
userAdmin, orgUserAdmin, templateAdmin,
orgAuditor, orgTemplateAdmin,
otherOrgMember, otherOrgAuditor, otherOrgUserAdmin, otherOrgTemplateAdmin,
orgAdmin, otherOrgAdmin,
},
},
},
// Only owners can create, read, update, and delete other users' chats.
{
Name: "CreateReadUpdateDeleteOtherUserChats",
Actions: []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete},
Resource: rbac.ResourceChat.WithOwner(uuid.NewString()), // some other user
AuthorizeMap: map[bool][]hasAuthSubjects{
true: {owner},
false: {
memberMe, orgMemberMe,
userAdmin, orgUserAdmin, templateAdmin,
orgAuditor, orgTemplateAdmin,
otherOrgMember, otherOrgAuditor, otherOrgUserAdmin, otherOrgTemplateAdmin,
orgAdmin, otherOrgAdmin,
},
},
},
}
// We expect every permission to be tested above.
+2
View File
@@ -33,6 +33,8 @@ func NextAutostart(at time.Time, wsSchedule string, templateSchedule TemplateSch
return zonedTransition, allowed
}
// NextAllowedAutostart returns the next valid autostart time after 'at', based on the workspace's
// cron schedule and the template's allowed days. It searches up to 7 days ahead to find a match.
func NextAllowedAutostart(at time.Time, wsSchedule string, templateSchedule TemplateScheduleOptions) (time.Time, error) {
next := at
-4
View File
@@ -687,10 +687,6 @@ func (r *remoteReporter) createSnapshot() (*Snapshot, error) {
return nil
})
eg.Go(func() error {
if !r.options.Experiments.Enabled(codersdk.ExperimentWorkspacePrebuilds) {
return nil
}
metrics, err := r.options.Database.GetPrebuildMetrics(ctx)
if err != nil {
return xerrors.Errorf("get prebuild metrics: %w", err)
+2 -17
View File
@@ -408,7 +408,6 @@ func TestPrebuiltWorkspacesTelemetry(t *testing.T) {
cases := []struct {
name string
experimentEnabled bool
storeFn func(store database.Store) database.Store
expectedSnapshotEntries int
expectedCreated int
@@ -416,8 +415,7 @@ func TestPrebuiltWorkspacesTelemetry(t *testing.T) {
expectedClaimed int
}{
{
name: "experiment enabled",
experimentEnabled: true,
name: "prebuilds enabled",
storeFn: func(store database.Store) database.Store {
return &mockDB{Store: store}
},
@@ -427,19 +425,11 @@ func TestPrebuiltWorkspacesTelemetry(t *testing.T) {
expectedClaimed: 3,
},
{
name: "experiment enabled, prebuilds not used",
experimentEnabled: true,
name: "prebuilds not used",
storeFn: func(store database.Store) database.Store {
return &emptyMockDB{Store: store}
},
},
{
name: "experiment disabled",
experimentEnabled: false,
storeFn: func(store database.Store) database.Store {
return &mockDB{Store: store}
},
},
}
for _, tc := range cases {
@@ -448,11 +438,6 @@ func TestPrebuiltWorkspacesTelemetry(t *testing.T) {
deployment, snapshot := collectSnapshot(ctx, t, db, func(opts telemetry.Options) telemetry.Options {
opts.Database = tc.storeFn(db)
if tc.experimentEnabled {
opts.Experiments = codersdk.Experiments{
codersdk.ExperimentWorkspacePrebuilds,
}
}
return opts
})
+7 -7
View File
@@ -905,19 +905,19 @@ func (api *API) workspaceAgentListContainers(rw http.ResponseWriter, r *http.Req
// @Tags Agents
// @Produce json
// @Param workspaceagent path string true "Workspace agent ID" format(uuid)
// @Param container path string true "Container ID or name"
// @Param devcontainer path string true "Devcontainer ID"
// @Success 202 {object} codersdk.Response
// @Router /workspaceagents/{workspaceagent}/containers/devcontainers/container/{container}/recreate [post]
// @Router /workspaceagents/{workspaceagent}/containers/devcontainers/{devcontainer}/recreate [post]
func (api *API) workspaceAgentRecreateDevcontainer(rw http.ResponseWriter, r *http.Request) {
ctx := r.Context()
workspaceAgent := httpmw.WorkspaceAgentParam(r)
container := chi.URLParam(r, "container")
if container == "" {
devcontainer := chi.URLParam(r, "devcontainer")
if devcontainer == "" {
httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{
Message: "Container ID or name is required.",
Message: "Devcontainer ID is required.",
Validations: []codersdk.ValidationError{
{Field: "container", Detail: "Container ID or name is required."},
{Field: "devcontainer", Detail: "Devcontainer ID is required."},
},
})
return
@@ -961,7 +961,7 @@ func (api *API) workspaceAgentRecreateDevcontainer(rw http.ResponseWriter, r *ht
}
defer release()
m, err := agentConn.RecreateDevcontainer(ctx, container)
m, err := agentConn.RecreateDevcontainer(ctx, devcontainer)
if err != nil {
if errors.Is(err, context.Canceled) {
httpapi.Write(ctx, rw, http.StatusRequestTimeout, codersdk.Response{
+41 -37
View File
@@ -1396,63 +1396,62 @@ func TestWorkspaceAgentRecreateDevcontainer(t *testing.T) {
var (
workspaceFolder = t.TempDir()
configFile = filepath.Join(workspaceFolder, ".devcontainer", "devcontainer.json")
dcLabels = map[string]string{
agentcontainers.DevcontainerLocalFolderLabel: workspaceFolder,
agentcontainers.DevcontainerConfigFileLabel: configFile,
}
devcontainerID = uuid.New()
// Create a container that would be associated with the devcontainer
devContainer = codersdk.WorkspaceAgentContainer{
ID: uuid.NewString(),
CreatedAt: dbtime.Now(),
FriendlyName: testutil.GetRandomName(t),
Image: "busybox:latest",
Labels: dcLabels,
Running: true,
Status: "running",
Labels: map[string]string{
agentcontainers.DevcontainerLocalFolderLabel: workspaceFolder,
agentcontainers.DevcontainerConfigFileLabel: configFile,
},
Running: true,
Status: "running",
}
plainContainer = codersdk.WorkspaceAgentContainer{
ID: uuid.NewString(),
CreatedAt: dbtime.Now(),
FriendlyName: testutil.GetRandomName(t),
Image: "busybox:latest",
Labels: map[string]string{},
Running: true,
Status: "running",
devcontainer = codersdk.WorkspaceAgentDevcontainer{
ID: devcontainerID,
Name: "test-devcontainer",
WorkspaceFolder: workspaceFolder,
ConfigPath: configFile,
Status: codersdk.WorkspaceAgentDevcontainerStatusRunning,
Container: &devContainer,
}
)
for _, tc := range []struct {
name string
setupMock func(mccli *acmock.MockContainerCLI, mdccli *acmock.MockDevcontainerCLI) (status int)
name string
devcontainerID string
setupDevcontainers []codersdk.WorkspaceAgentDevcontainer
setupMock func(mccli *acmock.MockContainerCLI, mdccli *acmock.MockDevcontainerCLI) (status int)
}{
{
name: "Recreate",
name: "Recreate",
devcontainerID: devcontainerID.String(),
setupDevcontainers: []codersdk.WorkspaceAgentDevcontainer{devcontainer},
setupMock: func(mccli *acmock.MockContainerCLI, mdccli *acmock.MockDevcontainerCLI) int {
mccli.EXPECT().List(gomock.Any()).Return(codersdk.WorkspaceAgentListContainersResponse{
Containers: []codersdk.WorkspaceAgentContainer{devContainer},
}, nil).AnyTimes()
// DetectArchitecture always returns "<none>" for this test to disable agent injection.
mccli.EXPECT().DetectArchitecture(gomock.Any(), devContainer.ID).Return("<none>", nil).AnyTimes()
mdccli.EXPECT().ReadConfig(gomock.Any(), workspaceFolder, configFile, gomock.Any()).Return(agentcontainers.DevcontainerConfig{}, nil).Times(1)
mdccli.EXPECT().ReadConfig(gomock.Any(), workspaceFolder, configFile, gomock.Any()).Return(agentcontainers.DevcontainerConfig{}, nil).AnyTimes()
mdccli.EXPECT().Up(gomock.Any(), workspaceFolder, configFile, gomock.Any()).Return("someid", nil).Times(1)
return 0
},
},
{
name: "Container does not exist",
name: "Devcontainer does not exist",
devcontainerID: uuid.NewString(),
setupDevcontainers: nil,
setupMock: func(mccli *acmock.MockContainerCLI, mdccli *acmock.MockDevcontainerCLI) int {
mccli.EXPECT().List(gomock.Any()).Return(codersdk.WorkspaceAgentListContainersResponse{}, nil).AnyTimes()
return http.StatusNotFound
},
},
{
name: "Not a devcontainer",
setupMock: func(mccli *acmock.MockContainerCLI, mdccli *acmock.MockDevcontainerCLI) int {
mccli.EXPECT().List(gomock.Any()).Return(codersdk.WorkspaceAgentListContainersResponse{
Containers: []codersdk.WorkspaceAgentContainer{plainContainer},
}, nil).AnyTimes()
return http.StatusNotFound
},
},
} {
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
@@ -1472,16 +1471,21 @@ func TestWorkspaceAgentRecreateDevcontainer(t *testing.T) {
}).WithAgent(func(agents []*proto.Agent) []*proto.Agent {
return agents
}).Do()
devcontainerAPIOptions := []agentcontainers.Option{
agentcontainers.WithContainerCLI(mccli),
agentcontainers.WithDevcontainerCLI(mdccli),
agentcontainers.WithWatcher(watcher.NewNoop()),
}
if tc.setupDevcontainers != nil {
devcontainerAPIOptions = append(devcontainerAPIOptions,
agentcontainers.WithDevcontainers(tc.setupDevcontainers, nil))
}
_ = agenttest.New(t, client.URL, r.AgentToken, func(o *agent.Options) {
o.Logger = logger.Named("agent")
o.Devcontainers = true
o.DevcontainerAPIOptions = append(
o.DevcontainerAPIOptions,
agentcontainers.WithContainerCLI(mccli),
agentcontainers.WithDevcontainerCLI(mdccli),
agentcontainers.WithWatcher(watcher.NewNoop()),
agentcontainers.WithContainerLabelIncludeFilter(agentcontainers.DevcontainerLocalFolderLabel, workspaceFolder),
)
o.DevcontainerAPIOptions = devcontainerAPIOptions
})
resources := coderdtest.NewWorkspaceAgentWaiter(t, client, r.Workspace.ID).Wait()
require.Len(t, resources, 1, "expected one resource")
@@ -1490,7 +1494,7 @@ func TestWorkspaceAgentRecreateDevcontainer(t *testing.T) {
ctx := testutil.Context(t, testutil.WaitLong)
_, err := client.WorkspaceAgentRecreateDevcontainer(ctx, agentID, devContainer.ID)
_, err := client.WorkspaceAgentRecreateDevcontainer(ctx, agentID, tc.devcontainerID)
if wantStatus > 0 {
cerr, ok := codersdk.AsError(err)
require.True(t, ok, "expected error to be a coder error")
-153
View File
@@ -1,153 +0,0 @@
package codersdk
import (
"context"
"encoding/json"
"fmt"
"net/http"
"time"
"github.com/google/uuid"
"github.com/kylecarbs/aisdk-go"
"golang.org/x/xerrors"
)
// CreateChat creates a new chat.
func (c *Client) CreateChat(ctx context.Context) (Chat, error) {
res, err := c.Request(ctx, http.MethodPost, "/api/v2/chats", nil)
if err != nil {
return Chat{}, xerrors.Errorf("execute request: %w", err)
}
if res.StatusCode != http.StatusCreated {
return Chat{}, ReadBodyAsError(res)
}
defer res.Body.Close()
var chat Chat
return chat, json.NewDecoder(res.Body).Decode(&chat)
}
type Chat struct {
ID uuid.UUID `json:"id" format:"uuid"`
CreatedAt time.Time `json:"created_at" format:"date-time"`
UpdatedAt time.Time `json:"updated_at" format:"date-time"`
Title string `json:"title"`
}
// ListChats lists all chats.
func (c *Client) ListChats(ctx context.Context) ([]Chat, error) {
res, err := c.Request(ctx, http.MethodGet, "/api/v2/chats", nil)
if err != nil {
return nil, xerrors.Errorf("execute request: %w", err)
}
defer res.Body.Close()
if res.StatusCode != http.StatusOK {
return nil, ReadBodyAsError(res)
}
var chats []Chat
return chats, json.NewDecoder(res.Body).Decode(&chats)
}
// Chat returns a chat by ID.
func (c *Client) Chat(ctx context.Context, id uuid.UUID) (Chat, error) {
res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/chats/%s", id), nil)
if err != nil {
return Chat{}, xerrors.Errorf("execute request: %w", err)
}
defer res.Body.Close()
if res.StatusCode != http.StatusOK {
return Chat{}, ReadBodyAsError(res)
}
var chat Chat
return chat, json.NewDecoder(res.Body).Decode(&chat)
}
// ChatMessages returns the messages of a chat.
func (c *Client) ChatMessages(ctx context.Context, id uuid.UUID) ([]ChatMessage, error) {
res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/chats/%s/messages", id), nil)
if err != nil {
return nil, xerrors.Errorf("execute request: %w", err)
}
defer res.Body.Close()
if res.StatusCode != http.StatusOK {
return nil, ReadBodyAsError(res)
}
var messages []ChatMessage
return messages, json.NewDecoder(res.Body).Decode(&messages)
}
type ChatMessage = aisdk.Message
type CreateChatMessageRequest struct {
Model string `json:"model"`
Message ChatMessage `json:"message"`
Thinking bool `json:"thinking"`
}
// CreateChatMessage creates a new chat message and streams the response.
// If the provided message has a conflicting ID with an existing message,
// it will be overwritten.
func (c *Client) CreateChatMessage(ctx context.Context, id uuid.UUID, req CreateChatMessageRequest) (<-chan aisdk.DataStreamPart, error) {
res, err := c.Request(ctx, http.MethodPost, fmt.Sprintf("/api/v2/chats/%s/messages", id), req)
defer func() {
if res != nil && res.Body != nil {
_ = res.Body.Close()
}
}()
if err != nil {
return nil, xerrors.Errorf("execute request: %w", err)
}
if res.StatusCode != http.StatusOK {
return nil, ReadBodyAsError(res)
}
nextEvent := ServerSentEventReader(ctx, res.Body)
wc := make(chan aisdk.DataStreamPart, 256)
go func() {
defer close(wc)
defer res.Body.Close()
for {
select {
case <-ctx.Done():
return
default:
sse, err := nextEvent()
if err != nil {
return
}
if sse.Type != ServerSentEventTypeData {
continue
}
var part aisdk.DataStreamPart
b, ok := sse.Data.([]byte)
if !ok {
return
}
err = json.Unmarshal(b, &part)
if err != nil {
return
}
select {
case <-ctx.Done():
return
case wc <- part:
}
}
}
}()
return wc, nil
}
func (c *Client) DeleteChat(ctx context.Context, id uuid.UUID) error {
res, err := c.Request(ctx, http.MethodDelete, fmt.Sprintf("/api/v2/chats/%s", id), nil)
if err != nil {
return xerrors.Errorf("execute request: %w", err)
}
defer res.Body.Close()
if res.StatusCode != http.StatusNoContent {
return ReadBodyAsError(res)
}
return nil
}
+1 -59
View File
@@ -383,7 +383,6 @@ type DeploymentValues struct {
DisablePasswordAuth serpent.Bool `json:"disable_password_auth,omitempty" typescript:",notnull"`
Support SupportConfig `json:"support,omitempty" typescript:",notnull"`
ExternalAuthConfigs serpent.Struct[[]ExternalAuthConfig] `json:"external_auth,omitempty" typescript:",notnull"`
AI serpent.Struct[AIConfig] `json:"ai,omitempty" typescript:",notnull"`
SSHConfig SSHConfig `json:"config_ssh,omitempty" typescript:",notnull"`
WgtunnelHost serpent.String `json:"wgtunnel_host,omitempty" typescript:",notnull"`
DisableOwnerWorkspaceExec serpent.Bool `json:"disable_owner_workspace_exec,omitempty" typescript:",notnull"`
@@ -2681,15 +2680,6 @@ Write out the current server config as YAML to stdout.`,
Value: &c.Support.Links,
Hidden: false,
},
{
// Env handling is done in cli.ReadAIProvidersFromEnv
Name: "AI",
Description: "Configure AI providers.",
YAML: "ai",
Value: &c.AI,
// Hidden because this is experimental.
Hidden: true,
},
{
// Env handling is done in cli.ReadGitAuthFromEnvironment
Name: "External Auth Providers",
@@ -3080,7 +3070,6 @@ Write out the current server config as YAML to stdout.`,
Group: &deploymentGroupPrebuilds,
YAML: "reconciliation_interval",
Annotations: serpent.Annotations{}.Mark(annotationFormatDuration, "true"),
Hidden: ExperimentsSafe.Enabled(ExperimentWorkspacePrebuilds), // Hide setting while this feature is experimental.
},
{
Name: "Reconciliation Backoff Interval",
@@ -3132,21 +3121,6 @@ Write out the current server config as YAML to stdout.`,
return opts
}
type AIProviderConfig struct {
// Type is the type of the API provider.
Type string `json:"type" yaml:"type"`
// APIKey is the API key to use for the API provider.
APIKey string `json:"-" yaml:"api_key"`
// Models is the list of models to use for the API provider.
Models []string `json:"models" yaml:"models"`
// BaseURL is the base URL to use for the API provider.
BaseURL string `json:"base_url" yaml:"base_url"`
}
type AIConfig struct {
Providers []AIProviderConfig `json:"providers,omitempty" yaml:"providers,omitempty"`
}
type SupportConfig struct {
Links serpent.Struct[[]LinkConfig] `json:"links" typescript:",notnull"`
}
@@ -3367,8 +3341,6 @@ const (
ExperimentNotifications Experiment = "notifications" // Sends notifications via SMTP and webhooks following certain events.
ExperimentWorkspaceUsage Experiment = "workspace-usage" // Enables the new workspace usage tracking.
ExperimentWebPush Experiment = "web-push" // Enables web push notifications through the browser.
ExperimentWorkspacePrebuilds Experiment = "workspace-prebuilds" // Enables the new workspace prebuilds feature.
ExperimentAgenticChat Experiment = "agentic-chat" // Enables the new agentic AI chat feature.
)
// ExperimentsKnown should include all experiments defined above.
@@ -3378,17 +3350,13 @@ var ExperimentsKnown = Experiments{
ExperimentNotifications,
ExperimentWorkspaceUsage,
ExperimentWebPush,
ExperimentWorkspacePrebuilds,
ExperimentAgenticChat,
}
// ExperimentsSafe should include all experiments that are safe for
// users to opt-in to via --experimental='*'.
// Experiments that are not ready for consumption by all users should
// not be included here and will be essentially hidden.
var ExperimentsSafe = Experiments{
ExperimentWorkspacePrebuilds,
}
var ExperimentsSafe = Experiments{}
// Experiments is a list of experiments.
// Multiple experiments may be enabled at the same time.
@@ -3597,32 +3565,6 @@ func (c *Client) SSHConfiguration(ctx context.Context) (SSHConfigResponse, error
return sshConfig, json.NewDecoder(res.Body).Decode(&sshConfig)
}
type LanguageModelConfig struct {
Models []LanguageModel `json:"models"`
}
// LanguageModel is a language model that can be used for chat.
type LanguageModel struct {
// ID is used by the provider to identify the LLM.
ID string `json:"id"`
DisplayName string `json:"display_name"`
// Provider is the provider of the LLM. e.g. openai, anthropic, etc.
Provider string `json:"provider"`
}
func (c *Client) LanguageModelConfig(ctx context.Context) (LanguageModelConfig, error) {
res, err := c.Request(ctx, http.MethodGet, "/api/v2/deployment/llms", nil)
if err != nil {
return LanguageModelConfig{}, err
}
defer res.Body.Close()
if res.StatusCode != http.StatusOK {
return LanguageModelConfig{}, ReadBodyAsError(res)
}
var llms LanguageModelConfig
return llms, json.NewDecoder(res.Body).Decode(&llms)
}
type CryptoKeyFeature string
const (
-2
View File
@@ -9,7 +9,6 @@ const (
ResourceAssignOrgRole RBACResource = "assign_org_role"
ResourceAssignRole RBACResource = "assign_role"
ResourceAuditLog RBACResource = "audit_log"
ResourceChat RBACResource = "chat"
ResourceCryptoKey RBACResource = "crypto_key"
ResourceDebugInfo RBACResource = "debug_info"
ResourceDeploymentConfig RBACResource = "deployment_config"
@@ -73,7 +72,6 @@ var RBACResourceActions = map[RBACResource][]RBACAction{
ResourceAssignOrgRole: {ActionAssign, ActionCreate, ActionDelete, ActionRead, ActionUnassign, ActionUpdate},
ResourceAssignRole: {ActionAssign, ActionRead, ActionUnassign},
ResourceAuditLog: {ActionCreate, ActionRead},
ResourceChat: {ActionCreate, ActionDelete, ActionRead, ActionUpdate},
ResourceCryptoKey: {ActionCreate, ActionDelete, ActionRead, ActionUpdate},
ResourceDebugInfo: {ActionRead},
ResourceDeploymentConfig: {ActionRead, ActionUpdate},
+2 -1
View File
@@ -8,9 +8,10 @@ import (
"io"
"github.com/google/uuid"
"github.com/kylecarbs/aisdk-go"
"golang.org/x/xerrors"
"github.com/coder/aisdk-go"
"github.com/coder/coder/v2/codersdk"
)
+2 -1
View File
@@ -10,11 +10,12 @@ import (
"time"
"github.com/google/uuid"
"github.com/kylecarbs/aisdk-go"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.uber.org/goleak"
"github.com/coder/aisdk-go"
"github.com/coder/coder/v2/coderd/coderdtest"
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/database/dbfake"
+2 -2
View File
@@ -519,8 +519,8 @@ func (c *Client) WorkspaceAgentListContainers(ctx context.Context, agentID uuid.
}
// WorkspaceAgentRecreateDevcontainer recreates the devcontainer with the given ID.
func (c *Client) WorkspaceAgentRecreateDevcontainer(ctx context.Context, agentID uuid.UUID, containerIDOrName string) (Response, error) {
res, err := c.Request(ctx, http.MethodPost, fmt.Sprintf("/api/v2/workspaceagents/%s/containers/devcontainers/container/%s/recreate", agentID, containerIDOrName), nil)
func (c *Client) WorkspaceAgentRecreateDevcontainer(ctx context.Context, agentID uuid.UUID, devcontainerID string) (Response, error) {
res, err := c.Request(ctx, http.MethodPost, fmt.Sprintf("/api/v2/workspaceagents/%s/containers/devcontainers/%s/recreate", agentID, devcontainerID), nil)
if err != nil {
return Response{}, err
}
+6 -3
View File
@@ -37,15 +37,18 @@ const (
type BuildReason string
const (
// "initiator" is used when a workspace build is triggered by a user.
// BuildReasonInitiator "initiator" is used when a workspace build is triggered by a user.
// Combined with the initiator id/username, it indicates which user initiated the build.
BuildReasonInitiator BuildReason = "initiator"
// "autostart" is used when a build to start a workspace is triggered by Autostart.
// BuildReasonAutostart "autostart" is used when a build to start a workspace is triggered by Autostart.
// The initiator id/username in this case is the workspace owner and can be ignored.
BuildReasonAutostart BuildReason = "autostart"
// "autostop" is used when a build to stop a workspace is triggered by Autostop.
// BuildReasonAutostop "autostop" is used when a build to stop a workspace is triggered by Autostop.
// The initiator id/username in this case is the workspace owner and can be ignored.
BuildReasonAutostop BuildReason = "autostop"
// BuildReasonDormancy "dormancy" is used when a build to stop a workspace is triggered due to inactivity (dormancy).
// The initiator id/username in this case is the workspace owner and can be ignored.
BuildReasonDormancy BuildReason = "dormancy"
)
// WorkspaceBuild is an at-point representation of a workspace state.
+2 -2
View File
@@ -389,10 +389,10 @@ func (c *AgentConn) ListContainers(ctx context.Context) (codersdk.WorkspaceAgent
// RecreateDevcontainer recreates a devcontainer with the given container.
// This is a blocking call and will wait for the container to be recreated.
func (c *AgentConn) RecreateDevcontainer(ctx context.Context, containerIDOrName string) (codersdk.Response, error) {
func (c *AgentConn) RecreateDevcontainer(ctx context.Context, devcontainerID string) (codersdk.Response, error) {
ctx, span := tracing.StartSpan(ctx)
defer span.End()
res, err := c.apiRequest(ctx, http.MethodPost, "/api/v0/containers/devcontainers/container/"+containerIDOrName+"/recreate", nil)
res, err := c.apiRequest(ctx, http.MethodPost, "/api/v0/containers/devcontainers/"+devcontainerID+"/recreate", nil)
if err != nil {
return codersdk.Response{}, xerrors.Errorf("do request: %w", err)
}
@@ -1,5 +1,12 @@
# Prebuilt workspaces
> [!WARNING]
> Prebuilds Compatibility Limitations:
> Prebuilt workspaces currently do not work reliably with [DevContainers feature](../managing-templates/devcontainers/index.md).
> If your project relies on DevContainer configuration, we recommend disabling prebuilds or carefully testing behavior before enabling them.
>
> We're actively working to improve compatibility, but for now, please avoid using prebuilds with this feature to ensure stability and expected behavior.
Prebuilt workspaces allow template administrators to improve the developer experience by reducing workspace
creation time with an automatically maintained pool of ready-to-use workspaces for specific parameter presets.
@@ -16,7 +23,7 @@ Prebuilt workspaces are:
## Relationship to workspace presets
Prebuilt workspaces are tightly integrated with [workspace presets](./parameters.md#workspace-presets-beta):
Prebuilt workspaces are tightly integrated with [workspace presets](./parameters.md#workspace-presets):
1. Each prebuilt workspace is associated with a specific template preset.
1. The preset must define all required parameters needed to build the workspace.
@@ -27,7 +34,6 @@ Prebuilt workspaces are tightly integrated with [workspace presets](./parameters
- [**Premium license**](../../licensing/index.md)
- **Compatible Terraform provider**: Use `coder/coder` Terraform provider `>= 2.4.1`.
- **Feature flag**: Enable the `workspace-prebuilds` [experiment](../../../reference/cli/server.md#--experiments).
## Enable prebuilt workspaces for template presets
+6 -6
View File
@@ -859,19 +859,19 @@ To perform this operation, you must be authenticated. [Learn more](authenticatio
```shell
# Example request using curl
curl -X POST http://coder-server:8080/api/v2/workspaceagents/{workspaceagent}/containers/devcontainers/container/{container}/recreate \
curl -X POST http://coder-server:8080/api/v2/workspaceagents/{workspaceagent}/containers/devcontainers/{devcontainer}/recreate \
-H 'Accept: application/json' \
-H 'Coder-Session-Token: API_KEY'
```
`POST /workspaceagents/{workspaceagent}/containers/devcontainers/container/{container}/recreate`
`POST /workspaceagents/{workspaceagent}/containers/devcontainers/{devcontainer}/recreate`
### Parameters
| Name | In | Type | Required | Description |
|------------------|------|--------------|----------|----------------------|
| `workspaceagent` | path | string(uuid) | true | Workspace agent ID |
| `container` | path | string | true | Container ID or name |
| Name | In | Type | Required | Description |
|------------------|------|--------------|----------|--------------------|
| `workspaceagent` | path | string(uuid) | true | Workspace agent ID |
| `devcontainer` | path | string | true | Devcontainer ID |
### Example responses
-372
View File
@@ -1,372 +0,0 @@
# Chat
## List chats
### Code samples
```shell
# Example request using curl
curl -X GET http://coder-server:8080/api/v2/chats \
-H 'Accept: application/json' \
-H 'Coder-Session-Token: API_KEY'
```
`GET /chats`
### Example responses
> 200 Response
```json
[
{
"created_at": "2019-08-24T14:15:22Z",
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
"title": "string",
"updated_at": "2019-08-24T14:15:22Z"
}
]
```
### Responses
| Status | Meaning | Description | Schema |
|--------|---------------------------------------------------------|-------------|---------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | array of [codersdk.Chat](schemas.md#codersdkchat) |
<h3 id="list-chats-responseschema">Response Schema</h3>
Status Code **200**
| Name | Type | Required | Restrictions | Description |
|----------------|-------------------|----------|--------------|-------------|
| `[array item]` | array | false | | |
| `» created_at` | string(date-time) | false | | |
| `» id` | string(uuid) | false | | |
| `» title` | string | false | | |
| `» updated_at` | string(date-time) | false | | |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
## Create a chat
### Code samples
```shell
# Example request using curl
curl -X POST http://coder-server:8080/api/v2/chats \
-H 'Accept: application/json' \
-H 'Coder-Session-Token: API_KEY'
```
`POST /chats`
### Example responses
> 201 Response
```json
{
"created_at": "2019-08-24T14:15:22Z",
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
"title": "string",
"updated_at": "2019-08-24T14:15:22Z"
}
```
### Responses
| Status | Meaning | Description | Schema |
|--------|--------------------------------------------------------------|-------------|------------------------------------------|
| 201 | [Created](https://tools.ietf.org/html/rfc7231#section-6.3.2) | Created | [codersdk.Chat](schemas.md#codersdkchat) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
## Get a chat
### Code samples
```shell
# Example request using curl
curl -X GET http://coder-server:8080/api/v2/chats/{chat} \
-H 'Accept: application/json' \
-H 'Coder-Session-Token: API_KEY'
```
`GET /chats/{chat}`
### Parameters
| Name | In | Type | Required | Description |
|--------|------|--------|----------|-------------|
| `chat` | path | string | true | Chat ID |
### Example responses
> 200 Response
```json
{
"created_at": "2019-08-24T14:15:22Z",
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
"title": "string",
"updated_at": "2019-08-24T14:15:22Z"
}
```
### Responses
| Status | Meaning | Description | Schema |
|--------|---------------------------------------------------------|-------------|------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.Chat](schemas.md#codersdkchat) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
## Get chat messages
### Code samples
```shell
# Example request using curl
curl -X GET http://coder-server:8080/api/v2/chats/{chat}/messages \
-H 'Accept: application/json' \
-H 'Coder-Session-Token: API_KEY'
```
`GET /chats/{chat}/messages`
### Parameters
| Name | In | Type | Required | Description |
|--------|------|--------|----------|-------------|
| `chat` | path | string | true | Chat ID |
### Example responses
> 200 Response
```json
[
{
"annotations": [
null
],
"content": "string",
"createdAt": [
0
],
"experimental_attachments": [
{
"contentType": "string",
"name": "string",
"url": "string"
}
],
"id": "string",
"parts": [
{
"data": [
0
],
"details": [
{
"data": "string",
"signature": "string",
"text": "string",
"type": "string"
}
],
"mimeType": "string",
"reasoning": "string",
"source": {
"contentType": "string",
"data": "string",
"metadata": {
"property1": null,
"property2": null
},
"uri": "string"
},
"text": "string",
"toolInvocation": {
"args": null,
"result": null,
"state": "call",
"step": 0,
"toolCallId": "string",
"toolName": "string"
},
"type": "text"
}
],
"role": "string"
}
]
```
### Responses
| Status | Meaning | Description | Schema |
|--------|---------------------------------------------------------|-------------|---------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | array of [aisdk.Message](schemas.md#aisdkmessage) |
<h3 id="get-chat-messages-responseschema">Response Schema</h3>
Status Code **200**
| Name | Type | Required | Restrictions | Description |
|------------------------------|------------------------------------------------------------------|----------|--------------|-------------------------|
| `[array item]` | array | false | | |
| `» annotations` | array | false | | |
| `» content` | string | false | | |
| `» createdAt` | array | false | | |
| `» experimental_attachments` | array | false | | |
| `»» contentType` | string | false | | |
| `»» name` | string | false | | |
| `»» url` | string | false | | |
| `» id` | string | false | | |
| `» parts` | array | false | | |
| `»» data` | array | false | | |
| `»» details` | array | false | | |
| `»»» data` | string | false | | |
| `»»» signature` | string | false | | |
| `»»» text` | string | false | | |
| `»»» type` | string | false | | |
| `»» mimeType` | string | false | | Type: "file" |
| `»» reasoning` | string | false | | Type: "reasoning" |
| `»» source` | [aisdk.SourceInfo](schemas.md#aisdksourceinfo) | false | | Type: "source" |
| `»»» contentType` | string | false | | |
| `»»» data` | string | false | | |
| `»»» metadata` | object | false | | |
| `»»»» [any property]` | any | false | | |
| `»»» uri` | string | false | | |
| `»» text` | string | false | | Type: "text" |
| `»» toolInvocation` | [aisdk.ToolInvocation](schemas.md#aisdktoolinvocation) | false | | Type: "tool-invocation" |
| `»»» args` | any | false | | |
| `»»» result` | any | false | | |
| `»»» state` | [aisdk.ToolInvocationState](schemas.md#aisdktoolinvocationstate) | false | | |
| `»»» step` | integer | false | | |
| `»»» toolCallId` | string | false | | |
| `»»» toolName` | string | false | | |
| `»» type` | [aisdk.PartType](schemas.md#aisdkparttype) | false | | |
| `» role` | string | false | | |
#### Enumerated Values
| Property | Value |
|----------|-------------------|
| `state` | `call` |
| `state` | `partial-call` |
| `state` | `result` |
| `type` | `text` |
| `type` | `reasoning` |
| `type` | `tool-invocation` |
| `type` | `source` |
| `type` | `file` |
| `type` | `step-start` |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
## Create a chat message
### Code samples
```shell
# Example request using curl
curl -X POST http://coder-server:8080/api/v2/chats/{chat}/messages \
-H 'Content-Type: application/json' \
-H 'Accept: application/json' \
-H 'Coder-Session-Token: API_KEY'
```
`POST /chats/{chat}/messages`
> Body parameter
```json
{
"message": {
"annotations": [
null
],
"content": "string",
"createdAt": [
0
],
"experimental_attachments": [
{
"contentType": "string",
"name": "string",
"url": "string"
}
],
"id": "string",
"parts": [
{
"data": [
0
],
"details": [
{
"data": "string",
"signature": "string",
"text": "string",
"type": "string"
}
],
"mimeType": "string",
"reasoning": "string",
"source": {
"contentType": "string",
"data": "string",
"metadata": {
"property1": null,
"property2": null
},
"uri": "string"
},
"text": "string",
"toolInvocation": {
"args": null,
"result": null,
"state": "call",
"step": 0,
"toolCallId": "string",
"toolName": "string"
},
"type": "text"
}
],
"role": "string"
},
"model": "string",
"thinking": true
}
```
### Parameters
| Name | In | Type | Required | Description |
|--------|------|----------------------------------------------------------------------------------|----------|--------------|
| `chat` | path | string | true | Chat ID |
| `body` | body | [codersdk.CreateChatMessageRequest](schemas.md#codersdkcreatechatmessagerequest) | true | Request body |
### Example responses
> 200 Response
```json
[
null
]
```
### Responses
| Status | Meaning | Description | Schema |
|--------|---------------------------------------------------------|-------------|--------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | array of undefined |
<h3 id="create-a-chat-message-responseschema">Response Schema</h3>
To perform this operation, you must be authenticated. [Learn more](authentication.md).
-50
View File
@@ -161,19 +161,6 @@ curl -X GET http://coder-server:8080/api/v2/deployment/config \
"user": {}
},
"agent_stat_refresh_interval": 0,
"ai": {
"value": {
"providers": [
{
"base_url": "string",
"models": [
"string"
],
"type": "string"
}
]
}
},
"allow_workspace_renames": true,
"autobuild_poll_interval": 0,
"browser_only": true,
@@ -586,43 +573,6 @@ curl -X GET http://coder-server:8080/api/v2/deployment/config \
To perform this operation, you must be authenticated. [Learn more](authentication.md).
## Get language models
### Code samples
```shell
# Example request using curl
curl -X GET http://coder-server:8080/api/v2/deployment/llms \
-H 'Accept: application/json' \
-H 'Coder-Session-Token: API_KEY'
```
`GET /deployment/llms`
### Example responses
> 200 Response
```json
{
"models": [
{
"display_name": "string",
"id": "string",
"provider": "string"
}
]
}
```
### Responses
| Status | Meaning | Description | Schema |
|--------|---------------------------------------------------------|-------------|------------------------------------------------------------------------|
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.LanguageModelConfig](schemas.md#codersdklanguagemodelconfig) |
To perform this operation, you must be authenticated. [Learn more](authentication.md).
## SSH Config
### Code samples
-5
View File
@@ -187,7 +187,6 @@ Status Code **200**
| `resource_type` | `assign_org_role` |
| `resource_type` | `assign_role` |
| `resource_type` | `audit_log` |
| `resource_type` | `chat` |
| `resource_type` | `crypto_key` |
| `resource_type` | `debug_info` |
| `resource_type` | `deployment_config` |
@@ -357,7 +356,6 @@ Status Code **200**
| `resource_type` | `assign_org_role` |
| `resource_type` | `assign_role` |
| `resource_type` | `audit_log` |
| `resource_type` | `chat` |
| `resource_type` | `crypto_key` |
| `resource_type` | `debug_info` |
| `resource_type` | `deployment_config` |
@@ -527,7 +525,6 @@ Status Code **200**
| `resource_type` | `assign_org_role` |
| `resource_type` | `assign_role` |
| `resource_type` | `audit_log` |
| `resource_type` | `chat` |
| `resource_type` | `crypto_key` |
| `resource_type` | `debug_info` |
| `resource_type` | `deployment_config` |
@@ -666,7 +663,6 @@ Status Code **200**
| `resource_type` | `assign_org_role` |
| `resource_type` | `assign_role` |
| `resource_type` | `audit_log` |
| `resource_type` | `chat` |
| `resource_type` | `crypto_key` |
| `resource_type` | `debug_info` |
| `resource_type` | `deployment_config` |
@@ -1027,7 +1023,6 @@ Status Code **200**
| `resource_type` | `assign_org_role` |
| `resource_type` | `assign_role` |
| `resource_type` | `audit_log` |
| `resource_type` | `chat` |
| `resource_type` | `crypto_key` |
| `resource_type` | `debug_info` |
| `resource_type` | `deployment_config` |
+3 -582
View File
@@ -212,250 +212,6 @@
|--------------------|
| `prebuild_claimed` |
## aisdk.Attachment
```json
{
"contentType": "string",
"name": "string",
"url": "string"
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
|---------------|--------|----------|--------------|-------------|
| `contentType` | string | false | | |
| `name` | string | false | | |
| `url` | string | false | | |
## aisdk.Message
```json
{
"annotations": [
null
],
"content": "string",
"createdAt": [
0
],
"experimental_attachments": [
{
"contentType": "string",
"name": "string",
"url": "string"
}
],
"id": "string",
"parts": [
{
"data": [
0
],
"details": [
{
"data": "string",
"signature": "string",
"text": "string",
"type": "string"
}
],
"mimeType": "string",
"reasoning": "string",
"source": {
"contentType": "string",
"data": "string",
"metadata": {
"property1": null,
"property2": null
},
"uri": "string"
},
"text": "string",
"toolInvocation": {
"args": null,
"result": null,
"state": "call",
"step": 0,
"toolCallId": "string",
"toolName": "string"
},
"type": "text"
}
],
"role": "string"
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
|----------------------------|-----------------------------------------------|----------|--------------|-------------|
| `annotations` | array of undefined | false | | |
| `content` | string | false | | |
| `createdAt` | array of integer | false | | |
| `experimental_attachments` | array of [aisdk.Attachment](#aisdkattachment) | false | | |
| `id` | string | false | | |
| `parts` | array of [aisdk.Part](#aisdkpart) | false | | |
| `role` | string | false | | |
## aisdk.Part
```json
{
"data": [
0
],
"details": [
{
"data": "string",
"signature": "string",
"text": "string",
"type": "string"
}
],
"mimeType": "string",
"reasoning": "string",
"source": {
"contentType": "string",
"data": "string",
"metadata": {
"property1": null,
"property2": null
},
"uri": "string"
},
"text": "string",
"toolInvocation": {
"args": null,
"result": null,
"state": "call",
"step": 0,
"toolCallId": "string",
"toolName": "string"
},
"type": "text"
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
|------------------|---------------------------------------------------------|----------|--------------|-------------------------|
| `data` | array of integer | false | | |
| `details` | array of [aisdk.ReasoningDetail](#aisdkreasoningdetail) | false | | |
| `mimeType` | string | false | | Type: "file" |
| `reasoning` | string | false | | Type: "reasoning" |
| `source` | [aisdk.SourceInfo](#aisdksourceinfo) | false | | Type: "source" |
| `text` | string | false | | Type: "text" |
| `toolInvocation` | [aisdk.ToolInvocation](#aisdktoolinvocation) | false | | Type: "tool-invocation" |
| `type` | [aisdk.PartType](#aisdkparttype) | false | | |
## aisdk.PartType
```json
"text"
```
### Properties
#### Enumerated Values
| Value |
|-------------------|
| `text` |
| `reasoning` |
| `tool-invocation` |
| `source` |
| `file` |
| `step-start` |
## aisdk.ReasoningDetail
```json
{
"data": "string",
"signature": "string",
"text": "string",
"type": "string"
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
|-------------|--------|----------|--------------|-------------|
| `data` | string | false | | |
| `signature` | string | false | | |
| `text` | string | false | | |
| `type` | string | false | | |
## aisdk.SourceInfo
```json
{
"contentType": "string",
"data": "string",
"metadata": {
"property1": null,
"property2": null
},
"uri": "string"
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
|--------------------|--------|----------|--------------|-------------|
| `contentType` | string | false | | |
| `data` | string | false | | |
| `metadata` | object | false | | |
| » `[any property]` | any | false | | |
| `uri` | string | false | | |
## aisdk.ToolInvocation
```json
{
"args": null,
"result": null,
"state": "call",
"step": 0,
"toolCallId": "string",
"toolName": "string"
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
|--------------|--------------------------------------------------------|----------|--------------|-------------|
| `args` | any | false | | |
| `result` | any | false | | |
| `state` | [aisdk.ToolInvocationState](#aisdktoolinvocationstate) | false | | |
| `step` | integer | false | | |
| `toolCallId` | string | false | | |
| `toolName` | string | false | | |
## aisdk.ToolInvocationState
```json
"call"
```
### Properties
#### Enumerated Values
| Value |
|----------------|
| `call` |
| `partial-call` |
| `result` |
## coderd.SCIMUser
```json
@@ -579,48 +335,6 @@
| `groups` | array of [codersdk.Group](#codersdkgroup) | false | | |
| `users` | array of [codersdk.ReducedUser](#codersdkreduceduser) | false | | |
## codersdk.AIConfig
```json
{
"providers": [
{
"base_url": "string",
"models": [
"string"
],
"type": "string"
}
]
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
|-------------|-----------------------------------------------------------------|----------|--------------|-------------|
| `providers` | array of [codersdk.AIProviderConfig](#codersdkaiproviderconfig) | false | | |
## codersdk.AIProviderConfig
```json
{
"base_url": "string",
"models": [
"string"
],
"type": "string"
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
|------------|-----------------|----------|--------------|-----------------------------------------------------------|
| `base_url` | string | false | | Base URL is the base URL to use for the API provider. |
| `models` | array of string | false | | Models is the list of models to use for the API provider. |
| `type` | string | false | | Type is the type of the API provider. |
## codersdk.APIKey
```json
@@ -1335,6 +1049,7 @@ AuthorizationObject can represent a "set" of objects, such as: all workspaces in
| `initiator` |
| `autostart` |
| `autostop` |
| `dormancy` |
## codersdk.ChangePasswordWithOneTimePasscodeRequest
@@ -1354,97 +1069,6 @@ AuthorizationObject can represent a "set" of objects, such as: all workspaces in
| `one_time_passcode` | string | true | | |
| `password` | string | true | | |
## codersdk.Chat
```json
{
"created_at": "2019-08-24T14:15:22Z",
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
"title": "string",
"updated_at": "2019-08-24T14:15:22Z"
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
|--------------|--------|----------|--------------|-------------|
| `created_at` | string | false | | |
| `id` | string | false | | |
| `title` | string | false | | |
| `updated_at` | string | false | | |
## codersdk.ChatMessage
```json
{
"annotations": [
null
],
"content": "string",
"createdAt": [
0
],
"experimental_attachments": [
{
"contentType": "string",
"name": "string",
"url": "string"
}
],
"id": "string",
"parts": [
{
"data": [
0
],
"details": [
{
"data": "string",
"signature": "string",
"text": "string",
"type": "string"
}
],
"mimeType": "string",
"reasoning": "string",
"source": {
"contentType": "string",
"data": "string",
"metadata": {
"property1": null,
"property2": null
},
"uri": "string"
},
"text": "string",
"toolInvocation": {
"args": null,
"result": null,
"state": "call",
"step": 0,
"toolCallId": "string",
"toolName": "string"
},
"type": "text"
}
],
"role": "string"
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
|----------------------------|-----------------------------------------------|----------|--------------|-------------|
| `annotations` | array of undefined | false | | |
| `content` | string | false | | |
| `createdAt` | array of integer | false | | |
| `experimental_attachments` | array of [aisdk.Attachment](#aisdkattachment) | false | | |
| `id` | string | false | | |
| `parts` | array of [aisdk.Part](#aisdkpart) | false | | |
| `role` | string | false | | |
## codersdk.ConnectionLatency
```json
@@ -1477,77 +1101,6 @@ AuthorizationObject can represent a "set" of objects, such as: all workspaces in
| `password` | string | true | | |
| `to_type` | [codersdk.LoginType](#codersdklogintype) | true | | To type is the login type to convert to. |
## codersdk.CreateChatMessageRequest
```json
{
"message": {
"annotations": [
null
],
"content": "string",
"createdAt": [
0
],
"experimental_attachments": [
{
"contentType": "string",
"name": "string",
"url": "string"
}
],
"id": "string",
"parts": [
{
"data": [
0
],
"details": [
{
"data": "string",
"signature": "string",
"text": "string",
"type": "string"
}
],
"mimeType": "string",
"reasoning": "string",
"source": {
"contentType": "string",
"data": "string",
"metadata": {
"property1": null,
"property2": null
},
"uri": "string"
},
"text": "string",
"toolInvocation": {
"args": null,
"result": null,
"state": "call",
"step": 0,
"toolCallId": "string",
"toolName": "string"
},
"type": "text"
}
],
"role": "string"
},
"model": "string",
"thinking": true
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
|------------|----------------------------------------------|----------|--------------|-------------|
| `message` | [codersdk.ChatMessage](#codersdkchatmessage) | false | | |
| `model` | string | false | | |
| `thinking` | boolean | false | | |
## codersdk.CreateFirstUserRequest
```json
@@ -1812,52 +1365,12 @@ This is required on creation to enable a user-flow of validating a template work
## codersdk.CreateTestAuditLogRequest
```json
{
"action": "create",
"additional_fields": [
0
],
"build_reason": "autostart",
"organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6",
"request_id": "266ea41d-adf5-480b-af50-15b940c2b846",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"resource_type": "template",
"time": "2019-08-24T14:15:22Z"
}
{}
```
### Properties
| Name | Type | Required | Restrictions | Description |
|---------------------|------------------------------------------------|----------|--------------|-------------|
| `action` | [codersdk.AuditAction](#codersdkauditaction) | false | | |
| `additional_fields` | array of integer | false | | |
| `build_reason` | [codersdk.BuildReason](#codersdkbuildreason) | false | | |
| `organization_id` | string | false | | |
| `request_id` | string | false | | |
| `resource_id` | string | false | | |
| `resource_type` | [codersdk.ResourceType](#codersdkresourcetype) | false | | |
| `time` | string | false | | |
#### Enumerated Values
| Property | Value |
|-----------------|--------------------|
| `action` | `create` |
| `action` | `write` |
| `action` | `delete` |
| `action` | `start` |
| `action` | `stop` |
| `build_reason` | `autostart` |
| `build_reason` | `autostop` |
| `build_reason` | `initiator` |
| `resource_type` | `template` |
| `resource_type` | `template_version` |
| `resource_type` | `user` |
| `resource_type` | `workspace` |
| `resource_type` | `workspace_build` |
| `resource_type` | `git_ssh_key` |
| `resource_type` | `auditable_group` |
None
## codersdk.CreateTokenRequest
@@ -2328,19 +1841,6 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o
"user": {}
},
"agent_stat_refresh_interval": 0,
"ai": {
"value": {
"providers": [
{
"base_url": "string",
"models": [
"string"
],
"type": "string"
}
]
}
},
"allow_workspace_renames": true,
"autobuild_poll_interval": 0,
"browser_only": true,
@@ -2829,19 +2329,6 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o
"user": {}
},
"agent_stat_refresh_interval": 0,
"ai": {
"value": {
"providers": [
{
"base_url": "string",
"models": [
"string"
],
"type": "string"
}
]
}
},
"allow_workspace_renames": true,
"autobuild_poll_interval": 0,
"browser_only": true,
@@ -3221,7 +2708,6 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o
| `address` | [serpent.HostPort](#serpenthostport) | false | | Deprecated: Use HTTPAddress or TLS.Address instead. |
| `agent_fallback_troubleshooting_url` | [serpent.URL](#serpenturl) | false | | |
| `agent_stat_refresh_interval` | integer | false | | |
| `ai` | [serpent.Struct-codersdk_AIConfig](#serpentstruct-codersdk_aiconfig) | false | | |
| `allow_workspace_renames` | boolean | false | | |
| `autobuild_poll_interval` | integer | false | | |
| `browser_only` | boolean | false | | |
@@ -3511,8 +2997,6 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o
| `notifications` |
| `workspace-usage` |
| `web-push` |
| `workspace-prebuilds` |
| `agentic-chat` |
## codersdk.ExternalAuth
@@ -4152,44 +3636,6 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith
|-------------------------------|
| `REQUIRED_TEMPLATE_VARIABLES` |
## codersdk.LanguageModel
```json
{
"display_name": "string",
"id": "string",
"provider": "string"
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
|----------------|--------|----------|--------------|-------------------------------------------------------------------|
| `display_name` | string | false | | |
| `id` | string | false | | ID is used by the provider to identify the LLM. |
| `provider` | string | false | | Provider is the provider of the LLM. e.g. openai, anthropic, etc. |
## codersdk.LanguageModelConfig
```json
{
"models": [
{
"display_name": "string",
"id": "string",
"provider": "string"
}
]
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
|----------|-----------------------------------------------------------|----------|--------------|-------------|
| `models` | array of [codersdk.LanguageModel](#codersdklanguagemodel) | false | | |
## codersdk.License
```json
@@ -6307,7 +5753,6 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith
| `assign_org_role` |
| `assign_role` |
| `audit_log` |
| `chat` |
| `crypto_key` |
| `debug_info` |
| `deployment_config` |
@@ -12269,30 +11714,6 @@ None
|---------|-----------------------------------------------------|----------|--------------|-------------|
| `value` | array of [codersdk.LinkConfig](#codersdklinkconfig) | false | | |
## serpent.Struct-codersdk_AIConfig
```json
{
"value": {
"providers": [
{
"base_url": "string",
"models": [
"string"
],
"type": "string"
}
]
}
}
```
### Properties
| Name | Type | Required | Restrictions | Description |
|---------|----------------------------------------|----------|--------------|-------------|
| `value` | [codersdk.AIConfig](#codersdkaiconfig) | false | | |
## serpent.URL
```json
+11
View File
@@ -1615,6 +1615,17 @@ Enable Coder Inbox.
The upper limit of attempts to send a notification.
### --workspace-prebuilds-reconciliation-interval
| | |
|-------------|-----------------------------------------------------------------|
| Type | <code>duration</code> |
| Environment | <code>$CODER_WORKSPACE_PREBUILDS_RECONCILIATION_INTERVAL</code> |
| YAML | <code>workspace_prebuilds.reconciliation_interval</code> |
| Default | <code>15s</code> |
How often to reconcile workspace prebuilds state.
### --hide-ai-tasks
| | |
+6
View File
@@ -678,6 +678,12 @@ workspaces stopping during the day due to template scheduling.
must be *. Only one hour and minute can be specified (ranges or comma
separated values are not supported).
WORKSPACE PREBUILDS OPTIONS:
Configure how workspace prebuilds behave.
--workspace-prebuilds-reconciliation-interval duration, $CODER_WORKSPACE_PREBUILDS_RECONCILIATION_INTERVAL (default: 15s)
How often to reconcile workspace prebuilds state.
⚠️ DANGEROUS OPTIONS:
--dangerous-allow-path-app-sharing bool, $CODER_DANGEROUS_ALLOW_PATH_APP_SHARING
Allow workspace apps that are not served from subdomains to be shared.
+3 -10
View File
@@ -1150,16 +1150,9 @@ func (api *API) Authorize(r *http.Request, action policy.Action, object rbac.Obj
// nolint:revive // featureEnabled is a legit control flag.
func (api *API) setupPrebuilds(featureEnabled bool) (agplprebuilds.ReconciliationOrchestrator, agplprebuilds.Claimer) {
experimentEnabled := api.AGPL.Experiments.Enabled(codersdk.ExperimentWorkspacePrebuilds)
if !experimentEnabled || !featureEnabled {
levelFn := api.Logger.Debug
// If the experiment is enabled but the license does not entitle the feature, operators should be warned.
if !featureEnabled {
levelFn = api.Logger.Warn
}
levelFn(context.Background(), "prebuilds not enabled; ensure you have a premium license and the 'workspace-prebuilds' experiment set",
slog.F("experiment_enabled", experimentEnabled), slog.F("feature_enabled", featureEnabled))
if !featureEnabled {
api.Logger.Warn(context.Background(), "prebuilds not enabled; ensure you have a premium license",
slog.F("feature_enabled", featureEnabled))
return agplprebuilds.DefaultReconciler, agplprebuilds.DefaultClaimer
}
+10 -29
View File
@@ -260,34 +260,19 @@ func TestEntitlements_Prebuilds(t *testing.T) {
t.Parallel()
cases := []struct {
name string
experimentEnabled bool
featureEnabled bool
expectedEnabled bool
name string
featureEnabled bool
expectedEnabled bool
}{
{
name: "Fully enabled",
featureEnabled: true,
experimentEnabled: true,
expectedEnabled: true,
name: "Feature enabled",
featureEnabled: true,
expectedEnabled: true,
},
{
name: "Feature disabled",
featureEnabled: false,
experimentEnabled: true,
expectedEnabled: false,
},
{
name: "Experiment disabled",
featureEnabled: true,
experimentEnabled: false,
expectedEnabled: false,
},
{
name: "Fully disabled",
featureEnabled: false,
experimentEnabled: false,
expectedEnabled: false,
name: "Feature disabled",
featureEnabled: false,
expectedEnabled: false,
},
}
@@ -302,11 +287,7 @@ func TestEntitlements_Prebuilds(t *testing.T) {
_, _, api, _ := coderdenttest.NewWithAPI(t, &coderdenttest.Options{
Options: &coderdtest.Options{
DeploymentValues: coderdtest.DeploymentValues(t, func(values *codersdk.DeploymentValues) {
if tc.experimentEnabled {
values.Experiments = serpent.StringArray{string(codersdk.ExperimentWorkspacePrebuilds)}
}
}),
DeploymentValues: coderdtest.DeploymentValues(t),
},
EntitlementsUpdateInterval: time.Second,
@@ -302,6 +302,57 @@ func TestDynamicParameterBuild(t *testing.T) {
require.ErrorContains(t, err, "Number must be between 0 and 10")
})
})
t.Run("ImmutableValidation", func(t *testing.T) {
t.Parallel()
// NewImmutable tests the case where a new immutable parameter is added to a template
// after a workspace has been created with an older version of the template.
// The test tries to delete the workspace, which should succeed.
t.Run("NewImmutable", func(t *testing.T) {
t.Parallel()
ctx := testutil.Context(t, testutil.WaitShort)
// Start with a new template that has 0 parameters
empty, _ := coderdtest.DynamicParameterTemplate(t, templateAdmin, orgID, coderdtest.DynamicParameterTemplateParams{
MainTF: string(must(os.ReadFile("testdata/parameters/none/main.tf"))),
})
// Create the workspace with 0 parameters
wrk, err := templateAdmin.CreateUserWorkspace(ctx, codersdk.Me, codersdk.CreateWorkspaceRequest{
TemplateID: empty.ID,
Name: coderdtest.RandomUsername(t),
RichParameterValues: []codersdk.WorkspaceBuildParameter{},
})
require.NoError(t, err)
coderdtest.AwaitWorkspaceBuildJobCompleted(t, templateAdmin, wrk.LatestBuild.ID)
// Update the template with a new immutable parameter
_, immutable := coderdtest.DynamicParameterTemplate(t, templateAdmin, orgID, coderdtest.DynamicParameterTemplateParams{
MainTF: string(must(os.ReadFile("testdata/parameters/immutable/main.tf"))),
TemplateID: empty.ID,
})
bld, err := templateAdmin.CreateWorkspaceBuild(ctx, wrk.ID, codersdk.CreateWorkspaceBuildRequest{
TemplateVersionID: immutable.ID, // Use the new template version with the immutable parameter
Transition: codersdk.WorkspaceTransitionDelete,
DryRun: false,
})
require.NoError(t, err)
coderdtest.AwaitWorkspaceBuildJobCompleted(t, templateAdmin, bld.ID)
// Verify the immutable parameter is set on the workspace build
params, err := templateAdmin.WorkspaceBuildParameters(ctx, bld.ID)
require.NoError(t, err)
require.Len(t, params, 1)
require.Equal(t, "Hello World", params[0].Value)
// Verify the workspace is deleted
deleted, err := templateAdmin.DeletedWorkspace(ctx, wrk.ID)
require.NoError(t, err)
require.Equal(t, wrk.ID, deleted.ID, "workspace should be deleted")
})
})
}
// TestDynamicParameterTemplate uses a template with some dynamic elements, and
+16
View File
@@ -0,0 +1,16 @@
terraform {
required_providers {
coder = {
source = "coder/coder"
}
}
}
data "coder_workspace_owner" "me" {}
data "coder_parameter" "immutable" {
name = "immutable"
type = "string"
mutable = false
default = "Hello World"
}
+10
View File
@@ -0,0 +1,10 @@
terraform {
required_providers {
coder = {
source = "coder/coder"
}
}
}
data "coder_workspace_owner" "me" {}
@@ -112,7 +112,6 @@ func TestReinitializeAgent(t *testing.T) {
Pubsub: ps,
DeploymentValues: coderdtest.DeploymentValues(t, func(dv *codersdk.DeploymentValues) {
dv.Prebuilds.ReconciliationInterval = serpent.Duration(time.Second)
dv.Experiments.Append(string(codersdk.ExperimentWorkspacePrebuilds))
}),
},
LicenseOptions: &coderdenttest.LicenseOptions{
+795 -4
View File
@@ -10,10 +10,17 @@ import (
"os"
"os/exec"
"path/filepath"
"strings"
"sync/atomic"
"testing"
"time"
"github.com/prometheus/client_golang/prometheus"
"github.com/coder/coder/v2/coderd/files"
agplprebuilds "github.com/coder/coder/v2/coderd/prebuilds"
"github.com/coder/coder/v2/enterprise/coderd/prebuilds"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -531,10 +538,7 @@ func TestCreateUserWorkspace(t *testing.T) {
client, db, user := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{
Options: &coderdtest.Options{
DeploymentValues: coderdtest.DeploymentValues(t, func(dv *codersdk.DeploymentValues) {
err := dv.Experiments.Append(string(codersdk.ExperimentWorkspacePrebuilds))
require.NoError(t, err)
}),
DeploymentValues: coderdtest.DeploymentValues(t),
},
LicenseOptions: &coderdenttest.LicenseOptions{
Features: license.Features{
@@ -1716,6 +1720,793 @@ func TestTemplateDoesNotAllowUserAutostop(t *testing.T) {
})
}
func TestExecutorPrebuilds(t *testing.T) {
t.Parallel()
if !dbtestutil.WillUsePostgres() {
t.Skip("this test requires postgres")
}
getRunningPrebuilds := func(
t *testing.T,
ctx context.Context,
db database.Store,
prebuildInstances int,
) []database.GetRunningPrebuiltWorkspacesRow {
t.Helper()
var runningPrebuilds []database.GetRunningPrebuiltWorkspacesRow
testutil.Eventually(ctx, t, func(context.Context) bool {
rows, err := db.GetRunningPrebuiltWorkspaces(ctx)
if err != nil {
return false
}
for _, row := range rows {
runningPrebuilds = append(runningPrebuilds, row)
agents, err := db.GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx, row.ID)
if err != nil {
return false
}
for _, agent := range agents {
err = db.UpdateWorkspaceAgentLifecycleStateByID(ctx, database.UpdateWorkspaceAgentLifecycleStateByIDParams{
ID: agent.ID,
LifecycleState: database.WorkspaceAgentLifecycleStateReady,
StartedAt: sql.NullTime{Time: time.Now().Add(time.Hour), Valid: true},
ReadyAt: sql.NullTime{Time: time.Now().Add(-1 * time.Hour), Valid: true},
})
if err != nil {
return false
}
}
}
t.Logf("found %d running prebuilds so far, want %d", len(runningPrebuilds), prebuildInstances)
return len(runningPrebuilds) == prebuildInstances
}, testutil.IntervalSlow, "prebuilds not running")
return runningPrebuilds
}
runReconciliationLoop := func(
t *testing.T,
ctx context.Context,
db database.Store,
reconciler *prebuilds.StoreReconciler,
presets []codersdk.Preset,
) {
t.Helper()
state, err := reconciler.SnapshotState(ctx, db)
require.NoError(t, err)
ps, err := state.FilterByPreset(presets[0].ID)
require.NoError(t, err)
require.NotNil(t, ps)
actions, err := reconciler.CalculateActions(ctx, *ps)
require.NoError(t, err)
require.NotNil(t, actions)
require.NoError(t, reconciler.ReconcilePreset(ctx, *ps))
}
claimPrebuild := func(
t *testing.T,
ctx context.Context,
client *codersdk.Client,
userClient *codersdk.Client,
username string,
version codersdk.TemplateVersion,
presetID uuid.UUID,
) codersdk.Workspace {
t.Helper()
workspaceName := strings.ReplaceAll(testutil.GetRandomName(t), "_", "-")
userWorkspace, err := userClient.CreateUserWorkspace(ctx, username, codersdk.CreateWorkspaceRequest{
TemplateVersionID: version.ID,
Name: workspaceName,
TemplateVersionPresetID: presetID,
})
require.NoError(t, err)
build := coderdtest.AwaitWorkspaceBuildJobCompleted(t, userClient, userWorkspace.LatestBuild.ID)
require.Equal(t, build.Job.Status, codersdk.ProvisionerJobSucceeded)
workspace := coderdtest.MustWorkspace(t, client, userWorkspace.ID)
assert.Equal(t, codersdk.WorkspaceTransitionStart, workspace.LatestBuild.Transition)
return workspace
}
// Prebuilt workspaces should not be autostopped based on the default TTL.
// This test ensures that DefaultTTLMillis is ignored while the workspace is in a prebuild state.
// Once the workspace is claimed, the default autostop timer should take effect.
t.Run("DefaultTTLOnlyTriggersAfterClaim", func(t *testing.T) {
t.Parallel()
// Set the clock to Monday, January 1st, 2024 at 8:00 AM UTC to keep the test deterministic
clock := quartz.NewMock(t)
clock.Set(time.Date(2024, 1, 1, 8, 0, 0, 0, time.UTC))
// Setup
ctx := testutil.Context(t, testutil.WaitSuperLong)
db, pb := dbtestutil.NewDB(t, dbtestutil.WithDumpOnFailure())
logger := testutil.Logger(t)
tickCh := make(chan time.Time)
statsCh := make(chan autobuild.Stats)
notificationsNoop := notifications.NewNoopEnqueuer()
client, _, api, owner := coderdenttest.NewWithAPI(t, &coderdenttest.Options{
Options: &coderdtest.Options{
Database: db,
Pubsub: pb,
AutobuildTicker: tickCh,
IncludeProvisionerDaemon: true,
AutobuildStats: statsCh,
Clock: clock,
TemplateScheduleStore: schedule.NewEnterpriseTemplateScheduleStore(
agplUserQuietHoursScheduleStore(),
notificationsNoop,
logger,
clock,
),
},
LicenseOptions: &coderdenttest.LicenseOptions{
Features: license.Features{codersdk.FeatureAdvancedTemplateScheduling: 1},
},
})
// Setup Prebuild reconciler
cache := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{})
reconciler := prebuilds.NewStoreReconciler(
db, pb, cache,
codersdk.PrebuildsConfig{},
logger,
clock,
prometheus.NewRegistry(),
notificationsNoop,
)
var claimer agplprebuilds.Claimer = prebuilds.NewEnterpriseClaimer(db)
api.AGPL.PrebuildsClaimer.Store(&claimer)
// Setup user, template and template version with a preset with 1 prebuild instance
prebuildInstances := int32(1)
ttlTime := 2 * time.Hour
userClient, user := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleMember())
version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, templateWithAgentAndPresetsWithPrebuilds(prebuildInstances))
coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID, func(ctr *codersdk.CreateTemplateRequest) {
// Set a template level TTL to trigger the autostop
// Template level TTL can only be set if autostop is disabled for users
ctr.AllowUserAutostop = ptr.Ref[bool](false)
ctr.DefaultTTLMillis = ptr.Ref[int64](ttlTime.Milliseconds())
})
presets, err := client.TemplateVersionPresets(ctx, version.ID)
require.NoError(t, err)
require.Len(t, presets, 1)
// Given: Reconciliation loop runs and starts prebuilt workspace
runReconciliationLoop(t, ctx, db, reconciler, presets)
runningPrebuilds := getRunningPrebuilds(t, ctx, db, int(prebuildInstances))
require.Len(t, runningPrebuilds, int(prebuildInstances))
// Given: a running prebuilt workspace with a deadline, ready to be claimed
prebuild := coderdtest.MustWorkspace(t, client, runningPrebuilds[0].ID)
require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition)
require.NotZero(t, prebuild.LatestBuild.Deadline)
// When: the autobuild executor ticks *after* the deadline
next := prebuild.LatestBuild.Deadline.Time.Add(time.Minute)
clock.Set(next)
go func() {
tickCh <- next
}()
// Then: the prebuilt workspace should remain in a start transition
prebuildStats := <-statsCh
require.Len(t, prebuildStats.Errors, 0)
require.Len(t, prebuildStats.Transitions, 0)
require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition)
prebuild = coderdtest.MustWorkspace(t, client, prebuild.ID)
require.Equal(t, codersdk.BuildReasonInitiator, prebuild.LatestBuild.Reason)
// Given: a user claims the prebuilt workspace sometime later
clock.Set(clock.Now().Add(ttlTime))
workspace := claimPrebuild(t, ctx, client, userClient, user.Username, version, presets[0].ID)
require.Equal(t, prebuild.ID, workspace.ID)
// Workspace deadline must be ttlTime from the time it is claimed
require.True(t, workspace.LatestBuild.Deadline.Time.Equal(clock.Now().Add(ttlTime)))
// When: the autobuild executor ticks *after* the deadline
next = workspace.LatestBuild.Deadline.Time.Add(time.Minute)
clock.Set(next)
go func() {
tickCh <- next
close(tickCh)
}()
// Then: the workspace should be stopped
workspaceStats := <-statsCh
require.Len(t, workspaceStats.Errors, 0)
require.Len(t, workspaceStats.Transitions, 1)
require.Contains(t, workspaceStats.Transitions, workspace.ID)
require.Equal(t, database.WorkspaceTransitionStop, workspaceStats.Transitions[workspace.ID])
workspace = coderdtest.MustWorkspace(t, client, workspace.ID)
require.Equal(t, codersdk.BuildReasonAutostop, workspace.LatestBuild.Reason)
})
// Prebuild workspaces should not follow the autostop schedule.
// This test verifies that AutostopRequirement (autostop schedule) is ignored while the workspace is a prebuild.
// After being claimed, the workspace should be stopped according to the autostop schedule.
t.Run("AutostopScheduleOnlyTriggersAfterClaim", func(t *testing.T) {
t.Parallel()
cases := []struct {
name string
isClaimedBeforeDeadline bool
}{
// If the prebuild is claimed before the scheduled deadline,
// the claimed workspace should inherit and respect that same deadline.
{
name: "ClaimedBeforeDeadline_UsesSameDeadline",
isClaimedBeforeDeadline: true,
},
// If the prebuild is claimed after the scheduled deadline,
// the workspace should not stop immediately, but instead respect the next
// valid scheduled deadline (the next day).
{
name: "ClaimedAfterDeadline_SchedulesForNextDay",
isClaimedBeforeDeadline: false,
},
}
for _, tc := range cases {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
// Set the clock to Monday, January 1st, 2024 at 8:00 AM UTC to keep the test deterministic
clock := quartz.NewMock(t)
clock.Set(time.Date(2024, 1, 1, 8, 0, 0, 0, time.UTC))
// Setup
ctx := testutil.Context(t, testutil.WaitSuperLong)
db, pb := dbtestutil.NewDB(t, dbtestutil.WithDumpOnFailure())
logger := testutil.Logger(t)
tickCh := make(chan time.Time)
statsCh := make(chan autobuild.Stats)
notificationsNoop := notifications.NewNoopEnqueuer()
client, _, api, owner := coderdenttest.NewWithAPI(t, &coderdenttest.Options{
Options: &coderdtest.Options{
Database: db,
Pubsub: pb,
AutobuildTicker: tickCh,
IncludeProvisionerDaemon: true,
AutobuildStats: statsCh,
Clock: clock,
TemplateScheduleStore: schedule.NewEnterpriseTemplateScheduleStore(
agplUserQuietHoursScheduleStore(),
notificationsNoop,
logger,
clock,
),
},
LicenseOptions: &coderdenttest.LicenseOptions{
Features: license.Features{codersdk.FeatureAdvancedTemplateScheduling: 1},
},
})
// Setup Prebuild reconciler
cache := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{})
reconciler := prebuilds.NewStoreReconciler(
db, pb, cache,
codersdk.PrebuildsConfig{},
logger,
clock,
prometheus.NewRegistry(),
notificationsNoop,
)
var claimer agplprebuilds.Claimer = prebuilds.NewEnterpriseClaimer(db)
api.AGPL.PrebuildsClaimer.Store(&claimer)
// Setup user, template and template version with a preset with 1 prebuild instance
prebuildInstances := int32(1)
userClient, user := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleMember())
version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, templateWithAgentAndPresetsWithPrebuilds(prebuildInstances))
coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID, func(ctr *codersdk.CreateTemplateRequest) {
// Set a template level Autostop schedule to trigger the autostop daily
ctr.AutostopRequirement = ptr.Ref[codersdk.TemplateAutostopRequirement](
codersdk.TemplateAutostopRequirement{
DaysOfWeek: []string{"monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"},
Weeks: 1,
})
})
presets, err := client.TemplateVersionPresets(ctx, version.ID)
require.NoError(t, err)
require.Len(t, presets, 1)
// Given: Reconciliation loop runs and starts prebuilt workspace
runReconciliationLoop(t, ctx, db, reconciler, presets)
runningPrebuilds := getRunningPrebuilds(t, ctx, db, int(prebuildInstances))
require.Len(t, runningPrebuilds, int(prebuildInstances))
// Given: a running prebuilt workspace with a deadline, ready to be claimed
prebuild := coderdtest.MustWorkspace(t, client, runningPrebuilds[0].ID)
require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition)
require.NotZero(t, prebuild.LatestBuild.Deadline)
next := clock.Now()
if tc.isClaimedBeforeDeadline {
// When: the autobuild executor ticks *before* the deadline:
next = next.Add(time.Minute)
} else {
// When: the autobuild executor ticks *after* the deadline:
next = next.Add(24 * time.Hour)
}
clock.Set(next)
go func() {
tickCh <- next
}()
// Then: the prebuilt workspace should remain in a start transition
prebuildStats := <-statsCh
require.Len(t, prebuildStats.Errors, 0)
require.Len(t, prebuildStats.Transitions, 0)
require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition)
prebuild = coderdtest.MustWorkspace(t, client, prebuild.ID)
require.Equal(t, codersdk.BuildReasonInitiator, prebuild.LatestBuild.Reason)
// Given: a user claims the prebuilt workspace
workspace := claimPrebuild(t, ctx, client, userClient, user.Username, version, presets[0].ID)
require.Equal(t, prebuild.ID, workspace.ID)
if tc.isClaimedBeforeDeadline {
// Then: the claimed workspace should inherit and respect that same deadline.
require.True(t, workspace.LatestBuild.Deadline.Time.Equal(prebuild.LatestBuild.Deadline.Time))
} else {
// Then: the claimed workspace should respect the next valid scheduled deadline (next day).
require.True(t, workspace.LatestBuild.Deadline.Time.Equal(clock.Now().Truncate(24*time.Hour).Add(24*time.Hour)))
}
// When: the autobuild executor ticks *after* the deadline:
next = workspace.LatestBuild.Deadline.Time.Add(time.Minute)
clock.Set(next)
go func() {
tickCh <- next
close(tickCh)
}()
// Then: the workspace should be stopped
workspaceStats := <-statsCh
require.Len(t, workspaceStats.Errors, 0)
require.Len(t, workspaceStats.Transitions, 1)
require.Contains(t, workspaceStats.Transitions, workspace.ID)
require.Equal(t, database.WorkspaceTransitionStop, workspaceStats.Transitions[workspace.ID])
workspace = coderdtest.MustWorkspace(t, client, workspace.ID)
require.Equal(t, codersdk.BuildReasonAutostop, workspace.LatestBuild.Reason)
})
}
})
// Prebuild workspaces should not follow the autostart schedule.
// This test verifies that AutostartRequirement (autostart schedule) is ignored while the workspace is a prebuild.
t.Run("AutostartScheduleOnlyTriggersAfterClaim", func(t *testing.T) {
t.Parallel()
// Set the clock to dbtime.Now() to match the workspace build's CreatedAt
clock := quartz.NewMock(t)
clock.Set(dbtime.Now())
// Setup
ctx := testutil.Context(t, testutil.WaitSuperLong)
db, pb := dbtestutil.NewDB(t, dbtestutil.WithDumpOnFailure())
logger := testutil.Logger(t)
tickCh := make(chan time.Time)
statsCh := make(chan autobuild.Stats)
notificationsNoop := notifications.NewNoopEnqueuer()
client, _, api, owner := coderdenttest.NewWithAPI(t, &coderdenttest.Options{
Options: &coderdtest.Options{
Database: db,
Pubsub: pb,
AutobuildTicker: tickCh,
IncludeProvisionerDaemon: true,
AutobuildStats: statsCh,
Clock: clock,
TemplateScheduleStore: schedule.NewEnterpriseTemplateScheduleStore(
agplUserQuietHoursScheduleStore(),
notificationsNoop,
logger,
clock,
),
},
LicenseOptions: &coderdenttest.LicenseOptions{
Features: license.Features{codersdk.FeatureAdvancedTemplateScheduling: 1},
},
})
// Setup Prebuild reconciler
cache := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{})
reconciler := prebuilds.NewStoreReconciler(
db, pb, cache,
codersdk.PrebuildsConfig{},
logger,
clock,
prometheus.NewRegistry(),
notificationsNoop,
)
var claimer agplprebuilds.Claimer = prebuilds.NewEnterpriseClaimer(db)
api.AGPL.PrebuildsClaimer.Store(&claimer)
// Setup user, template and template version with a preset with 1 prebuild instance
prebuildInstances := int32(1)
userClient, user := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleMember())
version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, templateWithAgentAndPresetsWithPrebuilds(prebuildInstances))
coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID, func(ctr *codersdk.CreateTemplateRequest) {
// Set a template level Autostart schedule to trigger the autostart daily
ctr.AllowUserAutostart = ptr.Ref[bool](true)
ctr.AutostartRequirement = &codersdk.TemplateAutostartRequirement{DaysOfWeek: codersdk.AllDaysOfWeek}
})
presets, err := client.TemplateVersionPresets(ctx, version.ID)
require.NoError(t, err)
require.Len(t, presets, 1)
// Given: Reconciliation loop runs and starts prebuilt workspace
runReconciliationLoop(t, ctx, db, reconciler, presets)
runningPrebuilds := getRunningPrebuilds(t, ctx, db, int(prebuildInstances))
require.Len(t, runningPrebuilds, int(prebuildInstances))
// Given: prebuilt workspace has autostart schedule daily at midnight
prebuild := coderdtest.MustWorkspace(t, client, runningPrebuilds[0].ID)
sched, err := cron.Weekly("CRON_TZ=UTC 0 0 * * *")
require.NoError(t, err)
err = client.UpdateWorkspaceAutostart(ctx, prebuild.ID, codersdk.UpdateWorkspaceAutostartRequest{
Schedule: ptr.Ref(sched.String()),
})
require.NoError(t, err)
// Given: prebuilt workspace is stopped
prebuild = coderdtest.MustTransitionWorkspace(t, client, prebuild.ID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionStop)
coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, prebuild.LatestBuild.ID)
// Tick at the next scheduled time after the prebuilds LatestBuild.CreatedAt,
// since the next allowed autostart is calculated starting from that point.
// When: the autobuild executor ticks after the scheduled time
go func() {
tickCh <- sched.Next(prebuild.LatestBuild.CreatedAt).Add(time.Minute)
}()
// Then: the prebuilt workspace should remain in a stop transition
prebuildStats := <-statsCh
require.Len(t, prebuildStats.Errors, 0)
require.Len(t, prebuildStats.Transitions, 0)
require.Equal(t, codersdk.WorkspaceTransitionStop, prebuild.LatestBuild.Transition)
prebuild = coderdtest.MustWorkspace(t, client, prebuild.ID)
require.Equal(t, codersdk.BuildReasonInitiator, prebuild.LatestBuild.Reason)
// Given: a prebuilt workspace that is running and ready to be claimed
prebuild = coderdtest.MustTransitionWorkspace(t, client, prebuild.ID, codersdk.WorkspaceTransitionStop, codersdk.WorkspaceTransitionStart)
coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, prebuild.LatestBuild.ID)
// Make sure the workspace's agent is again ready
getRunningPrebuilds(t, ctx, db, int(prebuildInstances))
// Given: a user claims the prebuilt workspace
workspace := claimPrebuild(t, ctx, client, userClient, user.Username, version, presets[0].ID)
require.Equal(t, prebuild.ID, workspace.ID)
require.NotNil(t, workspace.NextStartAt)
// Given: workspace is stopped
workspace = coderdtest.MustTransitionWorkspace(t, client, workspace.ID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionStop)
coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID)
// Then: the claimed workspace should inherit and respect that same NextStartAt
require.True(t, workspace.NextStartAt.Equal(*prebuild.NextStartAt))
// Tick at the next scheduled time after the prebuilds LatestBuild.CreatedAt,
// since the next allowed autostart is calculated starting from that point.
// When: the autobuild executor ticks after the scheduled time
go func() {
tickCh <- sched.Next(prebuild.LatestBuild.CreatedAt).Add(time.Minute)
}()
// Then: the workspace should have a NextStartAt equal to the next autostart schedule
workspaceStats := <-statsCh
require.Len(t, workspaceStats.Errors, 0)
require.Len(t, workspaceStats.Transitions, 1)
workspace = coderdtest.MustWorkspace(t, client, workspace.ID)
require.NotNil(t, workspace.NextStartAt)
require.Equal(t, sched.Next(clock.Now()), workspace.NextStartAt.UTC())
})
// Prebuild workspaces should not transition to dormant when the inactive TTL is reached.
// This test verifies that TimeTilDormantMillis is ignored while the workspace is a prebuild.
// After being claimed, the workspace should become dormant according to the configured inactivity period.
t.Run("DormantOnlyAfterClaimed", func(t *testing.T) {
t.Parallel()
// Set the clock to Monday, January 1st, 2024 at 8:00 AM UTC to keep the test deterministic
clock := quartz.NewMock(t)
clock.Set(time.Date(2024, 1, 1, 8, 0, 0, 0, time.UTC))
// Setup
ctx := testutil.Context(t, testutil.WaitSuperLong)
db, pb := dbtestutil.NewDB(t, dbtestutil.WithDumpOnFailure())
logger := testutil.Logger(t)
tickCh := make(chan time.Time)
statsCh := make(chan autobuild.Stats)
notificationsNoop := notifications.NewNoopEnqueuer()
client, _, api, owner := coderdenttest.NewWithAPI(t, &coderdenttest.Options{
Options: &coderdtest.Options{
Database: db,
Pubsub: pb,
AutobuildTicker: tickCh,
IncludeProvisionerDaemon: true,
AutobuildStats: statsCh,
Clock: clock,
TemplateScheduleStore: schedule.NewEnterpriseTemplateScheduleStore(
agplUserQuietHoursScheduleStore(),
notificationsNoop,
logger,
clock,
),
},
LicenseOptions: &coderdenttest.LicenseOptions{
Features: license.Features{codersdk.FeatureAdvancedTemplateScheduling: 1},
},
})
// Setup Prebuild reconciler
cache := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{})
reconciler := prebuilds.NewStoreReconciler(
db, pb, cache,
codersdk.PrebuildsConfig{},
logger,
clock,
prometheus.NewRegistry(),
notificationsNoop,
)
var claimer agplprebuilds.Claimer = prebuilds.NewEnterpriseClaimer(db)
api.AGPL.PrebuildsClaimer.Store(&claimer)
// Setup user, template and template version with a preset with 1 prebuild instance
prebuildInstances := int32(1)
inactiveTTL := 2 * time.Hour
userClient, user := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleMember())
version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, templateWithAgentAndPresetsWithPrebuilds(prebuildInstances))
coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID, func(ctr *codersdk.CreateTemplateRequest) {
// Set a template level inactive TTL to trigger dormancy
ctr.TimeTilDormantMillis = ptr.Ref[int64](inactiveTTL.Milliseconds())
})
presets, err := client.TemplateVersionPresets(ctx, version.ID)
require.NoError(t, err)
require.Len(t, presets, 1)
// Given: reconciliation loop runs and starts prebuilt workspace
runReconciliationLoop(t, ctx, db, reconciler, presets)
runningPrebuilds := getRunningPrebuilds(t, ctx, db, int(prebuildInstances))
require.Len(t, runningPrebuilds, int(prebuildInstances))
// Given: a running prebuilt workspace, ready to be claimed
prebuild := coderdtest.MustWorkspace(t, client, runningPrebuilds[0].ID)
require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition)
// When: the autobuild executor ticks *after* the inactive TTL
go func() {
tickCh <- prebuild.LastUsedAt.Add(inactiveTTL).Add(time.Minute)
}()
// Then: the prebuilt workspace should remain in a start transition
prebuildStats := <-statsCh
require.Len(t, prebuildStats.Errors, 0)
require.Len(t, prebuildStats.Transitions, 0)
require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition)
prebuild = coderdtest.MustWorkspace(t, client, prebuild.ID)
require.Equal(t, codersdk.BuildReasonInitiator, prebuild.LatestBuild.Reason)
// Given: a user claims the prebuilt workspace sometime later
clock.Set(clock.Now().Add(inactiveTTL))
workspace := claimPrebuild(t, ctx, client, userClient, user.Username, version, presets[0].ID)
require.Equal(t, prebuild.ID, workspace.ID)
require.Nil(t, prebuild.DormantAt)
// When: the autobuild executor ticks *after* the inactive TTL
go func() {
tickCh <- prebuild.LastUsedAt.Add(inactiveTTL).Add(time.Minute)
close(tickCh)
}()
// Then: the workspace should transition to stopped state for breaching failure TTL
workspaceStats := <-statsCh
require.Len(t, workspaceStats.Errors, 0)
require.Len(t, workspaceStats.Transitions, 1)
require.Contains(t, workspaceStats.Transitions, workspace.ID)
require.Equal(t, database.WorkspaceTransitionStop, workspaceStats.Transitions[workspace.ID])
workspace = coderdtest.MustWorkspace(t, client, workspace.ID)
require.Equal(t, codersdk.BuildReasonDormancy, workspace.LatestBuild.Reason)
require.NotNil(t, workspace.DormantAt)
})
// Prebuild workspaces should not be deleted when the failure TTL is reached.
// This test verifies that FailureTTLMillis is ignored while the workspace is a prebuild.
t.Run("FailureTTLOnlyAfterClaimed", func(t *testing.T) {
t.Parallel()
// Set the clock to Monday, January 1st, 2024 at 8:00 AM UTC to keep the test deterministic
clock := quartz.NewMock(t)
clock.Set(time.Date(2024, 1, 1, 8, 0, 0, 0, time.UTC))
// Setup
ctx := testutil.Context(t, testutil.WaitSuperLong)
db, pb := dbtestutil.NewDB(t, dbtestutil.WithDumpOnFailure())
logger := testutil.Logger(t)
tickCh := make(chan time.Time)
statsCh := make(chan autobuild.Stats)
notificationsNoop := notifications.NewNoopEnqueuer()
client, _, api, owner := coderdenttest.NewWithAPI(t, &coderdenttest.Options{
Options: &coderdtest.Options{
Database: db,
Pubsub: pb,
AutobuildTicker: tickCh,
IncludeProvisionerDaemon: true,
AutobuildStats: statsCh,
Clock: clock,
TemplateScheduleStore: schedule.NewEnterpriseTemplateScheduleStore(
agplUserQuietHoursScheduleStore(),
notificationsNoop,
logger,
clock,
),
},
LicenseOptions: &coderdenttest.LicenseOptions{
Features: license.Features{
codersdk.FeatureAdvancedTemplateScheduling: 1,
},
},
})
// Setup Prebuild reconciler
cache := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{})
reconciler := prebuilds.NewStoreReconciler(
db, pb, cache,
codersdk.PrebuildsConfig{},
logger,
clock,
prometheus.NewRegistry(),
notificationsNoop,
)
var claimer agplprebuilds.Claimer = prebuilds.NewEnterpriseClaimer(db)
api.AGPL.PrebuildsClaimer.Store(&claimer)
// Setup user, template and template version with a preset with 1 prebuild instance
prebuildInstances := int32(1)
failureTTL := 2 * time.Hour
version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, templateWithFailedResponseAndPresetsWithPrebuilds(prebuildInstances))
coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID, func(ctr *codersdk.CreateTemplateRequest) {
// Set a template level Failure TTL to trigger workspace deletion
ctr.FailureTTLMillis = ptr.Ref[int64](failureTTL.Milliseconds())
})
presets, err := client.TemplateVersionPresets(ctx, version.ID)
require.NoError(t, err)
require.Len(t, presets, 1)
// Given: reconciliation loop runs and starts prebuilt workspace in failed state
runReconciliationLoop(t, ctx, db, reconciler, presets)
var failedWorkspaceBuilds []database.GetFailedWorkspaceBuildsByTemplateIDRow
require.Eventually(t, func() bool {
rows, err := db.GetFailedWorkspaceBuildsByTemplateID(ctx, database.GetFailedWorkspaceBuildsByTemplateIDParams{
TemplateID: template.ID,
})
if err != nil {
return false
}
failedWorkspaceBuilds = append(failedWorkspaceBuilds, rows...)
t.Logf("found %d failed prebuilds so far, want %d", len(failedWorkspaceBuilds), prebuildInstances)
return len(failedWorkspaceBuilds) == int(prebuildInstances)
}, testutil.WaitSuperLong, testutil.IntervalSlow)
require.Len(t, failedWorkspaceBuilds, int(prebuildInstances))
// Given: a failed prebuilt workspace
prebuild := coderdtest.MustWorkspace(t, client, failedWorkspaceBuilds[0].WorkspaceID)
require.Equal(t, codersdk.WorkspaceStatusFailed, prebuild.LatestBuild.Status)
// When: the autobuild executor ticks *after* the failure TTL
go func() {
tickCh <- prebuild.LatestBuild.Job.CompletedAt.Add(failureTTL * 2)
}()
// Then: the prebuilt workspace should remain in a start transition
prebuildStats := <-statsCh
require.Len(t, prebuildStats.Errors, 0)
require.Len(t, prebuildStats.Transitions, 0)
require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition)
prebuild = coderdtest.MustWorkspace(t, client, prebuild.ID)
require.Equal(t, codersdk.BuildReasonInitiator, prebuild.LatestBuild.Reason)
})
}
func templateWithAgentAndPresetsWithPrebuilds(desiredInstances int32) *echo.Responses {
return &echo.Responses{
Parse: echo.ParseComplete,
ProvisionPlan: []*proto.Response{
{
Type: &proto.Response_Plan{
Plan: &proto.PlanComplete{
Presets: []*proto.Preset{
{
Name: "preset-test",
Parameters: []*proto.PresetParameter{
{
Name: "k1",
Value: "v1",
},
},
Prebuild: &proto.Prebuild{
Instances: desiredInstances,
},
},
},
},
},
},
},
ProvisionApply: []*proto.Response{
{
Type: &proto.Response_Apply{
Apply: &proto.ApplyComplete{
Resources: []*proto.Resource{
{
Type: "compute",
Name: "main",
Agents: []*proto.Agent{
{
Name: "smith",
OperatingSystem: "linux",
Architecture: "i386",
},
},
},
},
},
},
},
},
}
}
func templateWithFailedResponseAndPresetsWithPrebuilds(desiredInstances int32) *echo.Responses {
return &echo.Responses{
Parse: echo.ParseComplete,
ProvisionPlan: []*proto.Response{
{
Type: &proto.Response_Plan{
Plan: &proto.PlanComplete{
Presets: []*proto.Preset{
{
Name: "preset-test",
Parameters: []*proto.PresetParameter{
{
Name: "k1",
Value: "v1",
},
},
Prebuild: &proto.Prebuild{
Instances: desiredInstances,
},
},
},
},
},
},
},
ProvisionApply: echo.ApplyFailed,
}
}
// TestWorkspaceTemplateParamsChange tests a workspace with a parameter that
// validation changes on apply. The params used in create workspace are invalid
// according to the static params on import.
+5 -5
View File
@@ -312,7 +312,7 @@ require (
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect
github.com/go-test/deep v1.1.0 // indirect
github.com/go-viper/mapstructure/v2 v2.2.1 // indirect
github.com/go-viper/mapstructure/v2 v2.3.0 // indirect
github.com/gobwas/glob v0.2.3 // indirect
github.com/gobwas/httphead v0.1.0 // indirect
github.com/gobwas/pool v0.2.1 // indirect
@@ -481,14 +481,11 @@ require (
)
require (
github.com/anthropics/anthropic-sdk-go v0.2.0-beta.3
github.com/coder/agentapi-sdk-go v0.0.0-20250505131810-560d1d88d225
github.com/coder/aisdk-go v0.0.9
github.com/coder/preview v1.0.1
github.com/fsnotify/fsnotify v1.9.0
github.com/kylecarbs/aisdk-go v0.0.8
github.com/mark3labs/mcp-go v0.32.0
github.com/openai/openai-go v0.1.0-beta.10
google.golang.org/genai v0.7.0
)
require (
@@ -505,6 +502,7 @@ require (
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.50.0 // indirect
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.50.0 // indirect
github.com/Masterminds/semver/v3 v3.3.1 // indirect
github.com/anthropics/anthropic-sdk-go v1.4.0 // indirect
github.com/aquasecurity/go-version v0.0.1 // indirect
github.com/aquasecurity/trivy v0.58.2 // indirect
github.com/aws/aws-sdk-go v1.55.7 // indirect
@@ -522,6 +520,7 @@ require (
github.com/klauspost/cpuid/v2 v2.2.10 // indirect
github.com/moby/sys/user v0.4.0 // indirect
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 // indirect
github.com/openai/openai-go v1.3.0 // indirect
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
github.com/samber/lo v1.50.0 // indirect
@@ -536,5 +535,6 @@ require (
go.opentelemetry.io/contrib/detectors/gcp v1.35.0 // indirect
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0 // indirect
go.opentelemetry.io/otel/sdk/metric v1.35.0 // indirect
google.golang.org/genai v1.10.0 // indirect
k8s.io/utils v0.0.0-20241210054802-24370beab758 // indirect
)
+10 -10
View File
@@ -720,8 +720,8 @@ github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7X
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8=
github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4=
github.com/anthropics/anthropic-sdk-go v0.2.0-beta.3 h1:b5t1ZJMvV/l99y4jbz7kRFdUp3BSDkI8EhSlHczivtw=
github.com/anthropics/anthropic-sdk-go v0.2.0-beta.3/go.mod h1:AapDW22irxK2PSumZiQXYUFvsdQgkwIWlpESweWZI/c=
github.com/anthropics/anthropic-sdk-go v1.4.0 h1:fU1jKxYbQdQDiEXCxeW5XZRIOwKevn/PMg8Ay1nnUx0=
github.com/anthropics/anthropic-sdk-go v1.4.0/go.mod h1:AapDW22irxK2PSumZiQXYUFvsdQgkwIWlpESweWZI/c=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0=
github.com/apache/arrow/go/v11 v11.0.0/go.mod h1:Eg5OsL5H+e299f7u5ssuXsuHQVEGC4xei5aX110hRiI=
@@ -897,6 +897,8 @@ github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f h1:C5bqEmzEPLsHm9Mv73l
github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=
github.com/coder/agentapi-sdk-go v0.0.0-20250505131810-560d1d88d225 h1:tRIViZ5JRmzdOEo5wUWngaGEFBG8OaE1o2GIHN5ujJ8=
github.com/coder/agentapi-sdk-go v0.0.0-20250505131810-560d1d88d225/go.mod h1:rNLVpYgEVeu1Zk29K64z6Od8RBP9DwqCu9OfCzh8MR4=
github.com/coder/aisdk-go v0.0.9 h1:Vzo/k2qwVGLTR10ESDeP2Ecek1SdPfZlEjtTfMveiVo=
github.com/coder/aisdk-go v0.0.9/go.mod h1:KF6/Vkono0FJJOtWtveh5j7yfNrSctVTpwgweYWSp5M=
github.com/coder/bubbletea v1.2.2-0.20241212190825-007a1cdb2c41 h1:SBN/DA63+ZHwuWwPHPYoCZ/KLAjHv5g4h2MS4f2/MTI=
github.com/coder/bubbletea v1.2.2-0.20241212190825-007a1cdb2c41/go.mod h1:I9ULxr64UaOSUv7hcb3nX4kowodJCVS7vt7VVJk/kW4=
github.com/coder/clistat v1.0.0 h1:MjiS7qQ1IobuSSgDnxcCSyBPESs44hExnh2TEqMcGnA=
@@ -1146,8 +1148,8 @@ github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpv
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
github.com/go-test/deep v1.1.0 h1:WOcxcdHcvdgThNXjw0t76K42FXTU7HpNQWHpA2HHNlg=
github.com/go-test/deep v1.1.0/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE=
github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss=
github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/go-viper/mapstructure/v2 v2.3.0 h1:27XbWsHIqhbdR5TIC911OfYvgSaW93HM+dX7970Q7jk=
github.com/go-viper/mapstructure/v2 v2.3.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/gobuffalo/flect v1.0.3 h1:xeWBM2nui+qnVvNM4S3foBhCAL2XgPU+a7FdpelbTq4=
github.com/gobuffalo/flect v1.0.3/go.mod h1:A5msMlrHtLqh9umBSnvabjsMrCcCpAyzglnDvkbYKHs=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
@@ -1470,8 +1472,6 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylecarbs/aisdk-go v0.0.8 h1:hnKVbLM6U8XqX3t5I26J8k5saXdra595bGt1HP0PvKA=
github.com/kylecarbs/aisdk-go v0.0.8/go.mod h1:3nAhClwRNo6ZfU44GrBZ8O2fCCrxJdaHb9JIz+P3LR8=
github.com/kylecarbs/chroma/v2 v2.0.0-20240401211003-9e036e0631f3 h1:Z9/bo5PSeMutpdiKYNt/TTSfGM1Ll0naj3QzYX9VxTc=
github.com/kylecarbs/chroma/v2 v2.0.0-20240401211003-9e036e0631f3/go.mod h1:BUGjjsD+ndS6eX37YgTchSEG+Jg9Jv1GiZs9sqPqztk=
github.com/kylecarbs/opencensus-go v0.23.1-0.20220307014935-4d0325a68f8b/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
@@ -1613,8 +1613,8 @@ github.com/open-telemetry/opentelemetry-collector-contrib/pkg/sampling v0.120.1
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/sampling v0.120.1/go.mod h1:01TvyaK8x640crO2iFwW/6CFCZgNsOvOGH3B5J239m0=
github.com/open-telemetry/opentelemetry-collector-contrib/processor/probabilisticsamplerprocessor v0.120.1 h1:TCyOus9tym82PD1VYtthLKMVMlVyRwtDI4ck4SR2+Ok=
github.com/open-telemetry/opentelemetry-collector-contrib/processor/probabilisticsamplerprocessor v0.120.1/go.mod h1:Z/S1brD5gU2Ntht/bHxBVnGxXKTvZDr0dNv/riUzPmY=
github.com/openai/openai-go v0.1.0-beta.10 h1:CknhGXe8aXQMRuqg255PFnWzgRY9nEryMxoNIBBM9tU=
github.com/openai/openai-go v0.1.0-beta.10/go.mod h1:g461MYGXEXBVdV5SaR/5tNzNbSfwTBBefwc+LlDCK0Y=
github.com/openai/openai-go v1.3.0 h1:lBpvgXxGHUufk9DNTguval40y2oK0GHZwgWQyUtjPIQ=
github.com/openai/openai-go v1.3.0/go.mod h1:g461MYGXEXBVdV5SaR/5tNzNbSfwTBBefwc+LlDCK0Y=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.1.1 h1:y0fUlFfIZhPF1W537XOLg0/fcx6zcHCJwooC2xJA040=
@@ -2495,8 +2495,8 @@ google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCID
google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
google.golang.org/genai v0.7.0 h1:TINBYXnP+K+D8b16LfVyb6XR3kdtieXy6nJsGoEXcBc=
google.golang.org/genai v0.7.0/go.mod h1:TyfOKRz/QyCaj6f/ZDt505x+YreXnY40l2I6k8TvgqY=
google.golang.org/genai v1.10.0 h1:ETP0Yksn5KUSEn5+ihMOnP3IqjZ+7Z4i0LjJslEXatI=
google.golang.org/genai v1.10.0/go.mod h1:TyfOKRz/QyCaj6f/ZDt505x+YreXnY40l2I6k8TvgqY=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+13
View File
@@ -20,6 +20,9 @@
# binary will be signed using ./sign_darwin.sh. Read that file for more details
# on the requirements.
#
# If the --sign-gpg parameter is specified, the output binary will be signed using ./sign_with_gpg.sh.
# Read that file for more details on the requirements.
#
# If the --agpl parameter is specified, builds only the AGPL-licensed code (no
# Coder enterprise features).
#
@@ -41,6 +44,7 @@ slim="${CODER_SLIM_BUILD:-0}"
agpl="${CODER_BUILD_AGPL:-0}"
sign_darwin="${CODER_SIGN_DARWIN:-0}"
sign_windows="${CODER_SIGN_WINDOWS:-0}"
sign_gpg="${CODER_SIGN_GPG:-0}"
boringcrypto=${CODER_BUILD_BORINGCRYPTO:-0}
dylib=0
windows_resources="${CODER_WINDOWS_RESOURCES:-0}"
@@ -85,6 +89,10 @@ while true; do
sign_windows=1
shift
;;
--sign-gpg)
sign_gpg=1
shift
;;
--boringcrypto)
boringcrypto=1
shift
@@ -319,4 +327,9 @@ if [[ "$sign_windows" == 1 ]] && [[ "$os" == "windows" ]]; then
execrelative ./sign_windows.sh "$output_path" 1>&2
fi
# Platform agnostic signing
if [[ "$sign_gpg" == 1 ]]; then
execrelative ./sign_with_gpg.sh "$output_path" 1>&2
fi
echo "$output_path"
+2 -19
View File
@@ -129,26 +129,9 @@ if [[ "$dry_run" == 0 ]] && [[ "${CODER_GPG_RELEASE_KEY_BASE64:-}" != "" ]]; the
log "--- Signing checksums file"
log
# Import the GPG key.
old_gnupg_home="${GNUPGHOME:-}"
gnupg_home_temp="$(mktemp -d)"
export GNUPGHOME="$gnupg_home_temp"
echo "$CODER_GPG_RELEASE_KEY_BASE64" | base64 -d | gpg --import 1>&2
# Sign the checksums file. This generates a file in the same directory and
# with the same name as the checksums file but ending in ".asc".
#
# We pipe `true` into `gpg` so that it never tries to be interactive (i.e.
# ask for a passphrase). The key we import above is not password protected.
true | gpg --detach-sign --armor "${temp_dir}/${checksum_file}" 1>&2
rm -rf "$gnupg_home_temp"
unset GNUPGHOME
if [[ "$old_gnupg_home" != "" ]]; then
export GNUPGHOME="$old_gnupg_home"
fi
execrelative ../sign_with_gpg.sh "${temp_dir}/${checksum_file}"
signed_checksum_path="${temp_dir}/${checksum_file}.asc"
if [[ ! -e "$signed_checksum_path" ]]; then
log "Signed checksum file not found: ${signed_checksum_path}"
log
+59
View File
@@ -0,0 +1,59 @@
#!/usr/bin/env bash

# This script signs a given binary using GPG.
# It expects the binary to be signed as the first argument.
#
# Usage: ./sign_with_gpg.sh path/to/binary
#
# On success, the input file will be signed using the GPG key and the signature
# output file will be moved to /site/out/bin/ (happens in the Makefile).
#
# Depends on the GPG utility. Requires the following environment variables to be set:
#  - $CODER_GPG_RELEASE_KEY_BASE64: The base64 encoded private key to use.

set -euo pipefail
# shellcheck source=scripts/lib.sh
source "$(dirname "${BASH_SOURCE[0]}")/lib.sh"

requiredenvs CODER_GPG_RELEASE_KEY_BASE64

# Use ${1:-} so a missing argument reaches our usage error below instead of
# aborting with an "unbound variable" message from `set -u`.
FILE_TO_SIGN="${1:-}"

if [[ -z "$FILE_TO_SIGN" ]]; then
	error "Usage: $0 <file_to_sign>"
fi

if [[ ! -f "$FILE_TO_SIGN" ]]; then
	error "File not found: $FILE_TO_SIGN"
fi

# Import the GPG key into a throwaway GPG home so we never touch the caller's
# real keyring. The previous GNUPGHOME (if any) is restored after signing.
old_gnupg_home="${GNUPGHOME:-}"
gnupg_home_temp="$(mktemp -d)"
export GNUPGHOME="$gnupg_home_temp"

# Ensure GPG uses the temporary directory.
echo "$CODER_GPG_RELEASE_KEY_BASE64" | base64 -d | gpg --homedir "$gnupg_home_temp" --import 1>&2

# Sign the binary. This generates a file in the same directory and
# with the same name as the binary but ending in ".asc".
#
# We pipe `true` into `gpg` so that it never tries to be interactive (i.e.
# ask for a passphrase). The key we import above is not password protected.
true | gpg --homedir "$gnupg_home_temp" --detach-sign --armor "$FILE_TO_SIGN" 1>&2

# Verify the signature and capture the exit status. The `|| ...` form is
# required: under `set -e`, a bare failing `gpg --verify` would terminate the
# script immediately, skipping both the cleanup and the error report below.
verification_result=0
gpg --homedir "$gnupg_home_temp" --verify "${FILE_TO_SIGN}.asc" "$FILE_TO_SIGN" 1>&2 || verification_result=$?

# Clean up the temporary GPG home and restore the original GNUPGHOME.
rm -rf "$gnupg_home_temp"
unset GNUPGHOME
if [[ "$old_gnupg_home" != "" ]]; then
	export GNUPGHOME="$old_gnupg_home"
fi

if [[ $verification_result -eq 0 ]]; then
	echo "${FILE_TO_SIGN}.asc"
else
	error "Signature verification failed!"
fi
+1 -3
View File
@@ -34,8 +34,6 @@
"update-emojis": "cp -rf ./node_modules/emoji-datasource-apple/img/apple/64/* ./static/emojis"
},
"dependencies": {
"@ai-sdk/provider-utils": "2.2.6",
"@ai-sdk/react": "1.2.6",
"@emoji-mart/data": "1.2.1",
"@emoji-mart/react": "1.1.1",
"@emotion/cache": "11.14.0",
@@ -104,13 +102,13 @@
"react-helmet-async": "2.0.5",
"react-markdown": "9.0.3",
"react-query": "npm:@tanstack/react-query@5.77.0",
"react-resizable-panels": "3.0.3",
"react-router-dom": "6.26.2",
"react-syntax-highlighter": "15.6.1",
"react-textarea-autosize": "8.5.9",
"react-virtualized-auto-sizer": "1.0.24",
"react-window": "1.8.11",
"recharts": "2.15.0",
"rehype-raw": "7.0.0",
"remark-gfm": "4.0.0",
"resize-observer-polyfill": "1.5.1",
"semver": "7.6.2",
+14 -216
View File
@@ -16,12 +16,6 @@ importers:
.:
dependencies:
'@ai-sdk/provider-utils':
specifier: 2.2.6
version: 2.2.6(zod@3.24.3)
'@ai-sdk/react':
specifier: 1.2.6
version: 1.2.6(react@18.3.1)(zod@3.24.3)
'@emoji-mart/data':
specifier: 1.2.1
version: 1.2.1
@@ -226,6 +220,9 @@ importers:
react-query:
specifier: npm:@tanstack/react-query@5.77.0
version: '@tanstack/react-query@5.77.0(react@18.3.1)'
react-resizable-panels:
specifier: 3.0.3
version: 3.0.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
react-router-dom:
specifier: 6.26.2
version: 6.26.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
@@ -244,9 +241,6 @@ importers:
recharts:
specifier: 2.15.0
version: 2.15.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
rehype-raw:
specifier: 7.0.0
version: 7.0.0
remark-gfm:
specifier: 4.0.0
version: 4.0.0
@@ -492,42 +486,6 @@ packages:
'@adobe/css-tools@4.4.1':
resolution: {integrity: sha512-12WGKBQzjUAI4ayyF4IAtfw2QR/IDoqk6jTddXDhtYTJF9ASmoE1zst7cVtP0aL/F1jUJL5r+JxKXKEgHNbEUQ==, tarball: https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.4.1.tgz}
'@ai-sdk/provider-utils@2.2.4':
resolution: {integrity: sha512-13sEGBxB6kgaMPGOgCLYibF6r8iv8mgjhuToFrOTU09bBxbFQd8ZoARarCfJN6VomCUbUvMKwjTBLb1vQnN+WA==, tarball: https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-2.2.4.tgz}
engines: {node: '>=18'}
peerDependencies:
zod: ^3.23.8
'@ai-sdk/provider-utils@2.2.6':
resolution: {integrity: sha512-sUlZ7Gnq84DCGWMQRIK8XVbkzIBnvPR1diV4v6JwPgpn5armnLI/j+rqn62MpLrU5ZCQZlDKl/Lw6ed3ulYqaA==, tarball: https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-2.2.6.tgz}
engines: {node: '>=18'}
peerDependencies:
zod: ^3.23.8
'@ai-sdk/provider@1.1.0':
resolution: {integrity: sha512-0M+qjp+clUD0R1E5eWQFhxEvWLNaOtGQRUaBn8CUABnSKredagq92hUS9VjOzGsTm37xLfpaxl97AVtbeOsHew==, tarball: https://registry.npmjs.org/@ai-sdk/provider/-/provider-1.1.0.tgz}
engines: {node: '>=18'}
'@ai-sdk/provider@1.1.2':
resolution: {integrity: sha512-ITdgNilJZwLKR7X5TnUr1BsQW6UTX5yFp0h66Nfx8XjBYkWD9W3yugr50GOz3CnE9m/U/Cd5OyEbTMI0rgi6ZQ==, tarball: https://registry.npmjs.org/@ai-sdk/provider/-/provider-1.1.2.tgz}
engines: {node: '>=18'}
'@ai-sdk/react@1.2.6':
resolution: {integrity: sha512-5BFChNbcYtcY9MBStcDev7WZRHf0NpTrk8yfSoedWctB3jfWkFd1HECBvdc8w3mUQshF2MumLHtAhRO7IFtGGQ==, tarball: https://registry.npmjs.org/@ai-sdk/react/-/react-1.2.6.tgz}
engines: {node: '>=18'}
peerDependencies:
react: ^18 || ^19 || ^19.0.0-rc
zod: ^3.23.8
peerDependenciesMeta:
zod:
optional: true
'@ai-sdk/ui-utils@1.2.5':
resolution: {integrity: sha512-XDgqnJcaCkDez7qolvk+PDbs/ceJvgkNkxkOlc9uDWqxfDJxtvCZ+14MP/1qr4IBwGIgKVHzMDYDXvqVhSWLzg==, tarball: https://registry.npmjs.org/@ai-sdk/ui-utils/-/ui-utils-1.2.5.tgz}
engines: {node: '>=18'}
peerDependencies:
zod: ^3.23.8
'@alloc/quick-lru@5.2.0':
resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==, tarball: https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz}
engines: {node: '>=10'}
@@ -4030,33 +3988,18 @@ packages:
resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==, tarball: https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz}
engines: {node: '>= 0.4'}
hast-util-from-parse5@8.0.3:
resolution: {integrity: sha512-3kxEVkEKt0zvcZ3hCRYI8rqrgwtlIOFMWkbclACvjlDw8Li9S2hk/d51OI0nr/gIpdMHNepwgOKqZ/sy0Clpyg==, tarball: https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-8.0.3.tgz}
hast-util-parse-selector@2.2.5:
resolution: {integrity: sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==, tarball: https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz}
hast-util-parse-selector@4.0.0:
resolution: {integrity: sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==, tarball: https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz}
hast-util-raw@9.1.0:
resolution: {integrity: sha512-Y8/SBAHkZGoNkpzqqfCldijcuUKh7/su31kEBp67cFY09Wy0mTRgtsLYsiIxMJxlu0f6AA5SUTbDR8K0rxnbUw==, tarball: https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-9.1.0.tgz}
hast-util-to-jsx-runtime@2.3.2:
resolution: {integrity: sha512-1ngXYb+V9UT5h+PxNRa1O1FYguZK/XL+gkeqvp7EdHlB9oHUG0eYRo/vY5inBdcqo3RkPMC58/H94HvkbfGdyg==, tarball: https://registry.npmjs.org/hast-util-to-jsx-runtime/-/hast-util-to-jsx-runtime-2.3.2.tgz}
hast-util-to-parse5@8.0.0:
resolution: {integrity: sha512-3KKrV5ZVI8if87DVSi1vDeByYrkGzg4mEfeu4alwgmmIeARiBLKCZS2uw5Gb6nU9x9Yufyj3iudm6i7nl52PFw==, tarball: https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-8.0.0.tgz}
hast-util-whitespace@3.0.0:
resolution: {integrity: sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==, tarball: https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz}
hastscript@6.0.0:
resolution: {integrity: sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w==, tarball: https://registry.npmjs.org/hastscript/-/hastscript-6.0.0.tgz}
hastscript@9.0.1:
resolution: {integrity: sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w==, tarball: https://registry.npmjs.org/hastscript/-/hastscript-9.0.1.tgz}
headers-polyfill@4.0.3:
resolution: {integrity: sha512-IScLbePpkvO846sIwOtOTDjutRMWdXdJmXdMvk6gCBHxFO8d+QKOQedyZSxFTTFYRSmlgSTDtXqqq4pcenBXLQ==, tarball: https://registry.npmjs.org/headers-polyfill/-/headers-polyfill-4.0.3.tgz}
@@ -4079,9 +4022,6 @@ packages:
html-url-attributes@3.0.1:
resolution: {integrity: sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ==, tarball: https://registry.npmjs.org/html-url-attributes/-/html-url-attributes-3.0.1.tgz}
html-void-elements@3.0.0:
resolution: {integrity: sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==, tarball: https://registry.npmjs.org/html-void-elements/-/html-void-elements-3.0.0.tgz}
http-errors@2.0.0:
resolution: {integrity: sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==, tarball: https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz}
engines: {node: '>= 0.8'}
@@ -4585,9 +4525,6 @@ packages:
json-schema-traverse@0.4.1:
resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==, tarball: https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz}
json-schema@0.4.0:
resolution: {integrity: sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==, tarball: https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz}
json-stable-stringify-without-jsonify@1.0.1:
resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==, tarball: https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz}
@@ -5348,9 +5285,6 @@ packages:
property-information@6.5.0:
resolution: {integrity: sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==, tarball: https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz}
property-information@7.0.0:
resolution: {integrity: sha512-7D/qOz/+Y4X/rzSB6jKxKUsQnphO046ei8qxG59mtM3RG3DHgTK81HrxrmoDVINJb8NKT5ZsRbwHvQ6B68Iyhg==, tarball: https://registry.npmjs.org/property-information/-/property-information-7.0.0.tgz}
protobufjs@7.4.0:
resolution: {integrity: sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==, tarball: https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz}
engines: {node: '>=12.0.0'}
@@ -5497,6 +5431,12 @@ packages:
'@types/react':
optional: true
react-resizable-panels@3.0.3:
resolution: {integrity: sha512-7HA8THVBHTzhDK4ON0tvlGXyMAJN1zBeRpuyyremSikgYh2ku6ltD7tsGQOcXx4NKPrZtYCm/5CBr+dkruTGQw==, tarball: https://registry.npmjs.org/react-resizable-panels/-/react-resizable-panels-3.0.3.tgz}
peerDependencies:
react: ^16.14.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc
react-dom: ^16.14.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc
react-router-dom@6.26.2:
resolution: {integrity: sha512-z7YkaEW0Dy35T3/QKPYB1LjMK2R1fxnHO8kWpUMTBdfVzZrWOiY9a7CtN8HqdWtDUWd5FY6Dl8HFsqVwH4uOtQ==, tarball: https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.26.2.tgz}
engines: {node: '>=14.0.0'}
@@ -5611,9 +5551,6 @@ packages:
resolution: {integrity: sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==, tarball: https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz}
engines: {node: '>= 0.4'}
rehype-raw@7.0.0:
resolution: {integrity: sha512-/aE8hCfKlQeA8LmyeyQvQF3eBiLRGNlfBJEvWH7ivp9sBqs7TNqBL5X3v157rM4IFETqDnIOO+z5M/biZbo9Ww==, tarball: https://registry.npmjs.org/rehype-raw/-/rehype-raw-7.0.0.tgz}
remark-gfm@4.0.0:
resolution: {integrity: sha512-U92vJgBPkbw4Zfu/IiW2oTZLSL3Zpv+uI7My2eq8JxKgqraFdU8YUGicEJCEgSbeaG+QDFqIcwwfMTOEelPxuA==, tarball: https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.0.tgz}
@@ -5718,9 +5655,6 @@ packages:
scheduler@0.23.2:
resolution: {integrity: sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==, tarball: https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz}
secure-json-parse@2.7.0:
resolution: {integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==, tarball: https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.7.0.tgz}
semver@7.6.2:
resolution: {integrity: sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==, tarball: https://registry.npmjs.org/semver/-/semver-7.6.2.tgz}
engines: {node: '>=10'}
@@ -5958,11 +5892,6 @@ packages:
resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==, tarball: https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz}
engines: {node: '>= 0.4'}
swr@2.3.3:
resolution: {integrity: sha512-dshNvs3ExOqtZ6kJBaAsabhPdHyeY4P2cKwRCniDVifBMoG/SVI7tfLWqPXriVspf2Rg4tPzXJTnwaihIeFw2A==, tarball: https://registry.npmjs.org/swr/-/swr-2.3.3.tgz}
peerDependencies:
react: ^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0
symbol-tree@3.2.4:
resolution: {integrity: sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==, tarball: https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz}
@@ -6000,10 +5929,6 @@ packages:
thenify@3.3.1:
resolution: {integrity: sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==, tarball: https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz}
throttleit@2.1.0:
resolution: {integrity: sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw==, tarball: https://registry.npmjs.org/throttleit/-/throttleit-2.1.0.tgz}
engines: {node: '>=18'}
tiny-case@1.0.3:
resolution: {integrity: sha512-Eet/eeMhkO6TX8mnUteS9zgPbUMQa4I6Kkp5ORiBD5476/m+PIRiumP5tmh5ioJpH7k51Kehawy2UDfsnxxY8Q==, tarball: https://registry.npmjs.org/tiny-case/-/tiny-case-1.0.3.tgz}
@@ -6309,9 +6234,6 @@ packages:
resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==, tarball: https://registry.npmjs.org/vary/-/vary-1.1.2.tgz}
engines: {node: '>= 0.8'}
vfile-location@5.0.3:
resolution: {integrity: sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==, tarball: https://registry.npmjs.org/vfile-location/-/vfile-location-5.0.3.tgz}
vfile-message@4.0.2:
resolution: {integrity: sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==, tarball: https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz}
@@ -6411,9 +6333,6 @@ packages:
wcwidth@1.0.1:
resolution: {integrity: sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==, tarball: https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.1.tgz}
web-namespaces@2.0.1:
resolution: {integrity: sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==, tarball: https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz}
webidl-conversions@7.0.0:
resolution: {integrity: sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==, tarball: https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz}
engines: {node: '>=12'}
@@ -6545,11 +6464,6 @@ packages:
yup@1.6.1:
resolution: {integrity: sha512-JED8pB50qbA4FOkDol0bYF/p60qSEDQqBD0/qeIrUCG1KbPBIQ776fCUNb9ldbPcSTxA69g/47XTo4TqWiuXOA==, tarball: https://registry.npmjs.org/yup/-/yup-1.6.1.tgz}
zod-to-json-schema@3.24.5:
resolution: {integrity: sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g==, tarball: https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.5.tgz}
peerDependencies:
zod: ^3.24.1
zod-validation-error@3.4.0:
resolution: {integrity: sha512-ZOPR9SVY6Pb2qqO5XHt+MkkTRxGXb4EVtnjc9JpXUOtUB1T9Ru7mZOT361AN3MsetVe7R0a1KZshJDZdgp9miQ==, tarball: https://registry.npmjs.org/zod-validation-error/-/zod-validation-error-3.4.0.tgz}
engines: {node: '>=18.0.0'}
@@ -6569,45 +6483,6 @@ snapshots:
'@adobe/css-tools@4.4.1': {}
'@ai-sdk/provider-utils@2.2.4(zod@3.24.3)':
dependencies:
'@ai-sdk/provider': 1.1.0
nanoid: 3.3.8
secure-json-parse: 2.7.0
zod: 3.24.3
'@ai-sdk/provider-utils@2.2.6(zod@3.24.3)':
dependencies:
'@ai-sdk/provider': 1.1.2
nanoid: 3.3.8
secure-json-parse: 2.7.0
zod: 3.24.3
'@ai-sdk/provider@1.1.0':
dependencies:
json-schema: 0.4.0
'@ai-sdk/provider@1.1.2':
dependencies:
json-schema: 0.4.0
'@ai-sdk/react@1.2.6(react@18.3.1)(zod@3.24.3)':
dependencies:
'@ai-sdk/provider-utils': 2.2.4(zod@3.24.3)
'@ai-sdk/ui-utils': 1.2.5(zod@3.24.3)
react: 18.3.1
swr: 2.3.3(react@18.3.1)
throttleit: 2.1.0
optionalDependencies:
zod: 3.24.3
'@ai-sdk/ui-utils@1.2.5(zod@3.24.3)':
dependencies:
'@ai-sdk/provider': 1.1.0
'@ai-sdk/provider-utils': 2.2.4(zod@3.24.3)
zod: 3.24.3
zod-to-json-schema: 3.24.5(zod@3.24.3)
'@alloc/quick-lru@5.2.0': {}
'@ampproject/remapping@2.3.0':
@@ -10430,39 +10305,8 @@ snapshots:
dependencies:
function-bind: 1.1.2
hast-util-from-parse5@8.0.3:
dependencies:
'@types/hast': 3.0.4
'@types/unist': 3.0.3
devlop: 1.1.0
hastscript: 9.0.1
property-information: 7.0.0
vfile: 6.0.3
vfile-location: 5.0.3
web-namespaces: 2.0.1
hast-util-parse-selector@2.2.5: {}
hast-util-parse-selector@4.0.0:
dependencies:
'@types/hast': 3.0.4
hast-util-raw@9.1.0:
dependencies:
'@types/hast': 3.0.4
'@types/unist': 3.0.3
'@ungap/structured-clone': 1.3.0
hast-util-from-parse5: 8.0.3
hast-util-to-parse5: 8.0.0
html-void-elements: 3.0.0
mdast-util-to-hast: 13.2.0
parse5: 7.1.2
unist-util-position: 5.0.0
unist-util-visit: 5.0.0
vfile: 6.0.3
web-namespaces: 2.0.1
zwitch: 2.0.4
hast-util-to-jsx-runtime@2.3.2:
dependencies:
'@types/estree': 1.0.6
@@ -10483,16 +10327,6 @@ snapshots:
transitivePeerDependencies:
- supports-color
hast-util-to-parse5@8.0.0:
dependencies:
'@types/hast': 3.0.4
comma-separated-tokens: 2.0.3
devlop: 1.1.0
property-information: 6.5.0
space-separated-tokens: 2.0.2
web-namespaces: 2.0.1
zwitch: 2.0.4
hast-util-whitespace@3.0.0:
dependencies:
'@types/hast': 3.0.4
@@ -10505,14 +10339,6 @@ snapshots:
property-information: 5.6.0
space-separated-tokens: 1.1.5
hastscript@9.0.1:
dependencies:
'@types/hast': 3.0.4
comma-separated-tokens: 2.0.3
hast-util-parse-selector: 4.0.0
property-information: 7.0.0
space-separated-tokens: 2.0.2
headers-polyfill@4.0.3: {}
highlight.js@10.7.3: {}
@@ -10531,8 +10357,6 @@ snapshots:
html-url-attributes@3.0.1: {}
html-void-elements@3.0.0: {}
http-errors@2.0.0:
dependencies:
depd: 2.0.0
@@ -11260,8 +11084,6 @@ snapshots:
json-schema-traverse@0.4.1:
optional: true
json-schema@0.4.0: {}
json-stable-stringify-without-jsonify@1.0.1:
optional: true
@@ -12295,8 +12117,6 @@ snapshots:
property-information@6.5.0: {}
property-information@7.0.0: {}
protobufjs@7.4.0:
dependencies:
'@protobufjs/aspromise': 1.1.2
@@ -12473,6 +12293,11 @@ snapshots:
optionalDependencies:
'@types/react': 18.3.12
react-resizable-panels@3.0.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
dependencies:
react: 18.3.1
react-dom: 18.3.1(react@18.3.1)
react-router-dom@6.26.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
dependencies:
'@remix-run/router': 1.19.2
@@ -12620,12 +12445,6 @@ snapshots:
define-properties: 1.2.1
set-function-name: 2.0.1
rehype-raw@7.0.0:
dependencies:
'@types/hast': 3.0.4
hast-util-raw: 9.1.0
vfile: 6.0.3
remark-gfm@4.0.0:
dependencies:
'@types/mdast': 4.0.3
@@ -12763,8 +12582,6 @@ snapshots:
dependencies:
loose-envify: 1.4.0
secure-json-parse@2.7.0: {}
semver@7.6.2: {}
send@0.19.0:
@@ -13014,12 +12831,6 @@ snapshots:
supports-preserve-symlinks-flag@1.0.0: {}
swr@2.3.3(react@18.3.1):
dependencies:
dequal: 2.0.3
react: 18.3.1
use-sync-external-store: 1.4.0(react@18.3.1)
symbol-tree@3.2.4: {}
tailwind-merge@2.6.0: {}
@@ -13078,8 +12889,6 @@ snapshots:
dependencies:
any-promise: 1.3.0
throttleit@2.1.0: {}
tiny-case@1.0.3: {}
tiny-invariant@1.3.3: {}
@@ -13376,11 +13185,6 @@ snapshots:
vary@1.1.2: {}
vfile-location@5.0.3:
dependencies:
'@types/unist': 3.0.3
vfile: 6.0.3
vfile-message@4.0.2:
dependencies:
'@types/unist': 3.0.3
@@ -13456,8 +13260,6 @@ snapshots:
dependencies:
defaults: 1.0.4
web-namespaces@2.0.1: {}
webidl-conversions@7.0.0: {}
webpack-sources@3.2.3: {}
@@ -13572,10 +13374,6 @@ snapshots:
toposort: 2.0.2
type-fest: 2.19.0
zod-to-json-schema@3.24.5(zod@3.24.3):
dependencies:
zod: 3.24.3
zod-validation-error@3.4.0(zod@3.24.3):
dependencies:
zod: 3.24.3
+1 -25
View File
@@ -818,13 +818,6 @@ class ApiMethods {
return response.data;
};
getDeploymentLLMs = async (): Promise<TypesGen.LanguageModelConfig> => {
const response = await this.axios.get<TypesGen.LanguageModelConfig>(
"/api/v2/deployment/llms",
);
return response.data;
};
getOrganizationIdpSyncClaimFieldValues = async (
organization: string,
field: string,
@@ -1244,7 +1237,7 @@ class ApiMethods {
getTemplateVersionPresets = async (
templateVersionId: string,
): Promise<TypesGen.Preset[]> => {
): Promise<TypesGen.Preset[] | null> => {
const response = await this.axios.get<TypesGen.Preset[]>(
`/api/v2/templateversions/${templateVersionId}/presets`,
);
@@ -2584,23 +2577,6 @@ class ApiMethods {
markAllInboxNotificationsAsRead = async () => {
await this.axios.put<void>("/api/v2/notifications/inbox/mark-all-as-read");
};
createChat = async () => {
const res = await this.axios.post<TypesGen.Chat>("/api/v2/chats");
return res.data;
};
getChats = async () => {
const res = await this.axios.get<TypesGen.Chat[]>("/api/v2/chats");
return res.data;
};
getChatMessages = async (chatId: string) => {
const res = await this.axios.get<TypesGen.ChatMessage[]>(
`/api/v2/chats/${chatId}/messages`,
);
return res.data;
};
}
// Experimental API methods call endpoints under the /api/experimental/ prefix.
-25
View File
@@ -1,25 +0,0 @@
import { API } from "api/api";
import type { QueryClient } from "react-query";
export const createChat = (queryClient: QueryClient) => {
return {
mutationFn: API.createChat,
onSuccess: async () => {
await queryClient.invalidateQueries({ queryKey: ["chats"] });
},
};
};
export const getChats = () => {
return {
queryKey: ["chats"],
queryFn: API.getChats,
};
};
export const getChatMessages = (chatID: string) => {
return {
queryKey: ["chatMessages", chatID],
queryFn: () => API.getChatMessages(chatID),
};
};
-7
View File
@@ -39,10 +39,3 @@ export const deploymentIdpSyncFieldValues = (field: string) => {
queryFn: () => API.getDeploymentIdpSyncFieldValues(field),
};
};
export const deploymentLanguageModels = () => {
return {
queryKey: ["deployment", "llms"],
queryFn: API.getDeploymentLLMs,
};
};

Some files were not shown because too many files have changed in this diff Show More