Compare commits
11 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 6125358569 | |||
| 803e2c7679 | |||
| bd6f4266a7 | |||
| 075269a94e | |||
| a5a7326415 | |||
| d15c4702b0 | |||
| 765d99caa3 | |||
| c5e87690be | |||
| 4097ec3a6d | |||
| d8651d7682 | |||
| 41359ce2fd |
@@ -32,80 +32,8 @@ env:
|
||||
CODER_RELEASE_NOTES: ${{ inputs.release_notes }}
|
||||
|
||||
jobs:
|
||||
# build-dylib is a separate job to build the dylib on macOS.
|
||||
build-dylib:
|
||||
runs-on: ${{ github.repository_owner == 'coder' && 'depot-macos-latest' || 'macos-latest' }}
|
||||
steps:
|
||||
- name: Harden Runner
|
||||
uses: step-security/harden-runner@0080882f6c36860b6ba35c610c98ce87d4e2f26f # v2.10.2
|
||||
with:
|
||||
egress-policy: audit
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4.2.1
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup build tools
|
||||
run: |
|
||||
brew install bash gnu-getopt make
|
||||
echo "$(brew --prefix bash)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix gnu-getopt)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix make)/libexec/gnubin" >> $GITHUB_PATH
|
||||
|
||||
- name: Setup Go
|
||||
uses: ./.github/actions/setup-go
|
||||
|
||||
- name: Install rcodesign
|
||||
run: |
|
||||
set -euo pipefail
|
||||
wget -O /tmp/rcodesign.tar.gz https://github.com/indygreg/apple-platform-rs/releases/download/apple-codesign%2F0.22.0/apple-codesign-0.22.0-macos-universal.tar.gz
|
||||
sudo tar -xzf /tmp/rcodesign.tar.gz \
|
||||
-C /usr/local/bin \
|
||||
--strip-components=1 \
|
||||
apple-codesign-0.22.0-macos-universal/rcodesign
|
||||
rm /tmp/rcodesign.tar.gz
|
||||
|
||||
- name: Setup Apple Developer certificate and API key
|
||||
run: |
|
||||
set -euo pipefail
|
||||
touch /tmp/{apple_cert.p12,apple_cert_password.txt,apple_apikey.p8}
|
||||
chmod 600 /tmp/{apple_cert.p12,apple_cert_password.txt,apple_apikey.p8}
|
||||
echo "$AC_CERTIFICATE_P12_BASE64" | base64 -d > /tmp/apple_cert.p12
|
||||
echo "$AC_CERTIFICATE_PASSWORD" > /tmp/apple_cert_password.txt
|
||||
echo "$AC_APIKEY_P8_BASE64" | base64 -d > /tmp/apple_apikey.p8
|
||||
env:
|
||||
AC_CERTIFICATE_P12_BASE64: ${{ secrets.AC_CERTIFICATE_P12_BASE64 }}
|
||||
AC_CERTIFICATE_PASSWORD: ${{ secrets.AC_CERTIFICATE_PASSWORD }}
|
||||
AC_APIKEY_P8_BASE64: ${{ secrets.AC_APIKEY_P8_BASE64 }}
|
||||
|
||||
- name: Build dylibs
|
||||
run: |
|
||||
set -euxo pipefail
|
||||
go mod download
|
||||
|
||||
make gen/mark-fresh
|
||||
make build/coder-dylib
|
||||
env:
|
||||
CODER_SIGN_DARWIN: 1
|
||||
AC_CERTIFICATE_FILE: /tmp/apple_cert.p12
|
||||
AC_CERTIFICATE_PASSWORD_FILE: /tmp/apple_cert_password.txt
|
||||
|
||||
- name: Upload build artifacts
|
||||
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
|
||||
with:
|
||||
name: dylibs
|
||||
path: |
|
||||
./build/*.h
|
||||
./build/*.dylib
|
||||
retention-days: 7
|
||||
|
||||
- name: Delete Apple Developer certificate and API key
|
||||
run: rm -f /tmp/{apple_cert.p12,apple_cert_password.txt,apple_apikey.p8}
|
||||
|
||||
release:
|
||||
name: Build and publish
|
||||
needs: build-dylib
|
||||
runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }}
|
||||
permissions:
|
||||
# Required to publish a release
|
||||
@@ -217,18 +145,6 @@ jobs:
|
||||
- name: Install nsis and zstd
|
||||
run: sudo apt-get install -y nsis zstd
|
||||
|
||||
- name: Download dylibs
|
||||
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
|
||||
with:
|
||||
name: dylibs
|
||||
path: ./build
|
||||
|
||||
- name: Insert dylibs
|
||||
run: |
|
||||
mv ./build/*amd64.dylib ./site/out/bin/coder-vpn-darwin-amd64.dylib
|
||||
mv ./build/*arm64.dylib ./site/out/bin/coder-vpn-darwin-arm64.dylib
|
||||
mv ./build/*arm64.h ./site/out/bin/coder-vpn-darwin-dylib.h
|
||||
|
||||
- name: Install nfpm
|
||||
run: |
|
||||
set -euo pipefail
|
||||
@@ -355,7 +271,6 @@ jobs:
|
||||
${{ steps.image-base-tag.outputs.tag }}
|
||||
|
||||
- name: Verify that images are pushed properly
|
||||
if: steps.image-base-tag.outputs.tag != ''
|
||||
run: |
|
||||
# retry 10 times with a 5 second delay as the images may not be
|
||||
# available immediately
|
||||
@@ -388,6 +303,10 @@ jobs:
|
||||
run: |
|
||||
set -euxo pipefail
|
||||
|
||||
# build Docker images for each architecture
|
||||
version="$(./scripts/version.sh)"
|
||||
make build/coder_"$version"_linux_{amd64,arm64,armv7}.tag
|
||||
|
||||
# we can't build multi-arch if the images aren't pushed, so quit now
|
||||
# if dry-running
|
||||
if [[ "$CODER_RELEASE" != *t* ]]; then
|
||||
@@ -395,10 +314,6 @@ jobs:
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# build Docker images for each architecture
|
||||
version="$(./scripts/version.sh)"
|
||||
make build/coder_"$version"_linux_{amd64,arm64,armv7}.tag
|
||||
|
||||
# build and push multi-arch manifest, this depends on the other images
|
||||
# being pushed so will automatically push them.
|
||||
make push/build/coder_"$version"_linux.tag
|
||||
|
||||
+132
-17
@@ -300,9 +300,10 @@ func (m selectModel) filteredOptions() []string {
|
||||
}
|
||||
|
||||
type MultiSelectOptions struct {
|
||||
Message string
|
||||
Options []string
|
||||
Defaults []string
|
||||
Message string
|
||||
Options []string
|
||||
Defaults []string
|
||||
EnableCustomInput bool
|
||||
}
|
||||
|
||||
func MultiSelect(inv *serpent.Invocation, opts MultiSelectOptions) ([]string, error) {
|
||||
@@ -328,9 +329,10 @@ func MultiSelect(inv *serpent.Invocation, opts MultiSelectOptions) ([]string, er
|
||||
}
|
||||
|
||||
initialModel := multiSelectModel{
|
||||
search: textinput.New(),
|
||||
options: options,
|
||||
message: opts.Message,
|
||||
search: textinput.New(),
|
||||
options: options,
|
||||
message: opts.Message,
|
||||
enableCustomInput: opts.EnableCustomInput,
|
||||
}
|
||||
|
||||
initialModel.search.Prompt = ""
|
||||
@@ -370,12 +372,15 @@ type multiSelectOption struct {
|
||||
}
|
||||
|
||||
type multiSelectModel struct {
|
||||
search textinput.Model
|
||||
options []*multiSelectOption
|
||||
cursor int
|
||||
message string
|
||||
canceled bool
|
||||
selected bool
|
||||
search textinput.Model
|
||||
options []*multiSelectOption
|
||||
cursor int
|
||||
message string
|
||||
canceled bool
|
||||
selected bool
|
||||
isCustomInputMode bool // track if we're adding a custom option
|
||||
customInput string // store custom input
|
||||
enableCustomInput bool // control whether custom input is allowed
|
||||
}
|
||||
|
||||
func (multiSelectModel) Init() tea.Cmd {
|
||||
@@ -386,6 +391,10 @@ func (multiSelectModel) Init() tea.Cmd {
|
||||
func (m multiSelectModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
|
||||
var cmd tea.Cmd
|
||||
|
||||
if m.isCustomInputMode {
|
||||
return m.handleCustomInputMode(msg)
|
||||
}
|
||||
|
||||
switch msg := msg.(type) {
|
||||
case terminateMsg:
|
||||
m.canceled = true
|
||||
@@ -398,6 +407,11 @@ func (m multiSelectModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
|
||||
return m, tea.Quit
|
||||
|
||||
case tea.KeyEnter:
|
||||
// Switch to custom input mode if we're on the "+ Add custom value:" option
|
||||
if m.enableCustomInput && m.cursor == len(m.filteredOptions()) {
|
||||
m.isCustomInputMode = true
|
||||
return m, nil
|
||||
}
|
||||
if len(m.options) != 0 {
|
||||
m.selected = true
|
||||
return m, tea.Quit
|
||||
@@ -413,16 +427,16 @@ func (m multiSelectModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
|
||||
return m, nil
|
||||
|
||||
case tea.KeyUp:
|
||||
options := m.filteredOptions()
|
||||
maxIndex := m.getMaxIndex()
|
||||
if m.cursor > 0 {
|
||||
m.cursor--
|
||||
} else {
|
||||
m.cursor = len(options) - 1
|
||||
m.cursor = maxIndex
|
||||
}
|
||||
|
||||
case tea.KeyDown:
|
||||
options := m.filteredOptions()
|
||||
if m.cursor < len(options)-1 {
|
||||
maxIndex := m.getMaxIndex()
|
||||
if m.cursor < maxIndex {
|
||||
m.cursor++
|
||||
} else {
|
||||
m.cursor = 0
|
||||
@@ -457,6 +471,91 @@ func (m multiSelectModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
|
||||
return m, cmd
|
||||
}
|
||||
|
||||
func (m multiSelectModel) getMaxIndex() int {
|
||||
options := m.filteredOptions()
|
||||
if m.enableCustomInput {
|
||||
// Include the "+ Add custom value" entry
|
||||
return len(options)
|
||||
}
|
||||
// Includes only the actual options
|
||||
return len(options) - 1
|
||||
}
|
||||
|
||||
// handleCustomInputMode manages keyboard interactions when in custom input mode
|
||||
func (m *multiSelectModel) handleCustomInputMode(msg tea.Msg) (tea.Model, tea.Cmd) {
|
||||
keyMsg, ok := msg.(tea.KeyMsg)
|
||||
if !ok {
|
||||
return m, nil
|
||||
}
|
||||
|
||||
switch keyMsg.Type {
|
||||
case tea.KeyEnter:
|
||||
return m.handleCustomInputSubmission()
|
||||
|
||||
case tea.KeyCtrlC:
|
||||
m.canceled = true
|
||||
return m, tea.Quit
|
||||
|
||||
case tea.KeyBackspace:
|
||||
return m.handleCustomInputBackspace()
|
||||
|
||||
default:
|
||||
m.customInput += keyMsg.String()
|
||||
return m, nil
|
||||
}
|
||||
}
|
||||
|
||||
// handleCustomInputSubmission processes the submission of custom input
|
||||
func (m *multiSelectModel) handleCustomInputSubmission() (tea.Model, tea.Cmd) {
|
||||
if m.customInput == "" {
|
||||
m.isCustomInputMode = false
|
||||
return m, nil
|
||||
}
|
||||
|
||||
// Clear search to ensure option is visible and cursor points to the new option
|
||||
m.search.SetValue("")
|
||||
|
||||
// Check for duplicates
|
||||
for i, opt := range m.options {
|
||||
if opt.option == m.customInput {
|
||||
// If the option exists but isn't chosen, select it
|
||||
if !opt.chosen {
|
||||
opt.chosen = true
|
||||
}
|
||||
|
||||
// Point cursor to the new option
|
||||
m.cursor = i
|
||||
|
||||
// Reset custom input mode to disabled
|
||||
m.isCustomInputMode = false
|
||||
m.customInput = ""
|
||||
return m, nil
|
||||
}
|
||||
}
|
||||
|
||||
// Add new unique option
|
||||
m.options = append(m.options, &multiSelectOption{
|
||||
option: m.customInput,
|
||||
chosen: true,
|
||||
})
|
||||
|
||||
// Point cursor to the newly added option
|
||||
m.cursor = len(m.options) - 1
|
||||
|
||||
// Reset custom input mode to disabled
|
||||
m.customInput = ""
|
||||
m.isCustomInputMode = false
|
||||
return m, nil
|
||||
}
|
||||
|
||||
// handleCustomInputBackspace handles backspace in custom input mode
|
||||
func (m *multiSelectModel) handleCustomInputBackspace() (tea.Model, tea.Cmd) {
|
||||
if len(m.customInput) > 0 {
|
||||
m.customInput = m.customInput[:len(m.customInput)-1]
|
||||
}
|
||||
return m, nil
|
||||
}
|
||||
|
||||
func (m multiSelectModel) View() string {
|
||||
var s strings.Builder
|
||||
|
||||
@@ -469,13 +568,19 @@ func (m multiSelectModel) View() string {
|
||||
return s.String()
|
||||
}
|
||||
|
||||
if m.isCustomInputMode {
|
||||
_, _ = s.WriteString(fmt.Sprintf("%s\nEnter custom value: %s\n", msg, m.customInput))
|
||||
return s.String()
|
||||
}
|
||||
|
||||
_, _ = s.WriteString(fmt.Sprintf(
|
||||
"%s %s[Use arrows to move, space to select, <right> to all, <left> to none, type to filter]\n",
|
||||
msg,
|
||||
m.search.View(),
|
||||
))
|
||||
|
||||
for i, option := range m.filteredOptions() {
|
||||
options := m.filteredOptions()
|
||||
for i, option := range options {
|
||||
cursor := " "
|
||||
chosen := "[ ]"
|
||||
o := option.option
|
||||
@@ -498,6 +603,16 @@ func (m multiSelectModel) View() string {
|
||||
))
|
||||
}
|
||||
|
||||
if m.enableCustomInput {
|
||||
// Add the "+ Add custom value" option at the bottom
|
||||
cursor := " "
|
||||
text := " + Add custom value"
|
||||
if m.cursor == len(options) {
|
||||
cursor = pretty.Sprint(DefaultStyles.Keyword, "> ")
|
||||
text = pretty.Sprint(DefaultStyles.Keyword, text)
|
||||
}
|
||||
_, _ = s.WriteString(fmt.Sprintf("%s%s\n", cursor, text))
|
||||
}
|
||||
return s.String()
|
||||
}
|
||||
|
||||
|
||||
@@ -101,6 +101,39 @@ func TestMultiSelect(t *testing.T) {
|
||||
}()
|
||||
require.Equal(t, items, <-msgChan)
|
||||
})
|
||||
|
||||
t.Run("MultiSelectWithCustomInput", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
items := []string{"Code", "Chairs", "Whale", "Diamond", "Carrot"}
|
||||
ptty := ptytest.New(t)
|
||||
msgChan := make(chan []string)
|
||||
go func() {
|
||||
resp, err := newMultiSelectWithCustomInput(ptty, items)
|
||||
assert.NoError(t, err)
|
||||
msgChan <- resp
|
||||
}()
|
||||
require.Equal(t, items, <-msgChan)
|
||||
})
|
||||
}
|
||||
|
||||
func newMultiSelectWithCustomInput(ptty *ptytest.PTY, items []string) ([]string, error) {
|
||||
var values []string
|
||||
cmd := &serpent.Command{
|
||||
Handler: func(inv *serpent.Invocation) error {
|
||||
selectedItems, err := cliui.MultiSelect(inv, cliui.MultiSelectOptions{
|
||||
Options: items,
|
||||
Defaults: items,
|
||||
EnableCustomInput: true,
|
||||
})
|
||||
if err == nil {
|
||||
values = selectedItems
|
||||
}
|
||||
return err
|
||||
},
|
||||
}
|
||||
inv := cmd.Invoke()
|
||||
ptty.Attach(inv)
|
||||
return values, inv.Run()
|
||||
}
|
||||
|
||||
func newMultiSelect(ptty *ptytest.PTY, items []string) ([]string, error) {
|
||||
|
||||
@@ -0,0 +1,53 @@
|
||||
package cliutil
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
"github.com/coder/coder/v2/cli/cliui"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
)
|
||||
|
||||
var (
|
||||
warnNoMatchedProvisioners = `Your build has been enqueued, but there are no provisioners that accept the required tags. Once a compatible provisioner becomes available, your build will continue. Please contact your administrator.
|
||||
Details:
|
||||
Provisioner job ID : %s
|
||||
Requested tags : %s
|
||||
`
|
||||
warnNoAvailableProvisioners = `Provisioners that accept the required tags have not responded for longer than expected. This may delay your build. Please contact your administrator if your build does not complete.
|
||||
Details:
|
||||
Provisioner job ID : %s
|
||||
Requested tags : %s
|
||||
Most recently seen : %s
|
||||
`
|
||||
)
|
||||
|
||||
// WarnMatchedProvisioners warns the user if there are no provisioners that
|
||||
// match the requested tags for a given provisioner job.
|
||||
// If the job is not pending, it is ignored.
|
||||
func WarnMatchedProvisioners(w io.Writer, mp *codersdk.MatchedProvisioners, job codersdk.ProvisionerJob) {
|
||||
if mp == nil {
|
||||
// Nothing in the response, nothing to do here!
|
||||
return
|
||||
}
|
||||
if job.Status != codersdk.ProvisionerJobPending {
|
||||
// Only warn if the job is pending.
|
||||
return
|
||||
}
|
||||
var tagsJSON strings.Builder
|
||||
if err := json.NewEncoder(&tagsJSON).Encode(job.Tags); err != nil {
|
||||
// Fall back to the less-pretty string representation.
|
||||
tagsJSON.Reset()
|
||||
_, _ = tagsJSON.WriteString(fmt.Sprintf("%v", job.Tags))
|
||||
}
|
||||
if mp.Count == 0 {
|
||||
cliui.Warnf(w, warnNoMatchedProvisioners, job.ID, tagsJSON.String())
|
||||
return
|
||||
}
|
||||
if mp.Available == 0 {
|
||||
cliui.Warnf(w, warnNoAvailableProvisioners, job.ID, strings.TrimSpace(tagsJSON.String()), mp.MostRecentlySeen.Time)
|
||||
return
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,74 @@
|
||||
package cliutil_test
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/coder/coder/v2/cli/cliutil"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
)
|
||||
|
||||
func TestWarnMatchedProvisioners(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
for _, tt := range []struct {
|
||||
name string
|
||||
mp *codersdk.MatchedProvisioners
|
||||
job codersdk.ProvisionerJob
|
||||
expect string
|
||||
}{
|
||||
{
|
||||
name: "no_match",
|
||||
mp: &codersdk.MatchedProvisioners{
|
||||
Count: 0,
|
||||
Available: 0,
|
||||
},
|
||||
job: codersdk.ProvisionerJob{
|
||||
Status: codersdk.ProvisionerJobPending,
|
||||
},
|
||||
expect: `there are no provisioners that accept the required tags`,
|
||||
},
|
||||
{
|
||||
name: "no_available",
|
||||
mp: &codersdk.MatchedProvisioners{
|
||||
Count: 1,
|
||||
Available: 0,
|
||||
},
|
||||
job: codersdk.ProvisionerJob{
|
||||
Status: codersdk.ProvisionerJobPending,
|
||||
},
|
||||
expect: `Provisioners that accept the required tags have not responded for longer than expected`,
|
||||
},
|
||||
{
|
||||
name: "match",
|
||||
mp: &codersdk.MatchedProvisioners{
|
||||
Count: 1,
|
||||
Available: 1,
|
||||
},
|
||||
job: codersdk.ProvisionerJob{
|
||||
Status: codersdk.ProvisionerJobPending,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "not_pending",
|
||||
mp: &codersdk.MatchedProvisioners{},
|
||||
job: codersdk.ProvisionerJob{
|
||||
Status: codersdk.ProvisionerJobRunning,
|
||||
},
|
||||
},
|
||||
} {
|
||||
tt := tt
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
var w strings.Builder
|
||||
cliutil.WarnMatchedProvisioners(&w, tt.mp, tt.job)
|
||||
if tt.expect != "" {
|
||||
require.Contains(t, w.String(), tt.expect)
|
||||
} else {
|
||||
require.Empty(t, w.String())
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
+10
-1
@@ -14,6 +14,7 @@ import (
|
||||
"github.com/coder/pretty"
|
||||
|
||||
"github.com/coder/coder/v2/cli/cliui"
|
||||
"github.com/coder/coder/v2/cli/cliutil"
|
||||
"github.com/coder/coder/v2/coderd/util/ptr"
|
||||
"github.com/coder/coder/v2/coderd/util/slice"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
@@ -289,7 +290,7 @@ func (r *RootCmd) create() *serpent.Command {
|
||||
ttlMillis = ptr.Ref(stopAfter.Milliseconds())
|
||||
}
|
||||
|
||||
workspace, err := client.CreateWorkspace(inv.Context(), template.OrganizationID, workspaceOwner, codersdk.CreateWorkspaceRequest{
|
||||
workspace, err := client.CreateUserWorkspace(inv.Context(), workspaceOwner, codersdk.CreateWorkspaceRequest{
|
||||
TemplateVersionID: templateVersionID,
|
||||
Name: workspaceName,
|
||||
AutostartSchedule: schedSpec,
|
||||
@@ -301,6 +302,8 @@ func (r *RootCmd) create() *serpent.Command {
|
||||
return xerrors.Errorf("create workspace: %w", err)
|
||||
}
|
||||
|
||||
cliutil.WarnMatchedProvisioners(inv.Stderr, workspace.LatestBuild.MatchedProvisioners, workspace.LatestBuild.Job)
|
||||
|
||||
err = cliui.WorkspaceBuild(inv.Context(), inv.Stdout, client, workspace.LatestBuild.ID)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("watch build: %w", err)
|
||||
@@ -433,6 +436,12 @@ func prepWorkspaceBuild(inv *serpent.Invocation, client *codersdk.Client, args p
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("begin workspace dry-run: %w", err)
|
||||
}
|
||||
|
||||
matchedProvisioners, err := client.TemplateVersionDryRunMatchedProvisioners(inv.Context(), templateVersion.ID, dryRun.ID)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("get matched provisioners: %w", err)
|
||||
}
|
||||
cliutil.WarnMatchedProvisioners(inv.Stdout, &matchedProvisioners, dryRun)
|
||||
_, _ = fmt.Fprintln(inv.Stdout, "Planning workspace...")
|
||||
err = cliui.ProvisionerJob(inv.Context(), inv.Stdout, cliui.ProvisionerJobOptions{
|
||||
Fetch: func() (codersdk.ProvisionerJob, error) {
|
||||
|
||||
@@ -5,6 +5,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/coder/coder/v2/cli/cliui"
|
||||
"github.com/coder/coder/v2/cli/cliutil"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
"github.com/coder/serpent"
|
||||
)
|
||||
@@ -55,6 +56,7 @@ func (r *RootCmd) deleteWorkspace() *serpent.Command {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cliutil.WarnMatchedProvisioners(inv.Stdout, build.MatchedProvisioners, build.Job)
|
||||
|
||||
err = cliui.WorkspaceBuild(inv.Context(), inv.Stdout, client, build.ID)
|
||||
if err != nil {
|
||||
|
||||
@@ -12,6 +12,8 @@ import (
|
||||
"github.com/coder/coder/v2/cli/clitest"
|
||||
"github.com/coder/coder/v2/coderd/coderdtest"
|
||||
"github.com/coder/coder/v2/coderd/database/dbauthz"
|
||||
"github.com/coder/coder/v2/coderd/database/dbtestutil"
|
||||
"github.com/coder/coder/v2/coderd/rbac"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
"github.com/coder/coder/v2/pty/ptytest"
|
||||
"github.com/coder/coder/v2/testutil"
|
||||
@@ -164,4 +166,46 @@ func TestDelete(t *testing.T) {
|
||||
}()
|
||||
<-doneChan
|
||||
})
|
||||
|
||||
t.Run("WarnNoProvisioners", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
if !dbtestutil.WillUsePostgres() {
|
||||
t.Skip("this test requires postgres")
|
||||
}
|
||||
|
||||
store, ps, db := dbtestutil.NewDBWithSQLDB(t)
|
||||
client, closeDaemon := coderdtest.NewWithProvisionerCloser(t, &coderdtest.Options{
|
||||
Database: store,
|
||||
Pubsub: ps,
|
||||
IncludeProvisionerDaemon: true,
|
||||
})
|
||||
|
||||
// Given: a user, template, and workspace
|
||||
user := coderdtest.CreateFirstUser(t, client)
|
||||
templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, user.OrganizationID, rbac.RoleTemplateAdmin())
|
||||
version := coderdtest.CreateTemplateVersion(t, templateAdmin, user.OrganizationID, nil)
|
||||
template := coderdtest.CreateTemplate(t, templateAdmin, user.OrganizationID, version.ID)
|
||||
workspace := coderdtest.CreateWorkspace(t, templateAdmin, template.ID)
|
||||
coderdtest.AwaitWorkspaceBuildJobCompleted(t, templateAdmin, workspace.LatestBuild.ID)
|
||||
|
||||
// When: all provisioner daemons disappear
|
||||
require.NoError(t, closeDaemon.Close())
|
||||
_, err := db.Exec("DELETE FROM provisioner_daemons;")
|
||||
require.NoError(t, err)
|
||||
|
||||
// Then: the workspace deletion should warn about no provisioners
|
||||
inv, root := clitest.New(t, "delete", workspace.Name, "-y")
|
||||
pty := ptytest.New(t).Attach(inv)
|
||||
clitest.SetupConfig(t, templateAdmin, root)
|
||||
doneChan := make(chan struct{})
|
||||
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
|
||||
defer cancel()
|
||||
go func() {
|
||||
defer close(doneChan)
|
||||
_ = inv.WithContext(ctx).Run()
|
||||
}()
|
||||
pty.ExpectMatch("there are no provisioners that accept the required tags")
|
||||
cancel()
|
||||
<-doneChan
|
||||
})
|
||||
}
|
||||
|
||||
+13
-3
@@ -41,6 +41,15 @@ func (RootCmd) promptExample() *serpent.Command {
|
||||
Default: "",
|
||||
Value: serpent.StringArrayOf(&multiSelectValues),
|
||||
}
|
||||
|
||||
enableCustomInput bool
|
||||
enableCustomInputOption = serpent.Option{
|
||||
Name: "enable-custom-input",
|
||||
Description: "Enable custom input option in multi-select.",
|
||||
Required: false,
|
||||
Flag: "enable-custom-input",
|
||||
Value: serpent.BoolOf(&enableCustomInput),
|
||||
}
|
||||
)
|
||||
cmd := &serpent.Command{
|
||||
Use: "prompt-example",
|
||||
@@ -156,14 +165,15 @@ func (RootCmd) promptExample() *serpent.Command {
|
||||
multiSelectValues, multiSelectError = cliui.MultiSelect(inv, cliui.MultiSelectOptions{
|
||||
Message: "Select some things:",
|
||||
Options: []string{
|
||||
"Code", "Chair", "Whale", "Diamond", "Carrot",
|
||||
"Code", "Chairs", "Whale", "Diamond", "Carrot",
|
||||
},
|
||||
Defaults: []string{"Code"},
|
||||
Defaults: []string{"Code"},
|
||||
EnableCustomInput: enableCustomInput,
|
||||
})
|
||||
}
|
||||
_, _ = fmt.Fprintf(inv.Stdout, "%q are nice choices.\n", strings.Join(multiSelectValues, ", "))
|
||||
return multiSelectError
|
||||
}, useThingsOption),
|
||||
}, useThingsOption, enableCustomInputOption),
|
||||
promptCmd("rich-parameter", func(inv *serpent.Invocation) error {
|
||||
value, err := cliui.RichSelect(inv, cliui.RichSelectOptions{
|
||||
Options: []codersdk.TemplateVersionParameterOption{
|
||||
|
||||
@@ -8,6 +8,7 @@ import (
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/coder/coder/v2/cli/cliui"
|
||||
"github.com/coder/coder/v2/cli/cliutil"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
"github.com/coder/serpent"
|
||||
)
|
||||
@@ -35,6 +36,23 @@ func (r *RootCmd) start() *serpent.Command {
|
||||
}
|
||||
var build codersdk.WorkspaceBuild
|
||||
switch workspace.LatestBuild.Status {
|
||||
case codersdk.WorkspaceStatusPending:
|
||||
// The above check is technically duplicated in cliutil.WarnmatchedProvisioners
|
||||
// but we still want to avoid users spamming multiple builds that will
|
||||
// not be picked up.
|
||||
_, _ = fmt.Fprintf(
|
||||
inv.Stdout,
|
||||
"\nThe %s workspace is waiting to start!\n",
|
||||
cliui.Keyword(workspace.Name),
|
||||
)
|
||||
cliutil.WarnMatchedProvisioners(inv.Stderr, workspace.LatestBuild.MatchedProvisioners, workspace.LatestBuild.Job)
|
||||
if _, err := cliui.Prompt(inv, cliui.PromptOptions{
|
||||
Text: "Enqueue another start?",
|
||||
IsConfirm: true,
|
||||
Default: cliui.ConfirmNo,
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
case codersdk.WorkspaceStatusRunning:
|
||||
_, _ = fmt.Fprintf(
|
||||
inv.Stdout, "\nThe %s workspace is already running!\n",
|
||||
@@ -159,6 +177,7 @@ func startWorkspace(inv *serpent.Invocation, client *codersdk.Client, workspace
|
||||
if err != nil {
|
||||
return codersdk.WorkspaceBuild{}, xerrors.Errorf("create workspace build: %w", err)
|
||||
}
|
||||
cliutil.WarnMatchedProvisioners(inv.Stderr, build.MatchedProvisioners, build.Job)
|
||||
|
||||
return build, nil
|
||||
}
|
||||
|
||||
+17
@@ -5,6 +5,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/coder/coder/v2/cli/cliui"
|
||||
"github.com/coder/coder/v2/cli/cliutil"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
"github.com/coder/serpent"
|
||||
)
|
||||
@@ -36,6 +37,21 @@ func (r *RootCmd) stop() *serpent.Command {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if workspace.LatestBuild.Job.Status == codersdk.ProvisionerJobPending {
|
||||
// cliutil.WarnMatchedProvisioners also checks if the job is pending
|
||||
// but we still want to avoid users spamming multiple builds that will
|
||||
// not be picked up.
|
||||
cliui.Warn(inv.Stderr, "The workspace is already stopping!")
|
||||
cliutil.WarnMatchedProvisioners(inv.Stderr, workspace.LatestBuild.MatchedProvisioners, workspace.LatestBuild.Job)
|
||||
if _, err := cliui.Prompt(inv, cliui.PromptOptions{
|
||||
Text: "Enqueue another stop?",
|
||||
IsConfirm: true,
|
||||
Default: cliui.ConfirmNo,
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
wbr := codersdk.CreateWorkspaceBuildRequest{
|
||||
Transition: codersdk.WorkspaceTransitionStop,
|
||||
}
|
||||
@@ -46,6 +62,7 @@ func (r *RootCmd) stop() *serpent.Command {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cliutil.WarnMatchedProvisioners(inv.Stderr, build.MatchedProvisioners, build.Job)
|
||||
|
||||
err = cliui.WorkspaceBuild(inv.Context(), inv.Stdout, client, build.ID)
|
||||
if err != nil {
|
||||
|
||||
+2
-25
@@ -2,7 +2,6 @@ package cli
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
@@ -17,6 +16,7 @@ import (
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/coder/coder/v2/cli/cliui"
|
||||
"github.com/coder/coder/v2/cli/cliutil"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
"github.com/coder/coder/v2/provisionersdk"
|
||||
"github.com/coder/pretty"
|
||||
@@ -416,30 +416,7 @@ func createValidTemplateVersion(inv *serpent.Invocation, args createValidTemplat
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var tagsJSON strings.Builder
|
||||
if err := json.NewEncoder(&tagsJSON).Encode(version.Job.Tags); err != nil {
|
||||
// Fall back to the less-pretty string representation.
|
||||
tagsJSON.Reset()
|
||||
_, _ = tagsJSON.WriteString(fmt.Sprintf("%v", version.Job.Tags))
|
||||
}
|
||||
if version.MatchedProvisioners.Count == 0 {
|
||||
cliui.Warnf(inv.Stderr, `No provisioners are available to handle the job!
|
||||
Please contact your deployment administrator for assistance.
|
||||
Details:
|
||||
Provisioner job ID : %s
|
||||
Requested tags : %s
|
||||
`, version.Job.ID, tagsJSON.String())
|
||||
} else if version.MatchedProvisioners.Available == 0 {
|
||||
cliui.Warnf(inv.Stderr, `All available provisioner daemons have been silent for a while.
|
||||
Your build will proceed once they become available.
|
||||
If this persists, please contact your deployment administrator for assistance.
|
||||
Details:
|
||||
Provisioner job ID : %s
|
||||
Requested tags : %s
|
||||
Most recently seen : %s
|
||||
`, version.Job.ID, strings.TrimSpace(tagsJSON.String()), version.MatchedProvisioners.MostRecentlySeen.Time)
|
||||
}
|
||||
|
||||
cliutil.WarnMatchedProvisioners(inv.Stderr, version.MatchedProvisioners, version.Job)
|
||||
err = cliui.ProvisionerJob(inv.Context(), inv.Stdout, cliui.ProvisionerJobOptions{
|
||||
Fetch: func() (codersdk.ProvisionerJob, error) {
|
||||
version, err := client.TemplateVersion(inv.Context(), version.ID)
|
||||
|
||||
+149
-69
@@ -3,6 +3,7 @@ package cli_test
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"database/sql"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
@@ -18,6 +19,7 @@ import (
|
||||
"github.com/coder/coder/v2/coderd/coderdtest"
|
||||
"github.com/coder/coder/v2/coderd/database"
|
||||
"github.com/coder/coder/v2/coderd/database/dbtestutil"
|
||||
"github.com/coder/coder/v2/coderd/database/dbtime"
|
||||
"github.com/coder/coder/v2/coderd/rbac"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
"github.com/coder/coder/v2/provisioner/echo"
|
||||
@@ -412,84 +414,162 @@ func TestTemplatePush(t *testing.T) {
|
||||
|
||||
t.Run("WorkspaceTagsTerraform", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitShort)
|
||||
|
||||
// Start an instance **without** a built-in provisioner.
|
||||
// We're not actually testing that the Terraform applies.
|
||||
// What we test is that a provisioner job is created with the expected
|
||||
// tags based on the __content__ of the Terraform.
|
||||
store, ps := dbtestutil.NewDB(t)
|
||||
client := coderdtest.New(t, &coderdtest.Options{
|
||||
Database: store,
|
||||
Pubsub: ps,
|
||||
})
|
||||
tests := []struct {
|
||||
name string
|
||||
setupDaemon func(ctx context.Context, store database.Store, owner codersdk.CreateFirstUserResponse, tags database.StringMap, now time.Time) error
|
||||
expectOutput string
|
||||
}{
|
||||
{
|
||||
name: "no provisioners available",
|
||||
setupDaemon: func(_ context.Context, _ database.Store, _ codersdk.CreateFirstUserResponse, _ database.StringMap, _ time.Time) error {
|
||||
return nil
|
||||
},
|
||||
expectOutput: "there are no provisioners that accept the required tags",
|
||||
},
|
||||
{
|
||||
name: "provisioner stale",
|
||||
setupDaemon: func(ctx context.Context, store database.Store, owner codersdk.CreateFirstUserResponse, tags database.StringMap, now time.Time) error {
|
||||
pk, err := store.InsertProvisionerKey(ctx, database.InsertProvisionerKeyParams{
|
||||
ID: uuid.New(),
|
||||
CreatedAt: now,
|
||||
OrganizationID: owner.OrganizationID,
|
||||
Name: "test",
|
||||
Tags: tags,
|
||||
HashedSecret: []byte("secret"),
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
oneHourAgo := now.Add(-time.Hour)
|
||||
_, err = store.UpsertProvisionerDaemon(ctx, database.UpsertProvisionerDaemonParams{
|
||||
Provisioners: []database.ProvisionerType{database.ProvisionerTypeTerraform},
|
||||
LastSeenAt: sql.NullTime{Time: oneHourAgo, Valid: true},
|
||||
CreatedAt: oneHourAgo,
|
||||
Name: "test",
|
||||
Tags: tags,
|
||||
OrganizationID: owner.OrganizationID,
|
||||
KeyID: pk.ID,
|
||||
})
|
||||
return err
|
||||
},
|
||||
expectOutput: "Provisioners that accept the required tags have not responded for longer than expected",
|
||||
},
|
||||
{
|
||||
name: "active provisioner",
|
||||
setupDaemon: func(ctx context.Context, store database.Store, owner codersdk.CreateFirstUserResponse, tags database.StringMap, now time.Time) error {
|
||||
pk, err := store.InsertProvisionerKey(ctx, database.InsertProvisionerKeyParams{
|
||||
ID: uuid.New(),
|
||||
CreatedAt: now,
|
||||
OrganizationID: owner.OrganizationID,
|
||||
Name: "test",
|
||||
Tags: tags,
|
||||
HashedSecret: []byte("secret"),
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = store.UpsertProvisionerDaemon(ctx, database.UpsertProvisionerDaemonParams{
|
||||
Provisioners: []database.ProvisionerType{database.ProvisionerTypeTerraform},
|
||||
LastSeenAt: sql.NullTime{Time: now, Valid: true},
|
||||
CreatedAt: now,
|
||||
Name: "test-active",
|
||||
Tags: tags,
|
||||
OrganizationID: owner.OrganizationID,
|
||||
KeyID: pk.ID,
|
||||
})
|
||||
return err
|
||||
},
|
||||
expectOutput: "",
|
||||
},
|
||||
}
|
||||
|
||||
owner := coderdtest.CreateFirstUser(t, client)
|
||||
templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin())
|
||||
for _, tt := range tests {
|
||||
tt := tt
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// Create a tar file with some pre-defined content
|
||||
tarFile := testutil.CreateTar(t, map[string]string{
|
||||
"main.tf": `
|
||||
variable "a" {
|
||||
type = string
|
||||
default = "1"
|
||||
}
|
||||
data "coder_parameter" "b" {
|
||||
type = string
|
||||
default = "2"
|
||||
}
|
||||
resource "null_resource" "test" {}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"foo": "bar",
|
||||
"a": var.a,
|
||||
"b": data.coder_parameter.b.value,
|
||||
}
|
||||
}`,
|
||||
})
|
||||
// Start an instance **without** a built-in provisioner.
|
||||
// We're not actually testing that the Terraform applies.
|
||||
// What we test is that a provisioner job is created with the expected
|
||||
// tags based on the __content__ of the Terraform.
|
||||
store, ps := dbtestutil.NewDB(t)
|
||||
client := coderdtest.New(t, &coderdtest.Options{
|
||||
Database: store,
|
||||
Pubsub: ps,
|
||||
})
|
||||
|
||||
// Write the tar file to disk.
|
||||
tempDir := t.TempDir()
|
||||
err := tfparse.WriteArchive(tarFile, "application/x-tar", tempDir)
|
||||
require.NoError(t, err)
|
||||
owner := coderdtest.CreateFirstUser(t, client)
|
||||
templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin())
|
||||
|
||||
// Run `coder templates push`
|
||||
templateName := strings.ReplaceAll(testutil.GetRandomName(t), "_", "-")
|
||||
var stdout, stderr strings.Builder
|
||||
inv, root := clitest.New(t, "templates", "push", templateName, "-d", tempDir, "--yes")
|
||||
inv.Stdout = &stdout
|
||||
inv.Stderr = &stderr
|
||||
clitest.SetupConfig(t, templateAdmin, root)
|
||||
// Create a tar file with some pre-defined content
|
||||
tarFile := testutil.CreateTar(t, map[string]string{
|
||||
"main.tf": `
|
||||
variable "a" {
|
||||
type = string
|
||||
default = "1"
|
||||
}
|
||||
data "coder_parameter" "b" {
|
||||
type = string
|
||||
default = "2"
|
||||
}
|
||||
resource "null_resource" "test" {}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"a": var.a,
|
||||
"b": data.coder_parameter.b.value,
|
||||
"test_name": "` + tt.name + `"
|
||||
}
|
||||
}`,
|
||||
})
|
||||
|
||||
// Don't forget to clean up!
|
||||
cancelCtx, cancel := context.WithCancel(ctx)
|
||||
t.Cleanup(cancel)
|
||||
done := make(chan error)
|
||||
go func() {
|
||||
done <- inv.WithContext(cancelCtx).Run()
|
||||
}()
|
||||
// Write the tar file to disk.
|
||||
tempDir := t.TempDir()
|
||||
err := tfparse.WriteArchive(tarFile, "application/x-tar", tempDir)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Assert that a provisioner job was created with the desired tags.
|
||||
wantTags := database.StringMap(provisionersdk.MutateTags(uuid.Nil, map[string]string{
|
||||
"foo": "bar",
|
||||
"a": "1",
|
||||
"b": "2",
|
||||
}))
|
||||
require.Eventually(t, func() bool {
|
||||
jobs, err := store.GetProvisionerJobsCreatedAfter(ctx, time.Time{})
|
||||
if !assert.NoError(t, err) {
|
||||
return false
|
||||
}
|
||||
if len(jobs) == 0 {
|
||||
return false
|
||||
}
|
||||
return assert.EqualValues(t, wantTags, jobs[0].Tags)
|
||||
}, testutil.WaitShort, testutil.IntervalSlow)
|
||||
wantTags := database.StringMap(provisionersdk.MutateTags(uuid.Nil, map[string]string{
|
||||
"a": "1",
|
||||
"b": "2",
|
||||
"test_name": tt.name,
|
||||
}))
|
||||
|
||||
cancel()
|
||||
<-done
|
||||
templateName := strings.ReplaceAll(testutil.GetRandomName(t), "_", "-")
|
||||
|
||||
require.Contains(t, stderr.String(), "No provisioners are available to handle the job!")
|
||||
inv, root := clitest.New(t, "templates", "push", templateName, "-d", tempDir, "--yes")
|
||||
clitest.SetupConfig(t, templateAdmin, root)
|
||||
pty := ptytest.New(t).Attach(inv)
|
||||
|
||||
ctx := testutil.Context(t, testutil.WaitShort)
|
||||
now := dbtime.Now()
|
||||
require.NoError(t, tt.setupDaemon(ctx, store, owner, wantTags, now))
|
||||
|
||||
cancelCtx, cancel := context.WithCancel(ctx)
|
||||
t.Cleanup(cancel)
|
||||
done := make(chan error)
|
||||
go func() {
|
||||
done <- inv.WithContext(cancelCtx).Run()
|
||||
}()
|
||||
|
||||
require.Eventually(t, func() bool {
|
||||
jobs, err := store.GetProvisionerJobsCreatedAfter(ctx, time.Time{})
|
||||
if !assert.NoError(t, err) {
|
||||
return false
|
||||
}
|
||||
if len(jobs) == 0 {
|
||||
return false
|
||||
}
|
||||
return assert.EqualValues(t, wantTags, jobs[0].Tags)
|
||||
}, testutil.WaitShort, testutil.IntervalFast)
|
||||
|
||||
if tt.expectOutput != "" {
|
||||
pty.ExpectMatch(tt.expectOutput)
|
||||
}
|
||||
|
||||
cancel()
|
||||
<-done
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("ChangeTags", func(t *testing.T) {
|
||||
|
||||
+6
-1
@@ -50,7 +50,12 @@
|
||||
"deadline": "[timestamp]",
|
||||
"max_deadline": null,
|
||||
"status": "running",
|
||||
"daily_cost": 0
|
||||
"daily_cost": 0,
|
||||
"matched_provisioners": {
|
||||
"count": 0,
|
||||
"available": 0,
|
||||
"most_recently_seen": null
|
||||
}
|
||||
},
|
||||
"outdated": false,
|
||||
"name": "test-workspace",
|
||||
|
||||
Generated
+46
@@ -4851,6 +4851,49 @@ const docTemplate = `{
|
||||
}
|
||||
}
|
||||
},
|
||||
"/templateversions/{templateversion}/dry-run/{jobID}/matched-provisioners": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"tags": [
|
||||
"Templates"
|
||||
],
|
||||
"summary": "Get template version dry-run matched provisioners",
|
||||
"operationId": "get-template-version-dry-run-matched-provisioners",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "Template version ID",
|
||||
"name": "templateversion",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "Job ID",
|
||||
"name": "jobID",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.MatchedProvisioners"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/templateversions/{templateversion}/dry-run/{jobID}/resources": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -15068,6 +15111,9 @@ const docTemplate = `{
|
||||
"job": {
|
||||
"$ref": "#/definitions/codersdk.ProvisionerJob"
|
||||
},
|
||||
"matched_provisioners": {
|
||||
"$ref": "#/definitions/codersdk.MatchedProvisioners"
|
||||
},
|
||||
"max_deadline": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
|
||||
Generated
+42
@@ -4275,6 +4275,45 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"/templateversions/{templateversion}/dry-run/{jobID}/matched-provisioners": {
|
||||
"get": {
|
||||
"security": [
|
||||
{
|
||||
"CoderSessionToken": []
|
||||
}
|
||||
],
|
||||
"produces": ["application/json"],
|
||||
"tags": ["Templates"],
|
||||
"summary": "Get template version dry-run matched provisioners",
|
||||
"operationId": "get-template-version-dry-run-matched-provisioners",
|
||||
"parameters": [
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "Template version ID",
|
||||
"name": "templateversion",
|
||||
"in": "path",
|
||||
"required": true
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"format": "uuid",
|
||||
"description": "Job ID",
|
||||
"name": "jobID",
|
||||
"in": "path",
|
||||
"required": true
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "OK",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/codersdk.MatchedProvisioners"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/templateversions/{templateversion}/dry-run/{jobID}/resources": {
|
||||
"get": {
|
||||
"security": [
|
||||
@@ -13712,6 +13751,9 @@
|
||||
"job": {
|
||||
"$ref": "#/definitions/codersdk.ProvisionerJob"
|
||||
},
|
||||
"matched_provisioners": {
|
||||
"$ref": "#/definitions/codersdk.MatchedProvisioners"
|
||||
},
|
||||
"max_deadline": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
|
||||
@@ -3,6 +3,7 @@ package autobuild
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
@@ -177,6 +178,15 @@ func (e *Executor) runOnce(t time.Time) Stats {
|
||||
err := e.db.InTx(func(tx database.Store) error {
|
||||
var err error
|
||||
|
||||
ok, err := tx.TryAcquireLock(e.ctx, database.GenLockID(fmt.Sprintf("lifecycle-executor:%s", wsID)))
|
||||
if err != nil {
|
||||
return xerrors.Errorf("try acquire lifecycle executor lock: %w", err)
|
||||
}
|
||||
if !ok {
|
||||
log.Debug(e.ctx, "unable to acquire lock for workspace, skipping")
|
||||
return nil
|
||||
}
|
||||
|
||||
// Re-check eligibility since the first check was outside the
|
||||
// transaction and the workspace settings may have changed.
|
||||
ws, err = tx.GetWorkspaceByID(e.ctx, wsID)
|
||||
@@ -245,7 +255,7 @@ func (e *Executor) runOnce(t time.Time) Stats {
|
||||
}
|
||||
}
|
||||
|
||||
nextBuild, job, err = builder.Build(e.ctx, tx, nil, audit.WorkspaceBuildBaggage{IP: "127.0.0.1"})
|
||||
nextBuild, job, _, err = builder.Build(e.ctx, tx, nil, audit.WorkspaceBuildBaggage{IP: "127.0.0.1"})
|
||||
if err != nil {
|
||||
return xerrors.Errorf("build workspace with transition %q: %w", nextTransition, err)
|
||||
}
|
||||
@@ -372,7 +382,7 @@ func (e *Executor) runOnce(t time.Time) Stats {
|
||||
}
|
||||
return nil
|
||||
}()
|
||||
if err != nil {
|
||||
if err != nil && !xerrors.Is(err, context.Canceled) {
|
||||
log.Error(e.ctx, "failed to transition workspace", slog.Error(err))
|
||||
statsMu.Lock()
|
||||
stats.Errors[wsID] = err
|
||||
|
||||
@@ -18,6 +18,7 @@ import (
|
||||
"github.com/coder/coder/v2/coderd/coderdtest"
|
||||
"github.com/coder/coder/v2/coderd/database"
|
||||
"github.com/coder/coder/v2/coderd/database/dbauthz"
|
||||
"github.com/coder/coder/v2/coderd/database/dbtestutil"
|
||||
"github.com/coder/coder/v2/coderd/notifications"
|
||||
"github.com/coder/coder/v2/coderd/notifications/notificationstest"
|
||||
"github.com/coder/coder/v2/coderd/schedule"
|
||||
@@ -72,6 +73,76 @@ func TestExecutorAutostartOK(t *testing.T) {
|
||||
require.Equal(t, template.AutostartRequirement.DaysOfWeek, []string{"monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"})
|
||||
}
|
||||
|
||||
func TestMultipleLifecycleExecutors(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
db, ps := dbtestutil.NewDB(t)
|
||||
|
||||
var (
|
||||
sched = mustSchedule(t, "CRON_TZ=UTC 0 * * * *")
|
||||
// Create our first client
|
||||
tickCh = make(chan time.Time, 2)
|
||||
statsChA = make(chan autobuild.Stats)
|
||||
clientA = coderdtest.New(t, &coderdtest.Options{
|
||||
IncludeProvisionerDaemon: true,
|
||||
AutobuildTicker: tickCh,
|
||||
AutobuildStats: statsChA,
|
||||
Database: db,
|
||||
Pubsub: ps,
|
||||
})
|
||||
// ... And then our second client
|
||||
statsChB = make(chan autobuild.Stats)
|
||||
_ = coderdtest.New(t, &coderdtest.Options{
|
||||
IncludeProvisionerDaemon: true,
|
||||
AutobuildTicker: tickCh,
|
||||
AutobuildStats: statsChB,
|
||||
Database: db,
|
||||
Pubsub: ps,
|
||||
})
|
||||
// Now create a workspace (we can use either client, it doesn't matter)
|
||||
workspace = mustProvisionWorkspace(t, clientA, func(cwr *codersdk.CreateWorkspaceRequest) {
|
||||
cwr.AutostartSchedule = ptr.Ref(sched.String())
|
||||
})
|
||||
)
|
||||
|
||||
// Have the workspace stopped so we can perform an autostart
|
||||
workspace = coderdtest.MustTransitionWorkspace(t, clientA, workspace.ID, database.WorkspaceTransitionStart, database.WorkspaceTransitionStop)
|
||||
|
||||
// Get both clients to perform a lifecycle execution tick
|
||||
next := sched.Next(workspace.LatestBuild.CreatedAt)
|
||||
|
||||
startCh := make(chan struct{})
|
||||
go func() {
|
||||
<-startCh
|
||||
tickCh <- next
|
||||
}()
|
||||
go func() {
|
||||
<-startCh
|
||||
tickCh <- next
|
||||
}()
|
||||
close(startCh)
|
||||
|
||||
// Now we want to check the stats for both clients
|
||||
statsA := <-statsChA
|
||||
statsB := <-statsChB
|
||||
|
||||
// We expect there to be no errors
|
||||
assert.Len(t, statsA.Errors, 0)
|
||||
assert.Len(t, statsB.Errors, 0)
|
||||
|
||||
// We also expect there to have been only one transition
|
||||
require.Equal(t, 1, len(statsA.Transitions)+len(statsB.Transitions))
|
||||
|
||||
stats := statsA
|
||||
if len(statsB.Transitions) == 1 {
|
||||
stats = statsB
|
||||
}
|
||||
|
||||
// And we expect this transition to have been a start transition
|
||||
assert.Contains(t, stats.Transitions, workspace.ID)
|
||||
assert.Equal(t, database.WorkspaceTransitionStart, stats.Transitions[workspace.ID])
|
||||
}
|
||||
|
||||
func TestExecutorAutostartTemplateUpdated(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
|
||||
+3
-1
@@ -628,7 +628,8 @@ func New(options *Options) *API {
|
||||
CurrentVersion: buildinfo.Version(),
|
||||
CurrentAPIMajorVersion: proto.CurrentMajor,
|
||||
Store: options.Database,
|
||||
// TimeNow and StaleInterval set to defaults, see healthcheck/provisioner.go
|
||||
StaleInterval: provisionerdserver.StaleInterval,
|
||||
// TimeNow set to default, see healthcheck/provisioner.go
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -1054,6 +1055,7 @@ func New(options *Options) *API {
|
||||
r.Get("/{jobID}", api.templateVersionDryRun)
|
||||
r.Get("/{jobID}/resources", api.templateVersionDryRunResources)
|
||||
r.Get("/{jobID}/logs", api.templateVersionDryRunLogs)
|
||||
r.Get("/{jobID}/matched-provisioners", api.templateVersionDryRunMatchedProvisioners)
|
||||
r.Patch("/{jobID}/cancel", api.patchTemplateVersionDryRunCancel)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -673,3 +673,23 @@ func CryptoKey(key database.CryptoKey) codersdk.CryptoKey {
|
||||
Secret: key.Secret.String,
|
||||
}
|
||||
}
|
||||
|
||||
func MatchedProvisioners(provisionerDaemons []database.ProvisionerDaemon, now time.Time, staleInterval time.Duration) codersdk.MatchedProvisioners {
|
||||
minLastSeenAt := now.Add(-staleInterval)
|
||||
mostRecentlySeen := codersdk.NullTime{}
|
||||
var matched codersdk.MatchedProvisioners
|
||||
for _, provisioner := range provisionerDaemons {
|
||||
if !provisioner.LastSeenAt.Valid {
|
||||
continue
|
||||
}
|
||||
matched.Count++
|
||||
if provisioner.LastSeenAt.Time.After(minLastSeenAt) {
|
||||
matched.Available++
|
||||
}
|
||||
if provisioner.LastSeenAt.Time.After(mostRecentlySeen.Time) {
|
||||
matched.MostRecentlySeen.Valid = true
|
||||
matched.MostRecentlySeen.Time = provisioner.LastSeenAt.Time
|
||||
}
|
||||
}
|
||||
return matched
|
||||
}
|
||||
|
||||
@@ -299,7 +299,7 @@ var (
|
||||
rbac.ResourceSystem.Type: {policy.WildcardSymbol},
|
||||
rbac.ResourceOrganization.Type: {policy.ActionCreate, policy.ActionRead},
|
||||
rbac.ResourceOrganizationMember.Type: {policy.ActionCreate, policy.ActionDelete, policy.ActionRead},
|
||||
rbac.ResourceProvisionerDaemon.Type: {policy.ActionCreate, policy.ActionUpdate},
|
||||
rbac.ResourceProvisionerDaemon.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate},
|
||||
rbac.ResourceProvisionerKeys.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionDelete},
|
||||
rbac.ResourceUser.Type: rbac.ResourceUser.AvailableActions(),
|
||||
rbac.ResourceWorkspaceDormant.Type: {policy.ActionUpdate, policy.ActionDelete, policy.ActionWorkspaceStop},
|
||||
@@ -317,6 +317,23 @@ var (
|
||||
}),
|
||||
Scope: rbac.ScopeAll,
|
||||
}.WithCachedASTValue()
|
||||
|
||||
subjectSystemReadProvisionerDaemons = rbac.Subject{
|
||||
FriendlyName: "Provisioner Daemons Reader",
|
||||
ID: uuid.Nil.String(),
|
||||
Roles: rbac.Roles([]rbac.Role{
|
||||
{
|
||||
Identifier: rbac.RoleIdentifier{Name: "system-read-provisioner-daemons"},
|
||||
DisplayName: "Coder",
|
||||
Site: rbac.Permissions(map[string][]policy.Action{
|
||||
rbac.ResourceProvisionerDaemon.Type: {policy.ActionRead},
|
||||
}),
|
||||
Org: map[string][]rbac.Permission{},
|
||||
User: []rbac.Permission{},
|
||||
},
|
||||
}),
|
||||
Scope: rbac.ScopeAll,
|
||||
}.WithCachedASTValue()
|
||||
)
|
||||
|
||||
// AsProvisionerd returns a context with an actor that has permissions required
|
||||
@@ -359,6 +376,12 @@ func AsSystemRestricted(ctx context.Context) context.Context {
|
||||
return context.WithValue(ctx, authContextKey{}, subjectSystemRestricted)
|
||||
}
|
||||
|
||||
// AsSystemReadProvisionerDaemons returns a context with an actor that has permissions
|
||||
// to read provisioner daemons.
|
||||
func AsSystemReadProvisionerDaemons(ctx context.Context) context.Context {
|
||||
return context.WithValue(ctx, authContextKey{}, subjectSystemReadProvisionerDaemons)
|
||||
}
|
||||
|
||||
var AsRemoveActor = rbac.Subject{
|
||||
ID: "remove-actor",
|
||||
}
|
||||
@@ -1538,6 +1561,10 @@ func (q *querier) GetDeploymentWorkspaceStats(ctx context.Context) (database.Get
|
||||
return q.db.GetDeploymentWorkspaceStats(ctx)
|
||||
}
|
||||
|
||||
func (q *querier) GetEligibleProvisionerDaemonsByProvisionerJobIDs(ctx context.Context, provisionerJobIds []uuid.UUID) ([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow, error) {
|
||||
return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetEligibleProvisionerDaemonsByProvisionerJobIDs)(ctx, provisionerJobIds)
|
||||
}
|
||||
|
||||
func (q *querier) GetExternalAuthLink(ctx context.Context, arg database.GetExternalAuthLinkParams) (database.ExternalAuthLink, error) {
|
||||
return fetchWithAction(q.log, q.auth, policy.ActionReadPersonal, q.db.GetExternalAuthLink)(ctx, arg)
|
||||
}
|
||||
@@ -3330,13 +3357,6 @@ func (q *querier) RegisterWorkspaceProxy(ctx context.Context, arg database.Regis
|
||||
return updateWithReturn(q.log, q.auth, fetch, q.db.RegisterWorkspaceProxy)(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) RemoveRefreshToken(ctx context.Context, arg database.RemoveRefreshTokenParams) error {
|
||||
fetch := func(ctx context.Context, arg database.RemoveRefreshTokenParams) (database.ExternalAuthLink, error) {
|
||||
return q.db.GetExternalAuthLink(ctx, database.GetExternalAuthLinkParams{UserID: arg.UserID, ProviderID: arg.ProviderID})
|
||||
}
|
||||
return fetchAndExec(q.log, q.auth, policy.ActionUpdatePersonal, fetch, q.db.RemoveRefreshToken)(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) RemoveUserFromAllGroups(ctx context.Context, userID uuid.UUID) error {
|
||||
// This is a system function to clear user groups in group sync.
|
||||
if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil {
|
||||
@@ -3435,6 +3455,13 @@ func (q *querier) UpdateExternalAuthLink(ctx context.Context, arg database.Updat
|
||||
return fetchAndQuery(q.log, q.auth, policy.ActionUpdatePersonal, fetch, q.db.UpdateExternalAuthLink)(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) UpdateExternalAuthLinkRefreshToken(ctx context.Context, arg database.UpdateExternalAuthLinkRefreshTokenParams) error {
|
||||
fetch := func(ctx context.Context, arg database.UpdateExternalAuthLinkRefreshTokenParams) (database.ExternalAuthLink, error) {
|
||||
return q.db.GetExternalAuthLink(ctx, database.GetExternalAuthLinkParams{UserID: arg.UserID, ProviderID: arg.ProviderID})
|
||||
}
|
||||
return fetchAndExec(q.log, q.auth, policy.ActionUpdatePersonal, fetch, q.db.UpdateExternalAuthLinkRefreshToken)(ctx, arg)
|
||||
}
|
||||
|
||||
func (q *querier) UpdateGitSSHKey(ctx context.Context, arg database.UpdateGitSSHKeyParams) (database.GitSSHKey, error) {
|
||||
fetch := func(ctx context.Context, arg database.UpdateGitSSHKeyParams) (database.GitSSHKey, error) {
|
||||
return q.db.GetGitSSHKey(ctx, arg.UserID)
|
||||
|
||||
@@ -1282,12 +1282,14 @@ func (s *MethodTestSuite) TestUser() {
|
||||
UserID: u.ID,
|
||||
}).Asserts(u, policy.ActionUpdatePersonal)
|
||||
}))
|
||||
s.Run("RemoveRefreshToken", s.Subtest(func(db database.Store, check *expects) {
|
||||
s.Run("UpdateExternalAuthLinkRefreshToken", s.Subtest(func(db database.Store, check *expects) {
|
||||
link := dbgen.ExternalAuthLink(s.T(), db, database.ExternalAuthLink{})
|
||||
check.Args(database.RemoveRefreshTokenParams{
|
||||
ProviderID: link.ProviderID,
|
||||
UserID: link.UserID,
|
||||
UpdatedAt: link.UpdatedAt,
|
||||
check.Args(database.UpdateExternalAuthLinkRefreshTokenParams{
|
||||
OAuthRefreshToken: "",
|
||||
OAuthRefreshTokenKeyID: "",
|
||||
ProviderID: link.ProviderID,
|
||||
UserID: link.UserID,
|
||||
UpdatedAt: link.UpdatedAt,
|
||||
}).Asserts(rbac.ResourceUserObject(link.UserID), policy.ActionUpdatePersonal)
|
||||
}))
|
||||
s.Run("UpdateExternalAuthLink", s.Subtest(func(db database.Store, check *expects) {
|
||||
@@ -2098,6 +2100,29 @@ func (s *MethodTestSuite) TestExtraMethods() {
|
||||
s.NoError(err, "get provisioner daemon by org")
|
||||
check.Args(database.GetProvisionerDaemonsByOrganizationParams{OrganizationID: org.ID}).Asserts(d, policy.ActionRead).Returns(ds)
|
||||
}))
|
||||
s.Run("GetEligibleProvisionerDaemonsByProvisionerJobIDs", s.Subtest(func(db database.Store, check *expects) {
|
||||
org := dbgen.Organization(s.T(), db, database.Organization{})
|
||||
tags := database.StringMap(map[string]string{
|
||||
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
|
||||
})
|
||||
j, err := db.InsertProvisionerJob(context.Background(), database.InsertProvisionerJobParams{
|
||||
OrganizationID: org.ID,
|
||||
Type: database.ProvisionerJobTypeWorkspaceBuild,
|
||||
Tags: tags,
|
||||
Provisioner: database.ProvisionerTypeEcho,
|
||||
StorageMethod: database.ProvisionerStorageMethodFile,
|
||||
})
|
||||
s.NoError(err, "insert provisioner job")
|
||||
d, err := db.UpsertProvisionerDaemon(context.Background(), database.UpsertProvisionerDaemonParams{
|
||||
OrganizationID: org.ID,
|
||||
Tags: tags,
|
||||
Provisioners: []database.ProvisionerType{database.ProvisionerTypeEcho},
|
||||
})
|
||||
s.NoError(err, "insert provisioner daemon")
|
||||
ds, err := db.GetEligibleProvisionerDaemonsByProvisionerJobIDs(context.Background(), []uuid.UUID{j.ID})
|
||||
s.NoError(err, "get provisioner daemon by org")
|
||||
check.Args(uuid.UUIDs{j.ID}).Asserts(d, policy.ActionRead).Returns(ds)
|
||||
}))
|
||||
s.Run("DeleteOldProvisionerDaemons", s.Subtest(func(db database.Store, check *expects) {
|
||||
_, err := db.UpsertProvisionerDaemon(context.Background(), database.UpsertProvisionerDaemonParams{
|
||||
Tags: database.StringMap(map[string]string{
|
||||
|
||||
@@ -209,9 +209,17 @@ func WorkspaceAgentScript(t testing.TB, db database.Store, orig database.Workspa
|
||||
return scripts[0]
|
||||
}
|
||||
|
||||
func WorkspaceAgentScriptTimings(t testing.TB, db database.Store, script database.WorkspaceAgentScript, count int) []database.WorkspaceAgentScriptTiming {
|
||||
timings := make([]database.WorkspaceAgentScriptTiming, count)
|
||||
for i := range count {
|
||||
func WorkspaceAgentScripts(t testing.TB, db database.Store, count int, orig database.WorkspaceAgentScript) []database.WorkspaceAgentScript {
|
||||
scripts := make([]database.WorkspaceAgentScript, 0, count)
|
||||
for range count {
|
||||
scripts = append(scripts, WorkspaceAgentScript(t, db, orig))
|
||||
}
|
||||
return scripts
|
||||
}
|
||||
|
||||
func WorkspaceAgentScriptTimings(t testing.TB, db database.Store, scripts []database.WorkspaceAgentScript) []database.WorkspaceAgentScriptTiming {
|
||||
timings := make([]database.WorkspaceAgentScriptTiming, len(scripts))
|
||||
for i, script := range scripts {
|
||||
timings[i] = WorkspaceAgentScriptTiming(t, db, database.WorkspaceAgentScriptTiming{
|
||||
ScriptID: script.ID,
|
||||
})
|
||||
@@ -502,6 +510,46 @@ func GroupMember(t testing.TB, db database.Store, member database.GroupMemberTab
|
||||
return groupMember
|
||||
}
|
||||
|
||||
// ProvisionerDaemon creates a provisioner daemon as far as the database is concerned. It does not run a provisioner daemon.
|
||||
// If no key is provided, it will create one.
|
||||
func ProvisionerDaemon(t testing.TB, db database.Store, daemon database.ProvisionerDaemon) database.ProvisionerDaemon {
|
||||
t.Helper()
|
||||
|
||||
if daemon.KeyID == uuid.Nil {
|
||||
key, err := db.InsertProvisionerKey(genCtx, database.InsertProvisionerKeyParams{
|
||||
ID: uuid.New(),
|
||||
Name: daemon.Name + "-key",
|
||||
OrganizationID: daemon.OrganizationID,
|
||||
HashedSecret: []byte("secret"),
|
||||
CreatedAt: dbtime.Now(),
|
||||
Tags: daemon.Tags,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
daemon.KeyID = key.ID
|
||||
}
|
||||
|
||||
if daemon.CreatedAt.IsZero() {
|
||||
daemon.CreatedAt = dbtime.Now()
|
||||
}
|
||||
if daemon.Name == "" {
|
||||
daemon.Name = "test-daemon"
|
||||
}
|
||||
|
||||
d, err := db.UpsertProvisionerDaemon(genCtx, database.UpsertProvisionerDaemonParams{
|
||||
Name: daemon.Name,
|
||||
OrganizationID: daemon.OrganizationID,
|
||||
CreatedAt: daemon.CreatedAt,
|
||||
Provisioners: daemon.Provisioners,
|
||||
Tags: daemon.Tags,
|
||||
KeyID: daemon.KeyID,
|
||||
LastSeenAt: daemon.LastSeenAt,
|
||||
Version: daemon.Version,
|
||||
APIVersion: daemon.APIVersion,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
return d
|
||||
}
|
||||
|
||||
// ProvisionerJob is a bit more involved to get the values such as "completedAt", "startedAt", "cancelledAt" set. ps
|
||||
// can be set to nil if you are SURE that you don't require a provisionerdaemon to acquire the job in your test.
|
||||
func ProvisionerJob(t testing.TB, db database.Store, ps pubsub.Pubsub, orig database.ProvisionerJob) database.ProvisionerJob {
|
||||
@@ -788,16 +836,17 @@ func TemplateVersion(t testing.TB, db database.Store, orig database.TemplateVers
|
||||
err := db.InTx(func(db database.Store) error {
|
||||
versionID := takeFirst(orig.ID, uuid.New())
|
||||
err := db.InsertTemplateVersion(genCtx, database.InsertTemplateVersionParams{
|
||||
ID: versionID,
|
||||
TemplateID: takeFirst(orig.TemplateID, uuid.NullUUID{}),
|
||||
OrganizationID: takeFirst(orig.OrganizationID, uuid.New()),
|
||||
CreatedAt: takeFirst(orig.CreatedAt, dbtime.Now()),
|
||||
UpdatedAt: takeFirst(orig.UpdatedAt, dbtime.Now()),
|
||||
Name: takeFirst(orig.Name, testutil.GetRandomName(t)),
|
||||
Message: orig.Message,
|
||||
Readme: takeFirst(orig.Readme, testutil.GetRandomName(t)),
|
||||
JobID: takeFirst(orig.JobID, uuid.New()),
|
||||
CreatedBy: takeFirst(orig.CreatedBy, uuid.New()),
|
||||
ID: versionID,
|
||||
TemplateID: takeFirst(orig.TemplateID, uuid.NullUUID{}),
|
||||
OrganizationID: takeFirst(orig.OrganizationID, uuid.New()),
|
||||
CreatedAt: takeFirst(orig.CreatedAt, dbtime.Now()),
|
||||
UpdatedAt: takeFirst(orig.UpdatedAt, dbtime.Now()),
|
||||
Name: takeFirst(orig.Name, testutil.GetRandomName(t)),
|
||||
Message: orig.Message,
|
||||
Readme: takeFirst(orig.Readme, testutil.GetRandomName(t)),
|
||||
JobID: takeFirst(orig.JobID, uuid.New()),
|
||||
CreatedBy: takeFirst(orig.CreatedBy, uuid.New()),
|
||||
SourceExampleID: takeFirst(orig.SourceExampleID, sql.NullString{}),
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
|
||||
+120
-45
@@ -1119,6 +1119,14 @@ func (q *FakeQuerier) getWorkspaceAgentScriptsByAgentIDsNoLock(ids []uuid.UUID)
|
||||
return scripts, nil
|
||||
}
|
||||
|
||||
// getOwnerFromTags returns the lowercase owner from tags, matching SQL's COALESCE(tags ->> 'owner', ”)
|
||||
func getOwnerFromTags(tags map[string]string) string {
|
||||
if owner, ok := tags["owner"]; ok {
|
||||
return strings.ToLower(owner)
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (*FakeQuerier) AcquireLock(_ context.Context, _ int64) error {
|
||||
return xerrors.New("AcquireLock must only be called within a transaction")
|
||||
}
|
||||
@@ -2743,6 +2751,63 @@ func (q *FakeQuerier) GetDeploymentWorkspaceStats(ctx context.Context) (database
|
||||
return stat, nil
|
||||
}
|
||||
|
||||
func (q *FakeQuerier) GetEligibleProvisionerDaemonsByProvisionerJobIDs(_ context.Context, provisionerJobIds []uuid.UUID) ([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow, error) {
|
||||
q.mutex.RLock()
|
||||
defer q.mutex.RUnlock()
|
||||
|
||||
results := make([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow, 0)
|
||||
seen := make(map[string]struct{}) // Track unique combinations
|
||||
|
||||
for _, jobID := range provisionerJobIds {
|
||||
var job database.ProvisionerJob
|
||||
found := false
|
||||
for _, j := range q.provisionerJobs {
|
||||
if j.ID == jobID {
|
||||
job = j
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
continue
|
||||
}
|
||||
|
||||
for _, daemon := range q.provisionerDaemons {
|
||||
if daemon.OrganizationID != job.OrganizationID {
|
||||
continue
|
||||
}
|
||||
|
||||
if !tagsSubset(job.Tags, daemon.Tags) {
|
||||
continue
|
||||
}
|
||||
|
||||
provisionerMatches := false
|
||||
for _, p := range daemon.Provisioners {
|
||||
if p == job.Provisioner {
|
||||
provisionerMatches = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !provisionerMatches {
|
||||
continue
|
||||
}
|
||||
|
||||
key := jobID.String() + "-" + daemon.ID.String()
|
||||
if _, exists := seen[key]; exists {
|
||||
continue
|
||||
}
|
||||
seen[key] = struct{}{}
|
||||
|
||||
results = append(results, database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow{
|
||||
JobID: jobID,
|
||||
ProvisionerDaemon: daemon,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (q *FakeQuerier) GetExternalAuthLink(_ context.Context, arg database.GetExternalAuthLinkParams) (database.ExternalAuthLink, error) {
|
||||
if err := validateDatabaseType(arg); err != nil {
|
||||
return database.ExternalAuthLink{}, err
|
||||
@@ -5953,6 +6018,15 @@ func (q *FakeQuerier) GetWorkspaceAgentScriptTimingsByBuildID(ctx context.Contex
|
||||
WorkspaceAgentName: agent.Name,
|
||||
})
|
||||
}
|
||||
|
||||
// We want to only return the first script run for each Script ID.
|
||||
slices.SortFunc(rows, func(a, b database.GetWorkspaceAgentScriptTimingsByBuildIDRow) int {
|
||||
return a.StartedAt.Compare(b.StartedAt)
|
||||
})
|
||||
rows = slices.CompactFunc(rows, func(e1, e2 database.GetWorkspaceAgentScriptTimingsByBuildIDRow) bool {
|
||||
return e1.ScriptID == e2.ScriptID
|
||||
})
|
||||
|
||||
return rows, nil
|
||||
}
|
||||
|
||||
@@ -7699,16 +7773,17 @@ func (q *FakeQuerier) InsertTemplateVersion(_ context.Context, arg database.Inse
|
||||
|
||||
//nolint:gosimple
|
||||
version := database.TemplateVersionTable{
|
||||
ID: arg.ID,
|
||||
TemplateID: arg.TemplateID,
|
||||
OrganizationID: arg.OrganizationID,
|
||||
CreatedAt: arg.CreatedAt,
|
||||
UpdatedAt: arg.UpdatedAt,
|
||||
Name: arg.Name,
|
||||
Message: arg.Message,
|
||||
Readme: arg.Readme,
|
||||
JobID: arg.JobID,
|
||||
CreatedBy: arg.CreatedBy,
|
||||
ID: arg.ID,
|
||||
TemplateID: arg.TemplateID,
|
||||
OrganizationID: arg.OrganizationID,
|
||||
CreatedAt: arg.CreatedAt,
|
||||
UpdatedAt: arg.UpdatedAt,
|
||||
Name: arg.Name,
|
||||
Message: arg.Message,
|
||||
Readme: arg.Readme,
|
||||
JobID: arg.JobID,
|
||||
CreatedBy: arg.CreatedBy,
|
||||
SourceExampleID: arg.SourceExampleID,
|
||||
}
|
||||
q.templateVersions = append(q.templateVersions, version)
|
||||
return nil
|
||||
@@ -8555,29 +8630,6 @@ func (q *FakeQuerier) RegisterWorkspaceProxy(_ context.Context, arg database.Reg
|
||||
return database.WorkspaceProxy{}, sql.ErrNoRows
|
||||
}
|
||||
|
||||
func (q *FakeQuerier) RemoveRefreshToken(_ context.Context, arg database.RemoveRefreshTokenParams) error {
|
||||
if err := validateDatabaseType(arg); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
q.mutex.Lock()
|
||||
defer q.mutex.Unlock()
|
||||
for index, gitAuthLink := range q.externalAuthLinks {
|
||||
if gitAuthLink.ProviderID != arg.ProviderID {
|
||||
continue
|
||||
}
|
||||
if gitAuthLink.UserID != arg.UserID {
|
||||
continue
|
||||
}
|
||||
gitAuthLink.UpdatedAt = arg.UpdatedAt
|
||||
gitAuthLink.OAuthRefreshToken = ""
|
||||
q.externalAuthLinks[index] = gitAuthLink
|
||||
|
||||
return nil
|
||||
}
|
||||
return sql.ErrNoRows
|
||||
}
|
||||
|
||||
func (q *FakeQuerier) RemoveUserFromAllGroups(_ context.Context, userID uuid.UUID) error {
|
||||
q.mutex.Lock()
|
||||
defer q.mutex.Unlock()
|
||||
@@ -8797,6 +8849,29 @@ func (q *FakeQuerier) UpdateExternalAuthLink(_ context.Context, arg database.Upd
|
||||
return database.ExternalAuthLink{}, sql.ErrNoRows
|
||||
}
|
||||
|
||||
func (q *FakeQuerier) UpdateExternalAuthLinkRefreshToken(_ context.Context, arg database.UpdateExternalAuthLinkRefreshTokenParams) error {
|
||||
if err := validateDatabaseType(arg); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
q.mutex.Lock()
|
||||
defer q.mutex.Unlock()
|
||||
for index, gitAuthLink := range q.externalAuthLinks {
|
||||
if gitAuthLink.ProviderID != arg.ProviderID {
|
||||
continue
|
||||
}
|
||||
if gitAuthLink.UserID != arg.UserID {
|
||||
continue
|
||||
}
|
||||
gitAuthLink.UpdatedAt = arg.UpdatedAt
|
||||
gitAuthLink.OAuthRefreshToken = arg.OAuthRefreshToken
|
||||
q.externalAuthLinks[index] = gitAuthLink
|
||||
|
||||
return nil
|
||||
}
|
||||
return sql.ErrNoRows
|
||||
}
|
||||
|
||||
func (q *FakeQuerier) UpdateGitSSHKey(_ context.Context, arg database.UpdateGitSSHKeyParams) (database.GitSSHKey, error) {
|
||||
if err := validateDatabaseType(arg); err != nil {
|
||||
return database.GitSSHKey{}, err
|
||||
@@ -10248,25 +10323,26 @@ func (q *FakeQuerier) UpsertOAuthSigningKey(_ context.Context, value string) err
|
||||
}
|
||||
|
||||
func (q *FakeQuerier) UpsertProvisionerDaemon(_ context.Context, arg database.UpsertProvisionerDaemonParams) (database.ProvisionerDaemon, error) {
|
||||
err := validateDatabaseType(arg)
|
||||
if err != nil {
|
||||
if err := validateDatabaseType(arg); err != nil {
|
||||
return database.ProvisionerDaemon{}, err
|
||||
}
|
||||
|
||||
q.mutex.Lock()
|
||||
defer q.mutex.Unlock()
|
||||
for _, d := range q.provisionerDaemons {
|
||||
if d.Name == arg.Name {
|
||||
if d.Tags[provisionersdk.TagScope] == provisionersdk.ScopeOrganization && arg.Tags[provisionersdk.TagOwner] != "" {
|
||||
continue
|
||||
}
|
||||
if d.Tags[provisionersdk.TagScope] == provisionersdk.ScopeUser && arg.Tags[provisionersdk.TagOwner] != d.Tags[provisionersdk.TagOwner] {
|
||||
continue
|
||||
}
|
||||
|
||||
// Look for existing daemon using the same composite key as SQL
|
||||
for i, d := range q.provisionerDaemons {
|
||||
if d.OrganizationID == arg.OrganizationID &&
|
||||
d.Name == arg.Name &&
|
||||
getOwnerFromTags(d.Tags) == getOwnerFromTags(arg.Tags) {
|
||||
d.Provisioners = arg.Provisioners
|
||||
d.Tags = maps.Clone(arg.Tags)
|
||||
d.Version = arg.Version
|
||||
d.LastSeenAt = arg.LastSeenAt
|
||||
d.Version = arg.Version
|
||||
d.APIVersion = arg.APIVersion
|
||||
d.OrganizationID = arg.OrganizationID
|
||||
d.KeyID = arg.KeyID
|
||||
q.provisionerDaemons[i] = d
|
||||
return d, nil
|
||||
}
|
||||
}
|
||||
@@ -10276,7 +10352,6 @@ func (q *FakeQuerier) UpsertProvisionerDaemon(_ context.Context, arg database.Up
|
||||
Name: arg.Name,
|
||||
Provisioners: arg.Provisioners,
|
||||
Tags: maps.Clone(arg.Tags),
|
||||
ReplicaID: uuid.NullUUID{},
|
||||
LastSeenAt: arg.LastSeenAt,
|
||||
Version: arg.Version,
|
||||
APIVersion: arg.APIVersion,
|
||||
|
||||
@@ -630,6 +630,13 @@ func (m queryMetricsStore) GetDeploymentWorkspaceStats(ctx context.Context) (dat
|
||||
return row, err
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) GetEligibleProvisionerDaemonsByProvisionerJobIDs(ctx context.Context, provisionerJobIds []uuid.UUID) ([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow, error) {
|
||||
start := time.Now()
|
||||
r0, r1 := m.s.GetEligibleProvisionerDaemonsByProvisionerJobIDs(ctx, provisionerJobIds)
|
||||
m.queryLatencies.WithLabelValues("GetEligibleProvisionerDaemonsByProvisionerJobIDs").Observe(time.Since(start).Seconds())
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) GetExternalAuthLink(ctx context.Context, arg database.GetExternalAuthLinkParams) (database.ExternalAuthLink, error) {
|
||||
start := time.Now()
|
||||
link, err := m.s.GetExternalAuthLink(ctx, arg)
|
||||
@@ -2093,13 +2100,6 @@ func (m queryMetricsStore) RegisterWorkspaceProxy(ctx context.Context, arg datab
|
||||
return proxy, err
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) RemoveRefreshToken(ctx context.Context, arg database.RemoveRefreshTokenParams) error {
|
||||
start := time.Now()
|
||||
r0 := m.s.RemoveRefreshToken(ctx, arg)
|
||||
m.queryLatencies.WithLabelValues("RemoveRefreshToken").Observe(time.Since(start).Seconds())
|
||||
return r0
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) RemoveUserFromAllGroups(ctx context.Context, userID uuid.UUID) error {
|
||||
start := time.Now()
|
||||
r0 := m.s.RemoveUserFromAllGroups(ctx, userID)
|
||||
@@ -2170,6 +2170,13 @@ func (m queryMetricsStore) UpdateExternalAuthLink(ctx context.Context, arg datab
|
||||
return link, err
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) UpdateExternalAuthLinkRefreshToken(ctx context.Context, arg database.UpdateExternalAuthLinkRefreshTokenParams) error {
|
||||
start := time.Now()
|
||||
r0 := m.s.UpdateExternalAuthLinkRefreshToken(ctx, arg)
|
||||
m.queryLatencies.WithLabelValues("UpdateExternalAuthLinkRefreshToken").Observe(time.Since(start).Seconds())
|
||||
return r0
|
||||
}
|
||||
|
||||
func (m queryMetricsStore) UpdateGitSSHKey(ctx context.Context, arg database.UpdateGitSSHKeyParams) (database.GitSSHKey, error) {
|
||||
start := time.Now()
|
||||
key, err := m.s.UpdateGitSSHKey(ctx, arg)
|
||||
|
||||
@@ -1267,6 +1267,21 @@ func (mr *MockStoreMockRecorder) GetDeploymentWorkspaceStats(arg0 any) *gomock.C
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDeploymentWorkspaceStats", reflect.TypeOf((*MockStore)(nil).GetDeploymentWorkspaceStats), arg0)
|
||||
}
|
||||
|
||||
// GetEligibleProvisionerDaemonsByProvisionerJobIDs mocks base method.
|
||||
func (m *MockStore) GetEligibleProvisionerDaemonsByProvisionerJobIDs(arg0 context.Context, arg1 []uuid.UUID) ([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow, error) {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "GetEligibleProvisionerDaemonsByProvisionerJobIDs", arg0, arg1)
|
||||
ret0, _ := ret[0].([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow)
|
||||
ret1, _ := ret[1].(error)
|
||||
return ret0, ret1
|
||||
}
|
||||
|
||||
// GetEligibleProvisionerDaemonsByProvisionerJobIDs indicates an expected call of GetEligibleProvisionerDaemonsByProvisionerJobIDs.
|
||||
func (mr *MockStoreMockRecorder) GetEligibleProvisionerDaemonsByProvisionerJobIDs(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetEligibleProvisionerDaemonsByProvisionerJobIDs", reflect.TypeOf((*MockStore)(nil).GetEligibleProvisionerDaemonsByProvisionerJobIDs), arg0, arg1)
|
||||
}
|
||||
|
||||
// GetExternalAuthLink mocks base method.
|
||||
func (m *MockStore) GetExternalAuthLink(arg0 context.Context, arg1 database.GetExternalAuthLinkParams) (database.ExternalAuthLink, error) {
|
||||
m.ctrl.T.Helper()
|
||||
@@ -4463,20 +4478,6 @@ func (mr *MockStoreMockRecorder) RegisterWorkspaceProxy(arg0, arg1 any) *gomock.
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RegisterWorkspaceProxy", reflect.TypeOf((*MockStore)(nil).RegisterWorkspaceProxy), arg0, arg1)
|
||||
}
|
||||
|
||||
// RemoveRefreshToken mocks base method.
|
||||
func (m *MockStore) RemoveRefreshToken(arg0 context.Context, arg1 database.RemoveRefreshTokenParams) error {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "RemoveRefreshToken", arg0, arg1)
|
||||
ret0, _ := ret[0].(error)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// RemoveRefreshToken indicates an expected call of RemoveRefreshToken.
|
||||
func (mr *MockStoreMockRecorder) RemoveRefreshToken(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RemoveRefreshToken", reflect.TypeOf((*MockStore)(nil).RemoveRefreshToken), arg0, arg1)
|
||||
}
|
||||
|
||||
// RemoveUserFromAllGroups mocks base method.
|
||||
func (m *MockStore) RemoveUserFromAllGroups(arg0 context.Context, arg1 uuid.UUID) error {
|
||||
m.ctrl.T.Helper()
|
||||
@@ -4622,6 +4623,20 @@ func (mr *MockStoreMockRecorder) UpdateExternalAuthLink(arg0, arg1 any) *gomock.
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateExternalAuthLink", reflect.TypeOf((*MockStore)(nil).UpdateExternalAuthLink), arg0, arg1)
|
||||
}
|
||||
|
||||
// UpdateExternalAuthLinkRefreshToken mocks base method.
|
||||
func (m *MockStore) UpdateExternalAuthLinkRefreshToken(arg0 context.Context, arg1 database.UpdateExternalAuthLinkRefreshTokenParams) error {
|
||||
m.ctrl.T.Helper()
|
||||
ret := m.ctrl.Call(m, "UpdateExternalAuthLinkRefreshToken", arg0, arg1)
|
||||
ret0, _ := ret[0].(error)
|
||||
return ret0
|
||||
}
|
||||
|
||||
// UpdateExternalAuthLinkRefreshToken indicates an expected call of UpdateExternalAuthLinkRefreshToken.
|
||||
func (mr *MockStoreMockRecorder) UpdateExternalAuthLinkRefreshToken(arg0, arg1 any) *gomock.Call {
|
||||
mr.mock.ctrl.T.Helper()
|
||||
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateExternalAuthLinkRefreshToken", reflect.TypeOf((*MockStore)(nil).UpdateExternalAuthLinkRefreshToken), arg0, arg1)
|
||||
}
|
||||
|
||||
// UpdateGitSSHKey mocks base method.
|
||||
func (m *MockStore) UpdateGitSSHKey(arg0 context.Context, arg1 database.UpdateGitSSHKeyParams) (database.GitSSHKey, error) {
|
||||
m.ctrl.T.Helper()
|
||||
|
||||
Generated
+3
-1
@@ -1217,7 +1217,8 @@ CREATE TABLE template_versions (
|
||||
created_by uuid NOT NULL,
|
||||
external_auth_providers jsonb DEFAULT '[]'::jsonb NOT NULL,
|
||||
message character varying(1048576) DEFAULT ''::character varying NOT NULL,
|
||||
archived boolean DEFAULT false NOT NULL
|
||||
archived boolean DEFAULT false NOT NULL,
|
||||
source_example_id text
|
||||
);
|
||||
|
||||
COMMENT ON COLUMN template_versions.external_auth_providers IS 'IDs of External auth providers for a specific template version';
|
||||
@@ -1245,6 +1246,7 @@ CREATE VIEW template_version_with_user AS
|
||||
template_versions.external_auth_providers,
|
||||
template_versions.message,
|
||||
template_versions.archived,
|
||||
template_versions.source_example_id,
|
||||
COALESCE(visible_users.avatar_url, ''::text) AS created_by_avatar_url,
|
||||
COALESCE(visible_users.username, ''::text) AS created_by_username
|
||||
FROM (template_versions
|
||||
|
||||
@@ -0,0 +1,28 @@
|
||||
-- We cannot alter the column type while a view depends on it, so we drop it and recreate it.
|
||||
DROP VIEW template_version_with_user;
|
||||
|
||||
ALTER TABLE
|
||||
template_versions
|
||||
DROP COLUMN source_example_id;
|
||||
|
||||
-- Recreate `template_version_with_user` as described in dump.sql
|
||||
CREATE VIEW template_version_with_user AS
|
||||
SELECT
|
||||
template_versions.id,
|
||||
template_versions.template_id,
|
||||
template_versions.organization_id,
|
||||
template_versions.created_at,
|
||||
template_versions.updated_at,
|
||||
template_versions.name,
|
||||
template_versions.readme,
|
||||
template_versions.job_id,
|
||||
template_versions.created_by,
|
||||
template_versions.external_auth_providers,
|
||||
template_versions.message,
|
||||
template_versions.archived,
|
||||
COALESCE(visible_users.avatar_url, ''::text) AS created_by_avatar_url,
|
||||
COALESCE(visible_users.username, ''::text) AS created_by_username
|
||||
FROM (template_versions
|
||||
LEFT JOIN visible_users ON (template_versions.created_by = visible_users.id));
|
||||
|
||||
COMMENT ON VIEW template_version_with_user IS 'Joins in the username + avatar url of the created by user.';
|
||||
@@ -0,0 +1,30 @@
|
||||
-- We cannot alter the column type while a view depends on it, so we drop it and recreate it.
|
||||
DROP VIEW template_version_with_user;
|
||||
|
||||
ALTER TABLE
|
||||
template_versions
|
||||
ADD
|
||||
COLUMN source_example_id TEXT;
|
||||
|
||||
-- Recreate `template_version_with_user` as described in dump.sql
|
||||
CREATE VIEW template_version_with_user AS
|
||||
SELECT
|
||||
template_versions.id,
|
||||
template_versions.template_id,
|
||||
template_versions.organization_id,
|
||||
template_versions.created_at,
|
||||
template_versions.updated_at,
|
||||
template_versions.name,
|
||||
template_versions.readme,
|
||||
template_versions.job_id,
|
||||
template_versions.created_by,
|
||||
template_versions.external_auth_providers,
|
||||
template_versions.message,
|
||||
template_versions.archived,
|
||||
template_versions.source_example_id,
|
||||
COALESCE(visible_users.avatar_url, ''::text) AS created_by_avatar_url,
|
||||
COALESCE(visible_users.username, ''::text) AS created_by_username
|
||||
FROM (template_versions
|
||||
LEFT JOIN visible_users ON (template_versions.created_by = visible_users.id));
|
||||
|
||||
COMMENT ON VIEW template_version_with_user IS 'Joins in the username + avatar url of the created by user.';
|
||||
@@ -268,6 +268,10 @@ func (p ProvisionerDaemon) RBACObject() rbac.Object {
|
||||
InOrg(p.OrganizationID)
|
||||
}
|
||||
|
||||
func (p GetEligibleProvisionerDaemonsByProvisionerJobIDsRow) RBACObject() rbac.Object {
|
||||
return p.ProvisionerDaemon.RBACObject()
|
||||
}
|
||||
|
||||
func (p ProvisionerKey) RBACObject() rbac.Object {
|
||||
return rbac.ResourceProvisionerKeys.
|
||||
WithID(p.ID).
|
||||
|
||||
@@ -2773,6 +2773,7 @@ type TemplateVersion struct {
|
||||
ExternalAuthProviders json.RawMessage `db:"external_auth_providers" json:"external_auth_providers"`
|
||||
Message string `db:"message" json:"message"`
|
||||
Archived bool `db:"archived" json:"archived"`
|
||||
SourceExampleID sql.NullString `db:"source_example_id" json:"source_example_id"`
|
||||
CreatedByAvatarURL string `db:"created_by_avatar_url" json:"created_by_avatar_url"`
|
||||
CreatedByUsername string `db:"created_by_username" json:"created_by_username"`
|
||||
}
|
||||
@@ -2826,8 +2827,9 @@ type TemplateVersionTable struct {
|
||||
// IDs of External auth providers for a specific template version
|
||||
ExternalAuthProviders json.RawMessage `db:"external_auth_providers" json:"external_auth_providers"`
|
||||
// Message describing the changes in this version of the template, similar to a Git commit message. Like a commit message, this should be a short, high-level description of the changes in this version of the template. This message is immutable and should not be updated after the fact.
|
||||
Message string `db:"message" json:"message"`
|
||||
Archived bool `db:"archived" json:"archived"`
|
||||
Message string `db:"message" json:"message"`
|
||||
Archived bool `db:"archived" json:"archived"`
|
||||
SourceExampleID sql.NullString `db:"source_example_id" json:"source_example_id"`
|
||||
}
|
||||
|
||||
type TemplateVersionVariable struct {
|
||||
|
||||
@@ -144,6 +144,7 @@ type sqlcQuerier interface {
|
||||
GetDeploymentWorkspaceAgentStats(ctx context.Context, createdAt time.Time) (GetDeploymentWorkspaceAgentStatsRow, error)
|
||||
GetDeploymentWorkspaceAgentUsageStats(ctx context.Context, createdAt time.Time) (GetDeploymentWorkspaceAgentUsageStatsRow, error)
|
||||
GetDeploymentWorkspaceStats(ctx context.Context) (GetDeploymentWorkspaceStatsRow, error)
|
||||
GetEligibleProvisionerDaemonsByProvisionerJobIDs(ctx context.Context, provisionerJobIds []uuid.UUID) ([]GetEligibleProvisionerDaemonsByProvisionerJobIDsRow, error)
|
||||
GetExternalAuthLink(ctx context.Context, arg GetExternalAuthLinkParams) (ExternalAuthLink, error)
|
||||
GetExternalAuthLinksByUserID(ctx context.Context, userID uuid.UUID) ([]ExternalAuthLink, error)
|
||||
GetFailedWorkspaceBuildsByTemplateID(ctx context.Context, arg GetFailedWorkspaceBuildsByTemplateIDParams) ([]GetFailedWorkspaceBuildsByTemplateIDRow, error)
|
||||
@@ -424,10 +425,6 @@ type sqlcQuerier interface {
|
||||
OrganizationMembers(ctx context.Context, arg OrganizationMembersParams) ([]OrganizationMembersRow, error)
|
||||
ReduceWorkspaceAgentShareLevelToAuthenticatedByTemplate(ctx context.Context, templateID uuid.UUID) error
|
||||
RegisterWorkspaceProxy(ctx context.Context, arg RegisterWorkspaceProxyParams) (WorkspaceProxy, error)
|
||||
// Removing the refresh token disables the refresh behavior for a given
|
||||
// auth token. If a refresh token is marked invalid, it is better to remove it
|
||||
// then continually attempt to refresh the token.
|
||||
RemoveRefreshToken(ctx context.Context, arg RemoveRefreshTokenParams) error
|
||||
RemoveUserFromAllGroups(ctx context.Context, userID uuid.UUID) error
|
||||
RemoveUserFromGroups(ctx context.Context, arg RemoveUserFromGroupsParams) ([]uuid.UUID, error)
|
||||
RevokeDBCryptKey(ctx context.Context, activeKeyDigest string) error
|
||||
@@ -443,6 +440,7 @@ type sqlcQuerier interface {
|
||||
UpdateCryptoKeyDeletesAt(ctx context.Context, arg UpdateCryptoKeyDeletesAtParams) (CryptoKey, error)
|
||||
UpdateCustomRole(ctx context.Context, arg UpdateCustomRoleParams) (CustomRole, error)
|
||||
UpdateExternalAuthLink(ctx context.Context, arg UpdateExternalAuthLinkParams) (ExternalAuthLink, error)
|
||||
UpdateExternalAuthLinkRefreshToken(ctx context.Context, arg UpdateExternalAuthLinkRefreshTokenParams) error
|
||||
UpdateGitSSHKey(ctx context.Context, arg UpdateGitSSHKeyParams) (GitSSHKey, error)
|
||||
UpdateGroupByID(ctx context.Context, arg UpdateGroupByIDParams) (Group, error)
|
||||
UpdateInactiveUsersToDormant(ctx context.Context, arg UpdateInactiveUsersToDormantParams) ([]UpdateInactiveUsersToDormantRow, error)
|
||||
|
||||
@@ -27,6 +27,7 @@ import (
|
||||
"github.com/coder/coder/v2/coderd/httpmw"
|
||||
"github.com/coder/coder/v2/coderd/rbac"
|
||||
"github.com/coder/coder/v2/coderd/rbac/policy"
|
||||
"github.com/coder/coder/v2/provisionersdk"
|
||||
"github.com/coder/coder/v2/testutil"
|
||||
)
|
||||
|
||||
@@ -211,6 +212,145 @@ func TestGetDeploymentWorkspaceAgentUsageStats(t *testing.T) {
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetEligibleProvisionerDaemonsByProvisionerJobIDs(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
t.Run("NoJobsReturnsEmpty", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
db, _ := dbtestutil.NewDB(t)
|
||||
daemons, err := db.GetEligibleProvisionerDaemonsByProvisionerJobIDs(context.Background(), []uuid.UUID{})
|
||||
require.NoError(t, err)
|
||||
require.Empty(t, daemons)
|
||||
})
|
||||
|
||||
t.Run("MatchesProvisionerType", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
db, _ := dbtestutil.NewDB(t)
|
||||
org := dbgen.Organization(t, db, database.Organization{})
|
||||
|
||||
job := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{
|
||||
OrganizationID: org.ID,
|
||||
Type: database.ProvisionerJobTypeWorkspaceBuild,
|
||||
Provisioner: database.ProvisionerTypeEcho,
|
||||
Tags: database.StringMap{
|
||||
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
|
||||
},
|
||||
})
|
||||
|
||||
matchingDaemon := dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{
|
||||
Name: "matching-daemon",
|
||||
OrganizationID: org.ID,
|
||||
Provisioners: []database.ProvisionerType{database.ProvisionerTypeEcho},
|
||||
Tags: database.StringMap{
|
||||
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
|
||||
},
|
||||
})
|
||||
|
||||
dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{
|
||||
Name: "non-matching-daemon",
|
||||
OrganizationID: org.ID,
|
||||
Provisioners: []database.ProvisionerType{database.ProvisionerTypeTerraform},
|
||||
Tags: database.StringMap{
|
||||
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
|
||||
},
|
||||
})
|
||||
|
||||
daemons, err := db.GetEligibleProvisionerDaemonsByProvisionerJobIDs(context.Background(), []uuid.UUID{job.ID})
|
||||
require.NoError(t, err)
|
||||
require.Len(t, daemons, 1)
|
||||
require.Equal(t, matchingDaemon.ID, daemons[0].ProvisionerDaemon.ID)
|
||||
})
|
||||
|
||||
t.Run("MatchesOrganizationScope", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
db, _ := dbtestutil.NewDB(t)
|
||||
org := dbgen.Organization(t, db, database.Organization{})
|
||||
|
||||
job := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{
|
||||
OrganizationID: org.ID,
|
||||
Type: database.ProvisionerJobTypeWorkspaceBuild,
|
||||
Provisioner: database.ProvisionerTypeEcho,
|
||||
Tags: database.StringMap{
|
||||
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
|
||||
provisionersdk.TagOwner: "",
|
||||
},
|
||||
})
|
||||
|
||||
orgDaemon := dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{
|
||||
Name: "org-daemon",
|
||||
OrganizationID: org.ID,
|
||||
Provisioners: []database.ProvisionerType{database.ProvisionerTypeEcho},
|
||||
Tags: database.StringMap{
|
||||
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
|
||||
provisionersdk.TagOwner: "",
|
||||
},
|
||||
})
|
||||
|
||||
dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{
|
||||
Name: "user-daemon",
|
||||
OrganizationID: org.ID,
|
||||
Provisioners: []database.ProvisionerType{database.ProvisionerTypeEcho},
|
||||
Tags: database.StringMap{
|
||||
provisionersdk.TagScope: provisionersdk.ScopeUser,
|
||||
},
|
||||
})
|
||||
|
||||
daemons, err := db.GetEligibleProvisionerDaemonsByProvisionerJobIDs(context.Background(), []uuid.UUID{job.ID})
|
||||
require.NoError(t, err)
|
||||
require.Len(t, daemons, 1)
|
||||
require.Equal(t, orgDaemon.ID, daemons[0].ProvisionerDaemon.ID)
|
||||
})
|
||||
|
||||
t.Run("MatchesMultipleProvisioners", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
db, _ := dbtestutil.NewDB(t)
|
||||
org := dbgen.Organization(t, db, database.Organization{})
|
||||
|
||||
job := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{
|
||||
OrganizationID: org.ID,
|
||||
Type: database.ProvisionerJobTypeWorkspaceBuild,
|
||||
Provisioner: database.ProvisionerTypeEcho,
|
||||
Tags: database.StringMap{
|
||||
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
|
||||
},
|
||||
})
|
||||
|
||||
daemon1 := dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{
|
||||
Name: "daemon-1",
|
||||
OrganizationID: org.ID,
|
||||
Provisioners: []database.ProvisionerType{database.ProvisionerTypeEcho},
|
||||
Tags: database.StringMap{
|
||||
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
|
||||
},
|
||||
})
|
||||
|
||||
daemon2 := dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{
|
||||
Name: "daemon-2",
|
||||
OrganizationID: org.ID,
|
||||
Provisioners: []database.ProvisionerType{database.ProvisionerTypeEcho},
|
||||
Tags: database.StringMap{
|
||||
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
|
||||
},
|
||||
})
|
||||
|
||||
dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{
|
||||
Name: "daemon-3",
|
||||
OrganizationID: org.ID,
|
||||
Provisioners: []database.ProvisionerType{database.ProvisionerTypeTerraform},
|
||||
Tags: database.StringMap{
|
||||
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
|
||||
},
|
||||
})
|
||||
|
||||
daemons, err := db.GetEligibleProvisionerDaemonsByProvisionerJobIDs(context.Background(), []uuid.UUID{job.ID})
|
||||
require.NoError(t, err)
|
||||
require.Len(t, daemons, 2)
|
||||
|
||||
daemonIDs := []uuid.UUID{daemons[0].ProvisionerDaemon.ID, daemons[1].ProvisionerDaemon.ID}
|
||||
require.ElementsMatch(t, []uuid.UUID{daemon1.ID, daemon2.ID}, daemonIDs)
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetWorkspaceAgentUsageStats(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
|
||||
+120
-44
@@ -1194,29 +1194,6 @@ func (q *sqlQuerier) InsertExternalAuthLink(ctx context.Context, arg InsertExter
|
||||
return i, err
|
||||
}
|
||||
|
||||
const removeRefreshToken = `-- name: RemoveRefreshToken :exec
|
||||
UPDATE
|
||||
external_auth_links
|
||||
SET
|
||||
oauth_refresh_token = '',
|
||||
updated_at = $1
|
||||
WHERE provider_id = $2 AND user_id = $3
|
||||
`
|
||||
|
||||
type RemoveRefreshTokenParams struct {
|
||||
UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
|
||||
ProviderID string `db:"provider_id" json:"provider_id"`
|
||||
UserID uuid.UUID `db:"user_id" json:"user_id"`
|
||||
}
|
||||
|
||||
// Removing the refresh token disables the refresh behavior for a given
|
||||
// auth token. If a refresh token is marked invalid, it is better to remove it
|
||||
// then continually attempt to refresh the token.
|
||||
func (q *sqlQuerier) RemoveRefreshToken(ctx context.Context, arg RemoveRefreshTokenParams) error {
|
||||
_, err := q.db.ExecContext(ctx, removeRefreshToken, arg.UpdatedAt, arg.ProviderID, arg.UserID)
|
||||
return err
|
||||
}
|
||||
|
||||
const updateExternalAuthLink = `-- name: UpdateExternalAuthLink :one
|
||||
UPDATE external_auth_links SET
|
||||
updated_at = $3,
|
||||
@@ -1269,6 +1246,40 @@ func (q *sqlQuerier) UpdateExternalAuthLink(ctx context.Context, arg UpdateExter
|
||||
return i, err
|
||||
}
|
||||
|
||||
const updateExternalAuthLinkRefreshToken = `-- name: UpdateExternalAuthLinkRefreshToken :exec
|
||||
UPDATE
|
||||
external_auth_links
|
||||
SET
|
||||
oauth_refresh_token = $1,
|
||||
updated_at = $2
|
||||
WHERE
|
||||
provider_id = $3
|
||||
AND
|
||||
user_id = $4
|
||||
AND
|
||||
-- Required for sqlc to generate a parameter for the oauth_refresh_token_key_id
|
||||
$5 :: text = $5 :: text
|
||||
`
|
||||
|
||||
type UpdateExternalAuthLinkRefreshTokenParams struct {
|
||||
OAuthRefreshToken string `db:"oauth_refresh_token" json:"oauth_refresh_token"`
|
||||
UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
|
||||
ProviderID string `db:"provider_id" json:"provider_id"`
|
||||
UserID uuid.UUID `db:"user_id" json:"user_id"`
|
||||
OAuthRefreshTokenKeyID string `db:"oauth_refresh_token_key_id" json:"oauth_refresh_token_key_id"`
|
||||
}
|
||||
|
||||
func (q *sqlQuerier) UpdateExternalAuthLinkRefreshToken(ctx context.Context, arg UpdateExternalAuthLinkRefreshTokenParams) error {
|
||||
_, err := q.db.ExecContext(ctx, updateExternalAuthLinkRefreshToken,
|
||||
arg.OAuthRefreshToken,
|
||||
arg.UpdatedAt,
|
||||
arg.ProviderID,
|
||||
arg.UserID,
|
||||
arg.OAuthRefreshTokenKeyID,
|
||||
)
|
||||
return err
|
||||
}
|
||||
|
||||
const getFileByHashAndCreator = `-- name: GetFileByHashAndCreator :one
|
||||
SELECT
|
||||
hash, created_at, created_by, mimetype, data, id
|
||||
@@ -5244,6 +5255,60 @@ func (q *sqlQuerier) DeleteOldProvisionerDaemons(ctx context.Context) error {
|
||||
return err
|
||||
}
|
||||
|
||||
const getEligibleProvisionerDaemonsByProvisionerJobIDs = `-- name: GetEligibleProvisionerDaemonsByProvisionerJobIDs :many
|
||||
SELECT DISTINCT
|
||||
provisioner_jobs.id as job_id, provisioner_daemons.id, provisioner_daemons.created_at, provisioner_daemons.name, provisioner_daemons.provisioners, provisioner_daemons.replica_id, provisioner_daemons.tags, provisioner_daemons.last_seen_at, provisioner_daemons.version, provisioner_daemons.api_version, provisioner_daemons.organization_id, provisioner_daemons.key_id
|
||||
FROM
|
||||
provisioner_jobs
|
||||
JOIN
|
||||
provisioner_daemons ON provisioner_daemons.organization_id = provisioner_jobs.organization_id
|
||||
AND provisioner_tagset_contains(provisioner_daemons.tags::tagset, provisioner_jobs.tags::tagset)
|
||||
AND provisioner_jobs.provisioner = ANY(provisioner_daemons.provisioners)
|
||||
WHERE
|
||||
provisioner_jobs.id = ANY($1 :: uuid[])
|
||||
`
|
||||
|
||||
type GetEligibleProvisionerDaemonsByProvisionerJobIDsRow struct {
|
||||
JobID uuid.UUID `db:"job_id" json:"job_id"`
|
||||
ProvisionerDaemon ProvisionerDaemon `db:"provisioner_daemon" json:"provisioner_daemon"`
|
||||
}
|
||||
|
||||
func (q *sqlQuerier) GetEligibleProvisionerDaemonsByProvisionerJobIDs(ctx context.Context, provisionerJobIds []uuid.UUID) ([]GetEligibleProvisionerDaemonsByProvisionerJobIDsRow, error) {
|
||||
rows, err := q.db.QueryContext(ctx, getEligibleProvisionerDaemonsByProvisionerJobIDs, pq.Array(provisionerJobIds))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []GetEligibleProvisionerDaemonsByProvisionerJobIDsRow
|
||||
for rows.Next() {
|
||||
var i GetEligibleProvisionerDaemonsByProvisionerJobIDsRow
|
||||
if err := rows.Scan(
|
||||
&i.JobID,
|
||||
&i.ProvisionerDaemon.ID,
|
||||
&i.ProvisionerDaemon.CreatedAt,
|
||||
&i.ProvisionerDaemon.Name,
|
||||
pq.Array(&i.ProvisionerDaemon.Provisioners),
|
||||
&i.ProvisionerDaemon.ReplicaID,
|
||||
&i.ProvisionerDaemon.Tags,
|
||||
&i.ProvisionerDaemon.LastSeenAt,
|
||||
&i.ProvisionerDaemon.Version,
|
||||
&i.ProvisionerDaemon.APIVersion,
|
||||
&i.ProvisionerDaemon.OrganizationID,
|
||||
&i.ProvisionerDaemon.KeyID,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const getProvisionerDaemons = `-- name: GetProvisionerDaemons :many
|
||||
SELECT
|
||||
id, created_at, name, provisioners, replica_id, tags, last_seen_at, version, api_version, organization_id, key_id
|
||||
@@ -8996,7 +9061,7 @@ FROM
|
||||
-- Scope an archive to a single template and ignore already archived template versions
|
||||
(
|
||||
SELECT
|
||||
id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived
|
||||
id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id
|
||||
FROM
|
||||
template_versions
|
||||
WHERE
|
||||
@@ -9097,7 +9162,7 @@ func (q *sqlQuerier) ArchiveUnusedTemplateVersions(ctx context.Context, arg Arch
|
||||
|
||||
const getPreviousTemplateVersion = `-- name: GetPreviousTemplateVersion :one
|
||||
SELECT
|
||||
id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, created_by_avatar_url, created_by_username
|
||||
id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, created_by_avatar_url, created_by_username
|
||||
FROM
|
||||
template_version_with_user AS template_versions
|
||||
WHERE
|
||||
@@ -9134,6 +9199,7 @@ func (q *sqlQuerier) GetPreviousTemplateVersion(ctx context.Context, arg GetPrev
|
||||
&i.ExternalAuthProviders,
|
||||
&i.Message,
|
||||
&i.Archived,
|
||||
&i.SourceExampleID,
|
||||
&i.CreatedByAvatarURL,
|
||||
&i.CreatedByUsername,
|
||||
)
|
||||
@@ -9142,7 +9208,7 @@ func (q *sqlQuerier) GetPreviousTemplateVersion(ctx context.Context, arg GetPrev
|
||||
|
||||
const getTemplateVersionByID = `-- name: GetTemplateVersionByID :one
|
||||
SELECT
|
||||
id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, created_by_avatar_url, created_by_username
|
||||
id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, created_by_avatar_url, created_by_username
|
||||
FROM
|
||||
template_version_with_user AS template_versions
|
||||
WHERE
|
||||
@@ -9165,6 +9231,7 @@ func (q *sqlQuerier) GetTemplateVersionByID(ctx context.Context, id uuid.UUID) (
|
||||
&i.ExternalAuthProviders,
|
||||
&i.Message,
|
||||
&i.Archived,
|
||||
&i.SourceExampleID,
|
||||
&i.CreatedByAvatarURL,
|
||||
&i.CreatedByUsername,
|
||||
)
|
||||
@@ -9173,7 +9240,7 @@ func (q *sqlQuerier) GetTemplateVersionByID(ctx context.Context, id uuid.UUID) (
|
||||
|
||||
const getTemplateVersionByJobID = `-- name: GetTemplateVersionByJobID :one
|
||||
SELECT
|
||||
id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, created_by_avatar_url, created_by_username
|
||||
id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, created_by_avatar_url, created_by_username
|
||||
FROM
|
||||
template_version_with_user AS template_versions
|
||||
WHERE
|
||||
@@ -9196,6 +9263,7 @@ func (q *sqlQuerier) GetTemplateVersionByJobID(ctx context.Context, jobID uuid.U
|
||||
&i.ExternalAuthProviders,
|
||||
&i.Message,
|
||||
&i.Archived,
|
||||
&i.SourceExampleID,
|
||||
&i.CreatedByAvatarURL,
|
||||
&i.CreatedByUsername,
|
||||
)
|
||||
@@ -9204,7 +9272,7 @@ func (q *sqlQuerier) GetTemplateVersionByJobID(ctx context.Context, jobID uuid.U
|
||||
|
||||
const getTemplateVersionByTemplateIDAndName = `-- name: GetTemplateVersionByTemplateIDAndName :one
|
||||
SELECT
|
||||
id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, created_by_avatar_url, created_by_username
|
||||
id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, created_by_avatar_url, created_by_username
|
||||
FROM
|
||||
template_version_with_user AS template_versions
|
||||
WHERE
|
||||
@@ -9233,6 +9301,7 @@ func (q *sqlQuerier) GetTemplateVersionByTemplateIDAndName(ctx context.Context,
|
||||
&i.ExternalAuthProviders,
|
||||
&i.Message,
|
||||
&i.Archived,
|
||||
&i.SourceExampleID,
|
||||
&i.CreatedByAvatarURL,
|
||||
&i.CreatedByUsername,
|
||||
)
|
||||
@@ -9241,7 +9310,7 @@ func (q *sqlQuerier) GetTemplateVersionByTemplateIDAndName(ctx context.Context,
|
||||
|
||||
const getTemplateVersionsByIDs = `-- name: GetTemplateVersionsByIDs :many
|
||||
SELECT
|
||||
id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, created_by_avatar_url, created_by_username
|
||||
id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, created_by_avatar_url, created_by_username
|
||||
FROM
|
||||
template_version_with_user AS template_versions
|
||||
WHERE
|
||||
@@ -9270,6 +9339,7 @@ func (q *sqlQuerier) GetTemplateVersionsByIDs(ctx context.Context, ids []uuid.UU
|
||||
&i.ExternalAuthProviders,
|
||||
&i.Message,
|
||||
&i.Archived,
|
||||
&i.SourceExampleID,
|
||||
&i.CreatedByAvatarURL,
|
||||
&i.CreatedByUsername,
|
||||
); err != nil {
|
||||
@@ -9288,7 +9358,7 @@ func (q *sqlQuerier) GetTemplateVersionsByIDs(ctx context.Context, ids []uuid.UU
|
||||
|
||||
const getTemplateVersionsByTemplateID = `-- name: GetTemplateVersionsByTemplateID :many
|
||||
SELECT
|
||||
id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, created_by_avatar_url, created_by_username
|
||||
id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, created_by_avatar_url, created_by_username
|
||||
FROM
|
||||
template_version_with_user AS template_versions
|
||||
WHERE
|
||||
@@ -9364,6 +9434,7 @@ func (q *sqlQuerier) GetTemplateVersionsByTemplateID(ctx context.Context, arg Ge
|
||||
&i.ExternalAuthProviders,
|
||||
&i.Message,
|
||||
&i.Archived,
|
||||
&i.SourceExampleID,
|
||||
&i.CreatedByAvatarURL,
|
||||
&i.CreatedByUsername,
|
||||
); err != nil {
|
||||
@@ -9381,7 +9452,7 @@ func (q *sqlQuerier) GetTemplateVersionsByTemplateID(ctx context.Context, arg Ge
|
||||
}
|
||||
|
||||
const getTemplateVersionsCreatedAfter = `-- name: GetTemplateVersionsCreatedAfter :many
|
||||
SELECT id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, created_by_avatar_url, created_by_username FROM template_version_with_user AS template_versions WHERE created_at > $1
|
||||
SELECT id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, created_by_avatar_url, created_by_username FROM template_version_with_user AS template_versions WHERE created_at > $1
|
||||
`
|
||||
|
||||
func (q *sqlQuerier) GetTemplateVersionsCreatedAfter(ctx context.Context, createdAt time.Time) ([]TemplateVersion, error) {
|
||||
@@ -9406,6 +9477,7 @@ func (q *sqlQuerier) GetTemplateVersionsCreatedAfter(ctx context.Context, create
|
||||
&i.ExternalAuthProviders,
|
||||
&i.Message,
|
||||
&i.Archived,
|
||||
&i.SourceExampleID,
|
||||
&i.CreatedByAvatarURL,
|
||||
&i.CreatedByUsername,
|
||||
); err != nil {
|
||||
@@ -9434,23 +9506,25 @@ INSERT INTO
|
||||
message,
|
||||
readme,
|
||||
job_id,
|
||||
created_by
|
||||
created_by,
|
||||
source_example_id
|
||||
)
|
||||
VALUES
|
||||
($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
|
||||
($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
|
||||
`
|
||||
|
||||
type InsertTemplateVersionParams struct {
|
||||
ID uuid.UUID `db:"id" json:"id"`
|
||||
TemplateID uuid.NullUUID `db:"template_id" json:"template_id"`
|
||||
OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"`
|
||||
CreatedAt time.Time `db:"created_at" json:"created_at"`
|
||||
UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
|
||||
Name string `db:"name" json:"name"`
|
||||
Message string `db:"message" json:"message"`
|
||||
Readme string `db:"readme" json:"readme"`
|
||||
JobID uuid.UUID `db:"job_id" json:"job_id"`
|
||||
CreatedBy uuid.UUID `db:"created_by" json:"created_by"`
|
||||
ID uuid.UUID `db:"id" json:"id"`
|
||||
TemplateID uuid.NullUUID `db:"template_id" json:"template_id"`
|
||||
OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"`
|
||||
CreatedAt time.Time `db:"created_at" json:"created_at"`
|
||||
UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
|
||||
Name string `db:"name" json:"name"`
|
||||
Message string `db:"message" json:"message"`
|
||||
Readme string `db:"readme" json:"readme"`
|
||||
JobID uuid.UUID `db:"job_id" json:"job_id"`
|
||||
CreatedBy uuid.UUID `db:"created_by" json:"created_by"`
|
||||
SourceExampleID sql.NullString `db:"source_example_id" json:"source_example_id"`
|
||||
}
|
||||
|
||||
func (q *sqlQuerier) InsertTemplateVersion(ctx context.Context, arg InsertTemplateVersionParams) error {
|
||||
@@ -9465,6 +9539,7 @@ func (q *sqlQuerier) InsertTemplateVersion(ctx context.Context, arg InsertTempla
|
||||
arg.Readme,
|
||||
arg.JobID,
|
||||
arg.CreatedBy,
|
||||
arg.SourceExampleID,
|
||||
)
|
||||
return err
|
||||
}
|
||||
@@ -11581,7 +11656,7 @@ func (q *sqlQuerier) GetWorkspaceAgentMetadata(ctx context.Context, arg GetWorks
|
||||
|
||||
const getWorkspaceAgentScriptTimingsByBuildID = `-- name: GetWorkspaceAgentScriptTimingsByBuildID :many
|
||||
SELECT
|
||||
workspace_agent_script_timings.script_id, workspace_agent_script_timings.started_at, workspace_agent_script_timings.ended_at, workspace_agent_script_timings.exit_code, workspace_agent_script_timings.stage, workspace_agent_script_timings.status,
|
||||
DISTINCT ON (workspace_agent_script_timings.script_id) workspace_agent_script_timings.script_id, workspace_agent_script_timings.started_at, workspace_agent_script_timings.ended_at, workspace_agent_script_timings.exit_code, workspace_agent_script_timings.stage, workspace_agent_script_timings.status,
|
||||
workspace_agent_scripts.display_name,
|
||||
workspace_agents.id as workspace_agent_id,
|
||||
workspace_agents.name as workspace_agent_name
|
||||
@@ -11591,6 +11666,7 @@ INNER JOIN workspace_agents ON workspace_agents.id = workspace_agent_scripts.wor
|
||||
INNER JOIN workspace_resources ON workspace_resources.id = workspace_agents.resource_id
|
||||
INNER JOIN workspace_builds ON workspace_builds.job_id = workspace_resources.job_id
|
||||
WHERE workspace_builds.id = $1
|
||||
ORDER BY workspace_agent_script_timings.script_id, workspace_agent_script_timings.started_at
|
||||
`
|
||||
|
||||
type GetWorkspaceAgentScriptTimingsByBuildIDRow struct {
|
||||
|
||||
@@ -43,13 +43,16 @@ UPDATE external_auth_links SET
|
||||
oauth_extra = $9
|
||||
WHERE provider_id = $1 AND user_id = $2 RETURNING *;
|
||||
|
||||
-- name: RemoveRefreshToken :exec
|
||||
-- Removing the refresh token disables the refresh behavior for a given
|
||||
-- auth token. If a refresh token is marked invalid, it is better to remove it
|
||||
-- then continually attempt to refresh the token.
|
||||
-- name: UpdateExternalAuthLinkRefreshToken :exec
|
||||
UPDATE
|
||||
external_auth_links
|
||||
SET
|
||||
oauth_refresh_token = '',
|
||||
oauth_refresh_token = @oauth_refresh_token,
|
||||
updated_at = @updated_at
|
||||
WHERE provider_id = @provider_id AND user_id = @user_id;
|
||||
WHERE
|
||||
provider_id = @provider_id
|
||||
AND
|
||||
user_id = @user_id
|
||||
AND
|
||||
-- Required for sqlc to generate a parameter for the oauth_refresh_token_key_id
|
||||
@oauth_refresh_token_key_id :: text = @oauth_refresh_token_key_id :: text;
|
||||
|
||||
@@ -16,6 +16,18 @@ WHERE
|
||||
-- adding support for searching by tags:
|
||||
(@want_tags :: tagset = 'null' :: tagset OR provisioner_tagset_contains(provisioner_daemons.tags::tagset, @want_tags::tagset));
|
||||
|
||||
-- name: GetEligibleProvisionerDaemonsByProvisionerJobIDs :many
|
||||
SELECT DISTINCT
|
||||
provisioner_jobs.id as job_id, sqlc.embed(provisioner_daemons)
|
||||
FROM
|
||||
provisioner_jobs
|
||||
JOIN
|
||||
provisioner_daemons ON provisioner_daemons.organization_id = provisioner_jobs.organization_id
|
||||
AND provisioner_tagset_contains(provisioner_daemons.tags::tagset, provisioner_jobs.tags::tagset)
|
||||
AND provisioner_jobs.provisioner = ANY(provisioner_daemons.provisioners)
|
||||
WHERE
|
||||
provisioner_jobs.id = ANY(@provisioner_job_ids :: uuid[]);
|
||||
|
||||
-- name: DeleteOldProvisionerDaemons :exec
|
||||
-- Delete provisioner daemons that have been created at least a week ago
|
||||
-- and have not connected to coderd since a week.
|
||||
|
||||
@@ -87,10 +87,11 @@ INSERT INTO
|
||||
message,
|
||||
readme,
|
||||
job_id,
|
||||
created_by
|
||||
created_by,
|
||||
source_example_id
|
||||
)
|
||||
VALUES
|
||||
($1, $2, $3, $4, $5, $6, $7, $8, $9, $10);
|
||||
($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11);
|
||||
|
||||
-- name: UpdateTemplateVersionByID :exec
|
||||
UPDATE
|
||||
|
||||
@@ -304,7 +304,7 @@ RETURNING workspace_agent_script_timings.*;
|
||||
|
||||
-- name: GetWorkspaceAgentScriptTimingsByBuildID :many
|
||||
SELECT
|
||||
workspace_agent_script_timings.*,
|
||||
DISTINCT ON (workspace_agent_script_timings.script_id) workspace_agent_script_timings.*,
|
||||
workspace_agent_scripts.display_name,
|
||||
workspace_agents.id as workspace_agent_id,
|
||||
workspace_agents.name as workspace_agent_name
|
||||
@@ -313,4 +313,5 @@ INNER JOIN workspace_agent_scripts ON workspace_agent_scripts.id = workspace_age
|
||||
INNER JOIN workspace_agents ON workspace_agents.id = workspace_agent_scripts.workspace_agent_id
|
||||
INNER JOIN workspace_resources ON workspace_resources.id = workspace_agents.resource_id
|
||||
INNER JOIN workspace_builds ON workspace_builds.job_id = workspace_resources.job_id
|
||||
WHERE workspace_builds.id = $1;
|
||||
WHERE workspace_builds.id = $1
|
||||
ORDER BY workspace_agent_script_timings.script_id, workspace_agent_script_timings.started_at;
|
||||
|
||||
@@ -143,10 +143,12 @@ func (c *Config) RefreshToken(ctx context.Context, db database.Store, externalAu
|
||||
// get rid of it. Keeping it around will cause additional refresh
|
||||
// attempts that will fail and cost us api rate limits.
|
||||
if isFailedRefresh(existingToken, err) {
|
||||
dbExecErr := db.RemoveRefreshToken(ctx, database.RemoveRefreshTokenParams{
|
||||
UpdatedAt: dbtime.Now(),
|
||||
ProviderID: externalAuthLink.ProviderID,
|
||||
UserID: externalAuthLink.UserID,
|
||||
dbExecErr := db.UpdateExternalAuthLinkRefreshToken(ctx, database.UpdateExternalAuthLinkRefreshTokenParams{
|
||||
OAuthRefreshToken: "", // It is better to clear the refresh token than to keep retrying.
|
||||
OAuthRefreshTokenKeyID: externalAuthLink.OAuthRefreshTokenKeyID.String,
|
||||
UpdatedAt: dbtime.Now(),
|
||||
ProviderID: externalAuthLink.ProviderID,
|
||||
UserID: externalAuthLink.UserID,
|
||||
})
|
||||
if dbExecErr != nil {
|
||||
// This error should be rare.
|
||||
|
||||
@@ -190,7 +190,7 @@ func TestRefreshToken(t *testing.T) {
|
||||
|
||||
// Try again with a bad refresh token error
|
||||
// Expect DB call to remove the refresh token
|
||||
mDB.EXPECT().RemoveRefreshToken(gomock.Any(), gomock.Any()).Return(nil).Times(1)
|
||||
mDB.EXPECT().UpdateExternalAuthLinkRefreshToken(gomock.Any(), gomock.Any()).Return(nil).Times(1)
|
||||
refreshErr = &oauth2.RetrieveError{ // github error
|
||||
Response: &http.Response{
|
||||
StatusCode: http.StatusOK,
|
||||
|
||||
@@ -50,7 +50,7 @@ func (r *ProvisionerDaemonsReport) Run(ctx context.Context, opts *ProvisionerDae
|
||||
now := opts.TimeNow()
|
||||
|
||||
if opts.StaleInterval == 0 {
|
||||
opts.StaleInterval = provisionerdserver.DefaultHeartbeatInterval * 3
|
||||
opts.StaleInterval = provisionerdserver.StaleInterval
|
||||
}
|
||||
|
||||
if opts.CurrentVersion == "" {
|
||||
|
||||
@@ -15,15 +15,21 @@ import (
|
||||
"github.com/coder/coder/v2/coderd/database/dbtime"
|
||||
"github.com/coder/coder/v2/coderd/healthcheck"
|
||||
"github.com/coder/coder/v2/coderd/healthcheck/health"
|
||||
"github.com/coder/coder/v2/coderd/provisionerdserver"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
"github.com/coder/coder/v2/codersdk/healthsdk"
|
||||
"github.com/coder/coder/v2/provisionerd/proto"
|
||||
"github.com/coder/coder/v2/testutil"
|
||||
)
|
||||
|
||||
func TestProvisionerDaemonReport(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
now := dbtime.Now()
|
||||
var (
|
||||
now = dbtime.Now()
|
||||
oneHourAgo = now.Add(-time.Hour)
|
||||
staleThreshold = now.Add(-provisionerdserver.StaleInterval).Add(-time.Second)
|
||||
)
|
||||
|
||||
for _, tt := range []struct {
|
||||
name string
|
||||
@@ -65,7 +71,9 @@ func TestProvisionerDaemonReport(t *testing.T) {
|
||||
currentVersion: "v1.2.3",
|
||||
currentAPIMajorVersion: proto.CurrentMajor,
|
||||
expectedSeverity: health.SeverityOK,
|
||||
provisionerDaemons: []database.ProvisionerDaemon{fakeProvisionerDaemon(t, "pd-ok", "v1.2.3", "1.0", now)},
|
||||
provisionerDaemons: []database.ProvisionerDaemon{
|
||||
fakeProvisionerDaemon(t, withName("pd-ok"), withVersion("v1.2.3"), withAPIVersion("1.0"), withCreatedAt(now), withLastSeenAt(now)),
|
||||
},
|
||||
expectedItems: []healthsdk.ProvisionerDaemonsReportItem{
|
||||
{
|
||||
ProvisionerDaemon: codersdk.ProvisionerDaemon{
|
||||
@@ -88,7 +96,9 @@ func TestProvisionerDaemonReport(t *testing.T) {
|
||||
currentAPIMajorVersion: proto.CurrentMajor,
|
||||
expectedSeverity: health.SeverityWarning,
|
||||
expectedWarningCode: health.CodeProvisionerDaemonVersionMismatch,
|
||||
provisionerDaemons: []database.ProvisionerDaemon{fakeProvisionerDaemon(t, "pd-old", "v1.1.2", "1.0", now)},
|
||||
provisionerDaemons: []database.ProvisionerDaemon{
|
||||
fakeProvisionerDaemon(t, withName("pd-old"), withVersion("v1.1.2"), withAPIVersion("1.0"), withCreatedAt(now), withLastSeenAt(now)),
|
||||
},
|
||||
expectedItems: []healthsdk.ProvisionerDaemonsReportItem{
|
||||
{
|
||||
ProvisionerDaemon: codersdk.ProvisionerDaemon{
|
||||
@@ -116,7 +126,9 @@ func TestProvisionerDaemonReport(t *testing.T) {
|
||||
currentAPIMajorVersion: proto.CurrentMajor,
|
||||
expectedSeverity: health.SeverityError,
|
||||
expectedWarningCode: health.CodeUnknown,
|
||||
provisionerDaemons: []database.ProvisionerDaemon{fakeProvisionerDaemon(t, "pd-invalid-version", "invalid", "1.0", now)},
|
||||
provisionerDaemons: []database.ProvisionerDaemon{
|
||||
fakeProvisionerDaemon(t, withName("pd-invalid-version"), withVersion("invalid"), withAPIVersion("1.0"), withCreatedAt(now), withLastSeenAt(now)),
|
||||
},
|
||||
expectedItems: []healthsdk.ProvisionerDaemonsReportItem{
|
||||
{
|
||||
ProvisionerDaemon: codersdk.ProvisionerDaemon{
|
||||
@@ -144,7 +156,9 @@ func TestProvisionerDaemonReport(t *testing.T) {
|
||||
currentAPIMajorVersion: proto.CurrentMajor,
|
||||
expectedSeverity: health.SeverityError,
|
||||
expectedWarningCode: health.CodeUnknown,
|
||||
provisionerDaemons: []database.ProvisionerDaemon{fakeProvisionerDaemon(t, "pd-invalid-api", "v1.2.3", "invalid", now)},
|
||||
provisionerDaemons: []database.ProvisionerDaemon{
|
||||
fakeProvisionerDaemon(t, withName("pd-invalid-api"), withVersion("v1.2.3"), withAPIVersion("invalid"), withCreatedAt(now), withLastSeenAt(now)),
|
||||
},
|
||||
expectedItems: []healthsdk.ProvisionerDaemonsReportItem{
|
||||
{
|
||||
ProvisionerDaemon: codersdk.ProvisionerDaemon{
|
||||
@@ -172,7 +186,9 @@ func TestProvisionerDaemonReport(t *testing.T) {
|
||||
currentAPIMajorVersion: 2,
|
||||
expectedSeverity: health.SeverityWarning,
|
||||
expectedWarningCode: health.CodeProvisionerDaemonAPIMajorVersionDeprecated,
|
||||
provisionerDaemons: []database.ProvisionerDaemon{fakeProvisionerDaemon(t, "pd-old-api", "v2.3.4", "1.0", now)},
|
||||
provisionerDaemons: []database.ProvisionerDaemon{
|
||||
fakeProvisionerDaemon(t, withName("pd-old-api"), withVersion("v2.3.4"), withAPIVersion("1.0"), withCreatedAt(now), withLastSeenAt(now)),
|
||||
},
|
||||
expectedItems: []healthsdk.ProvisionerDaemonsReportItem{
|
||||
{
|
||||
ProvisionerDaemon: codersdk.ProvisionerDaemon{
|
||||
@@ -200,7 +216,10 @@ func TestProvisionerDaemonReport(t *testing.T) {
|
||||
currentAPIMajorVersion: proto.CurrentMajor,
|
||||
expectedSeverity: health.SeverityWarning,
|
||||
expectedWarningCode: health.CodeProvisionerDaemonVersionMismatch,
|
||||
provisionerDaemons: []database.ProvisionerDaemon{fakeProvisionerDaemon(t, "pd-ok", "v1.2.3", "1.0", now), fakeProvisionerDaemon(t, "pd-old", "v1.1.2", "1.0", now)},
|
||||
provisionerDaemons: []database.ProvisionerDaemon{
|
||||
fakeProvisionerDaemon(t, withName("pd-ok"), withVersion("v1.2.3"), withAPIVersion("1.0"), withCreatedAt(now), withLastSeenAt(now)),
|
||||
fakeProvisionerDaemon(t, withName("pd-old"), withVersion("v1.1.2"), withAPIVersion("1.0"), withCreatedAt(now), withLastSeenAt(now)),
|
||||
},
|
||||
expectedItems: []healthsdk.ProvisionerDaemonsReportItem{
|
||||
{
|
||||
ProvisionerDaemon: codersdk.ProvisionerDaemon{
|
||||
@@ -241,7 +260,10 @@ func TestProvisionerDaemonReport(t *testing.T) {
|
||||
currentAPIMajorVersion: proto.CurrentMajor,
|
||||
expectedSeverity: health.SeverityWarning,
|
||||
expectedWarningCode: health.CodeProvisionerDaemonVersionMismatch,
|
||||
provisionerDaemons: []database.ProvisionerDaemon{fakeProvisionerDaemon(t, "pd-ok", "v1.2.3", "1.0", now), fakeProvisionerDaemon(t, "pd-new", "v2.3.4", "1.0", now)},
|
||||
provisionerDaemons: []database.ProvisionerDaemon{
|
||||
fakeProvisionerDaemon(t, withName("pd-ok"), withVersion("v1.2.3"), withAPIVersion("1.0"), withCreatedAt(now), withLastSeenAt(now)),
|
||||
fakeProvisionerDaemon(t, withName("pd-new"), withVersion("v2.3.4"), withAPIVersion("1.0"), withCreatedAt(now), withLastSeenAt(now)),
|
||||
},
|
||||
expectedItems: []healthsdk.ProvisionerDaemonsReportItem{
|
||||
{
|
||||
ProvisionerDaemon: codersdk.ProvisionerDaemon{
|
||||
@@ -281,7 +303,10 @@ func TestProvisionerDaemonReport(t *testing.T) {
|
||||
currentVersion: "v2.3.4",
|
||||
currentAPIMajorVersion: proto.CurrentMajor,
|
||||
expectedSeverity: health.SeverityOK,
|
||||
provisionerDaemons: []database.ProvisionerDaemon{fakeProvisionerDaemonStale(t, "pd-stale", "v1.2.3", "0.9", now.Add(-5*time.Minute), now), fakeProvisionerDaemon(t, "pd-ok", "v2.3.4", "1.0", now)},
|
||||
provisionerDaemons: []database.ProvisionerDaemon{
|
||||
fakeProvisionerDaemon(t, withName("pd-stale"), withVersion("v1.2.3"), withAPIVersion("0.9"), withCreatedAt(oneHourAgo), withLastSeenAt(staleThreshold)),
|
||||
fakeProvisionerDaemon(t, withName("pd-ok"), withVersion("v2.3.4"), withAPIVersion("1.0"), withCreatedAt(now), withLastSeenAt(now)),
|
||||
},
|
||||
expectedItems: []healthsdk.ProvisionerDaemonsReportItem{
|
||||
{
|
||||
ProvisionerDaemon: codersdk.ProvisionerDaemon{
|
||||
@@ -304,8 +329,10 @@ func TestProvisionerDaemonReport(t *testing.T) {
|
||||
currentAPIMajorVersion: proto.CurrentMajor,
|
||||
expectedSeverity: health.SeverityError,
|
||||
expectedWarningCode: health.CodeProvisionerDaemonsNoProvisionerDaemons,
|
||||
provisionerDaemons: []database.ProvisionerDaemon{fakeProvisionerDaemonStale(t, "pd-ok", "v1.2.3", "0.9", now.Add(-5*time.Minute), now)},
|
||||
expectedItems: []healthsdk.ProvisionerDaemonsReportItem{},
|
||||
provisionerDaemons: []database.ProvisionerDaemon{
|
||||
fakeProvisionerDaemon(t, withName("pd-stale"), withVersion("v1.2.3"), withAPIVersion("0.9"), withCreatedAt(oneHourAgo), withLastSeenAt(staleThreshold)),
|
||||
},
|
||||
expectedItems: []healthsdk.ProvisionerDaemonsReportItem{},
|
||||
},
|
||||
} {
|
||||
tt := tt
|
||||
@@ -353,25 +380,52 @@ func TestProvisionerDaemonReport(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func fakeProvisionerDaemon(t *testing.T, name, version, apiVersion string, now time.Time) database.ProvisionerDaemon {
|
||||
t.Helper()
|
||||
return database.ProvisionerDaemon{
|
||||
ID: uuid.Nil,
|
||||
Name: name,
|
||||
CreatedAt: now,
|
||||
LastSeenAt: sql.NullTime{Time: now, Valid: true},
|
||||
Provisioners: []database.ProvisionerType{database.ProvisionerTypeEcho, database.ProvisionerTypeTerraform},
|
||||
ReplicaID: uuid.NullUUID{},
|
||||
Tags: map[string]string{},
|
||||
Version: version,
|
||||
APIVersion: apiVersion,
|
||||
func withName(s string) func(*database.ProvisionerDaemon) {
|
||||
return func(pd *database.ProvisionerDaemon) {
|
||||
pd.Name = s
|
||||
}
|
||||
}
|
||||
|
||||
func fakeProvisionerDaemonStale(t *testing.T, name, version, apiVersion string, lastSeenAt, now time.Time) database.ProvisionerDaemon {
|
||||
t.Helper()
|
||||
d := fakeProvisionerDaemon(t, name, version, apiVersion, now)
|
||||
d.LastSeenAt.Valid = true
|
||||
d.LastSeenAt.Time = lastSeenAt
|
||||
return d
|
||||
func withCreatedAt(at time.Time) func(*database.ProvisionerDaemon) {
|
||||
return func(pd *database.ProvisionerDaemon) {
|
||||
pd.CreatedAt = at
|
||||
}
|
||||
}
|
||||
|
||||
func withLastSeenAt(at time.Time) func(*database.ProvisionerDaemon) {
|
||||
return func(pd *database.ProvisionerDaemon) {
|
||||
pd.LastSeenAt.Valid = true
|
||||
pd.LastSeenAt.Time = at
|
||||
}
|
||||
}
|
||||
|
||||
func withVersion(v string) func(*database.ProvisionerDaemon) {
|
||||
return func(pd *database.ProvisionerDaemon) {
|
||||
pd.Version = v
|
||||
}
|
||||
}
|
||||
|
||||
func withAPIVersion(v string) func(*database.ProvisionerDaemon) {
|
||||
return func(pd *database.ProvisionerDaemon) {
|
||||
pd.APIVersion = v
|
||||
}
|
||||
}
|
||||
|
||||
func fakeProvisionerDaemon(t *testing.T, opts ...func(*database.ProvisionerDaemon)) database.ProvisionerDaemon {
|
||||
t.Helper()
|
||||
pd := database.ProvisionerDaemon{
|
||||
ID: uuid.Nil,
|
||||
Name: testutil.GetRandomName(t),
|
||||
CreatedAt: time.Time{},
|
||||
LastSeenAt: sql.NullTime{},
|
||||
Provisioners: []database.ProvisionerType{database.ProvisionerTypeEcho, database.ProvisionerTypeTerraform},
|
||||
ReplicaID: uuid.NullUUID{},
|
||||
Tags: map[string]string{},
|
||||
Version: "",
|
||||
APIVersion: "",
|
||||
}
|
||||
for _, o := range opts {
|
||||
o(&pd)
|
||||
}
|
||||
return pd
|
||||
}
|
||||
|
||||
@@ -109,37 +109,20 @@ func ExtractWorkspaceAgentAndLatestBuild(opts ExtractWorkspaceAgentAndLatestBuil
|
||||
return
|
||||
}
|
||||
|
||||
//nolint:gocritic // System needs to be able to get owner roles.
|
||||
roles, err := opts.DB.GetAuthorizationUserRoles(dbauthz.AsSystemRestricted(ctx), row.WorkspaceTable.OwnerID)
|
||||
subject, _, err := UserRBACSubject(ctx, opts.DB, row.WorkspaceTable.OwnerID, rbac.WorkspaceAgentScope(rbac.WorkspaceAgentScopeParams{
|
||||
WorkspaceID: row.WorkspaceTable.ID,
|
||||
OwnerID: row.WorkspaceTable.OwnerID,
|
||||
TemplateID: row.WorkspaceTable.TemplateID,
|
||||
VersionID: row.WorkspaceBuild.TemplateVersionID,
|
||||
}))
|
||||
if err != nil {
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
Message: "Internal error checking workspace agent authorization.",
|
||||
Message: "Internal error with workspace agent authorization context.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
roleNames, err := roles.RoleNames()
|
||||
if err != nil {
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
Message: "Internal server error",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
subject := rbac.Subject{
|
||||
ID: row.WorkspaceTable.OwnerID.String(),
|
||||
Roles: rbac.RoleIdentifiers(roleNames),
|
||||
Groups: roles.Groups,
|
||||
Scope: rbac.WorkspaceAgentScope(rbac.WorkspaceAgentScopeParams{
|
||||
WorkspaceID: row.WorkspaceTable.ID,
|
||||
OwnerID: row.WorkspaceTable.OwnerID,
|
||||
TemplateID: row.WorkspaceTable.TemplateID,
|
||||
VersionID: row.WorkspaceBuild.TemplateVersionID,
|
||||
}),
|
||||
}.WithCachedASTValue()
|
||||
|
||||
ctx = context.WithValue(ctx, workspaceAgentContextKey{}, row.WorkspaceAgent)
|
||||
ctx = context.WithValue(ctx, latestBuildContextKey{}, row.WorkspaceBuild)
|
||||
// Also set the dbauthz actor for the request.
|
||||
|
||||
@@ -57,6 +57,10 @@ const (
|
||||
// DefaultHeartbeatInterval is the interval at which the provisioner daemon
|
||||
// will update its last seen at timestamp in the database.
|
||||
DefaultHeartbeatInterval = time.Minute
|
||||
|
||||
// StaleInterval is the amount of time after the last heartbeat for which
|
||||
// the provisioner will be reported as 'stale'.
|
||||
StaleInterval = 90 * time.Second
|
||||
)
|
||||
|
||||
type Options struct {
|
||||
|
||||
@@ -15,6 +15,7 @@ import (
|
||||
"nhooyr.io/websocket"
|
||||
|
||||
"cdr.dev/slog"
|
||||
"github.com/coder/coder/v2/codersdk/wsjson"
|
||||
|
||||
"github.com/coder/coder/v2/coderd/database"
|
||||
"github.com/coder/coder/v2/coderd/database/db2sdk"
|
||||
@@ -312,6 +313,7 @@ type logFollower struct {
|
||||
r *http.Request
|
||||
rw http.ResponseWriter
|
||||
conn *websocket.Conn
|
||||
enc *wsjson.Encoder[codersdk.ProvisionerJobLog]
|
||||
|
||||
jobID uuid.UUID
|
||||
after int64
|
||||
@@ -391,6 +393,7 @@ func (f *logFollower) follow() {
|
||||
}
|
||||
defer f.conn.Close(websocket.StatusNormalClosure, "done")
|
||||
go httpapi.Heartbeat(f.ctx, f.conn)
|
||||
f.enc = wsjson.NewEncoder[codersdk.ProvisionerJobLog](f.conn, websocket.MessageText)
|
||||
|
||||
// query for logs once right away, so we can get historical data from before
|
||||
// subscription
|
||||
@@ -488,11 +491,7 @@ func (f *logFollower) query() error {
|
||||
return xerrors.Errorf("error fetching logs: %w", err)
|
||||
}
|
||||
for _, log := range logs {
|
||||
logB, err := json.Marshal(convertProvisionerJobLog(log))
|
||||
if err != nil {
|
||||
return xerrors.Errorf("error marshaling log: %w", err)
|
||||
}
|
||||
err = f.conn.Write(f.ctx, websocket.MessageText, logB)
|
||||
err := f.enc.Encode(convertProvisionerJobLog(log))
|
||||
if err != nil {
|
||||
return xerrors.Errorf("error writing to websocket: %w", err)
|
||||
}
|
||||
|
||||
@@ -868,6 +868,9 @@ func ConvertTemplateVersion(version database.TemplateVersion) TemplateVersion {
|
||||
if version.TemplateID.Valid {
|
||||
snapVersion.TemplateID = &version.TemplateID.UUID
|
||||
}
|
||||
if version.SourceExampleID.Valid {
|
||||
snapVersion.SourceExampleID = &version.SourceExampleID.String
|
||||
}
|
||||
return snapVersion
|
||||
}
|
||||
|
||||
@@ -1116,11 +1119,12 @@ type Template struct {
|
||||
}
|
||||
|
||||
type TemplateVersion struct {
|
||||
ID uuid.UUID `json:"id"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
TemplateID *uuid.UUID `json:"template_id,omitempty"`
|
||||
OrganizationID uuid.UUID `json:"organization_id"`
|
||||
JobID uuid.UUID `json:"job_id"`
|
||||
ID uuid.UUID `json:"id"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
TemplateID *uuid.UUID `json:"template_id,omitempty"`
|
||||
OrganizationID uuid.UUID `json:"organization_id"`
|
||||
JobID uuid.UUID `json:"job_id"`
|
||||
SourceExampleID *string `json:"source_example_id,omitempty"`
|
||||
}
|
||||
|
||||
type ProvisionerJob struct {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package telemetry_test
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
@@ -48,6 +49,10 @@ func TestTelemetry(t *testing.T) {
|
||||
_ = dbgen.Template(t, db, database.Template{
|
||||
Provisioner: database.ProvisionerTypeTerraform,
|
||||
})
|
||||
sourceExampleID := uuid.NewString()
|
||||
_ = dbgen.TemplateVersion(t, db, database.TemplateVersion{
|
||||
SourceExampleID: sql.NullString{String: sourceExampleID, Valid: true},
|
||||
})
|
||||
_ = dbgen.TemplateVersion(t, db, database.TemplateVersion{})
|
||||
user := dbgen.User(t, db, database.User{})
|
||||
_ = dbgen.Workspace(t, db, database.WorkspaceTable{})
|
||||
@@ -93,7 +98,7 @@ func TestTelemetry(t *testing.T) {
|
||||
require.Len(t, snapshot.ProvisionerJobs, 1)
|
||||
require.Len(t, snapshot.Licenses, 1)
|
||||
require.Len(t, snapshot.Templates, 1)
|
||||
require.Len(t, snapshot.TemplateVersions, 1)
|
||||
require.Len(t, snapshot.TemplateVersions, 2)
|
||||
require.Len(t, snapshot.Users, 1)
|
||||
require.Len(t, snapshot.Groups, 2)
|
||||
// 1 member in the everyone group + 1 member in the custom group
|
||||
@@ -111,6 +116,17 @@ func TestTelemetry(t *testing.T) {
|
||||
require.Len(t, wsa.Subsystems, 2)
|
||||
require.Equal(t, string(database.WorkspaceAgentSubsystemEnvbox), wsa.Subsystems[0])
|
||||
require.Equal(t, string(database.WorkspaceAgentSubsystemExectrace), wsa.Subsystems[1])
|
||||
|
||||
tvs := snapshot.TemplateVersions
|
||||
sort.Slice(tvs, func(i, j int) bool {
|
||||
// Sort by SourceExampleID presence (non-nil comes before nil)
|
||||
if (tvs[i].SourceExampleID != nil) != (tvs[j].SourceExampleID != nil) {
|
||||
return tvs[i].SourceExampleID != nil
|
||||
}
|
||||
return false
|
||||
})
|
||||
require.Equal(t, tvs[0].SourceExampleID, &sourceExampleID)
|
||||
require.Nil(t, tvs[1].SourceExampleID)
|
||||
})
|
||||
t.Run("HashedEmail", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
+164
-66
@@ -10,7 +10,6 @@ import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/google/uuid"
|
||||
@@ -22,6 +21,8 @@ import (
|
||||
|
||||
"github.com/coder/coder/v2/coderd/audit"
|
||||
"github.com/coder/coder/v2/coderd/database"
|
||||
"github.com/coder/coder/v2/coderd/database/db2sdk"
|
||||
"github.com/coder/coder/v2/coderd/database/dbauthz"
|
||||
"github.com/coder/coder/v2/coderd/database/dbtime"
|
||||
"github.com/coder/coder/v2/coderd/database/provisionerjobs"
|
||||
"github.com/coder/coder/v2/coderd/externalauth"
|
||||
@@ -32,6 +33,7 @@ import (
|
||||
"github.com/coder/coder/v2/coderd/rbac/policy"
|
||||
"github.com/coder/coder/v2/coderd/render"
|
||||
"github.com/coder/coder/v2/coderd/tracing"
|
||||
"github.com/coder/coder/v2/coderd/util/ptr"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
"github.com/coder/coder/v2/examples"
|
||||
"github.com/coder/coder/v2/provisioner/terraform/tfparse"
|
||||
@@ -60,6 +62,22 @@ func (api *API) templateVersion(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
var matchedProvisioners *codersdk.MatchedProvisioners
|
||||
if jobs[0].ProvisionerJob.JobStatus == database.ProvisionerJobStatusPending {
|
||||
// nolint: gocritic // The user hitting this endpoint may not have
|
||||
// permission to read provisioner daemons, but we want to show them
|
||||
// information about the provisioner daemons that are available.
|
||||
provisioners, err := api.Database.GetProvisionerDaemonsByOrganization(dbauthz.AsSystemReadProvisionerDaemons(ctx), database.GetProvisionerDaemonsByOrganizationParams{
|
||||
OrganizationID: jobs[0].ProvisionerJob.OrganizationID,
|
||||
WantTags: jobs[0].ProvisionerJob.Tags,
|
||||
})
|
||||
if err != nil {
|
||||
api.Logger.Error(ctx, "failed to fetch provisioners for job id", slog.F("job_id", jobs[0].ProvisionerJob.ID), slog.Error(err))
|
||||
} else {
|
||||
matchedProvisioners = ptr.Ref(db2sdk.MatchedProvisioners(provisioners, dbtime.Now(), provisionerdserver.StaleInterval))
|
||||
}
|
||||
}
|
||||
|
||||
schemas, err := api.Database.GetParameterSchemasByJobID(ctx, jobs[0].ProvisionerJob.ID)
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
err = nil
|
||||
@@ -77,7 +95,7 @@ func (api *API) templateVersion(rw http.ResponseWriter, r *http.Request) {
|
||||
warnings = append(warnings, codersdk.TemplateVersionWarningUnsupportedWorkspaces)
|
||||
}
|
||||
|
||||
httpapi.Write(ctx, rw, http.StatusOK, convertTemplateVersion(templateVersion, convertProvisionerJob(jobs[0]), codersdk.MatchedProvisioners{}, warnings))
|
||||
httpapi.Write(ctx, rw, http.StatusOK, convertTemplateVersion(templateVersion, convertProvisionerJob(jobs[0]), matchedProvisioners, warnings))
|
||||
}
|
||||
|
||||
// @Summary Patch template version by ID
|
||||
@@ -173,7 +191,23 @@ func (api *API) patchTemplateVersion(rw http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
httpapi.Write(ctx, rw, http.StatusOK, convertTemplateVersion(updatedTemplateVersion, convertProvisionerJob(jobs[0]), codersdk.MatchedProvisioners{}, nil))
|
||||
var matchedProvisioners *codersdk.MatchedProvisioners
|
||||
if jobs[0].ProvisionerJob.JobStatus == database.ProvisionerJobStatusPending {
|
||||
// nolint: gocritic // The user hitting this endpoint may not have
|
||||
// permission to read provisioner daemons, but we want to show them
|
||||
// information about the provisioner daemons that are available.
|
||||
provisioners, err := api.Database.GetProvisionerDaemonsByOrganization(dbauthz.AsSystemReadProvisionerDaemons(ctx), database.GetProvisionerDaemonsByOrganizationParams{
|
||||
OrganizationID: jobs[0].ProvisionerJob.OrganizationID,
|
||||
WantTags: jobs[0].ProvisionerJob.Tags,
|
||||
})
|
||||
if err != nil {
|
||||
api.Logger.Error(ctx, "failed to fetch provisioners for job id", slog.F("job_id", jobs[0].ProvisionerJob.ID), slog.Error(err))
|
||||
} else {
|
||||
matchedProvisioners = ptr.Ref(db2sdk.MatchedProvisioners(provisioners, dbtime.Now(), provisionerdserver.StaleInterval))
|
||||
}
|
||||
}
|
||||
|
||||
httpapi.Write(ctx, rw, http.StatusOK, convertTemplateVersion(updatedTemplateVersion, convertProvisionerJob(jobs[0]), matchedProvisioners, nil))
|
||||
}
|
||||
|
||||
// @Summary Cancel template version by ID
|
||||
@@ -546,6 +580,43 @@ func (api *API) templateVersionDryRun(rw http.ResponseWriter, r *http.Request) {
|
||||
httpapi.Write(ctx, rw, http.StatusOK, convertProvisionerJob(job))
|
||||
}
|
||||
|
||||
// @Summary Get template version dry-run matched provisioners
|
||||
// @ID get-template-version-dry-run-matched-provisioners
|
||||
// @Security CoderSessionToken
|
||||
// @Produce json
|
||||
// @Tags Templates
|
||||
// @Param templateversion path string true "Template version ID" format(uuid)
|
||||
// @Param jobID path string true "Job ID" format(uuid)
|
||||
// @Success 200 {object} codersdk.MatchedProvisioners
|
||||
// @Router /templateversions/{templateversion}/dry-run/{jobID}/matched-provisioners [get]
|
||||
func (api *API) templateVersionDryRunMatchedProvisioners(rw http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
job, ok := api.fetchTemplateVersionDryRunJob(rw, r)
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
|
||||
// nolint:gocritic // The user may not have permissions to read all
|
||||
// provisioner daemons in the org.
|
||||
daemons, err := api.Database.GetProvisionerDaemonsByOrganization(dbauthz.AsSystemReadProvisionerDaemons(ctx), database.GetProvisionerDaemonsByOrganizationParams{
|
||||
OrganizationID: job.ProvisionerJob.OrganizationID,
|
||||
WantTags: job.ProvisionerJob.Tags,
|
||||
})
|
||||
if err != nil {
|
||||
if !errors.Is(err, sql.ErrNoRows) {
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
Message: "Internal error fetching provisioner daemons by organization.",
|
||||
Detail: err.Error(),
|
||||
})
|
||||
return
|
||||
}
|
||||
daemons = []database.ProvisionerDaemon{}
|
||||
}
|
||||
|
||||
matchedProvisioners := db2sdk.MatchedProvisioners(daemons, dbtime.Now(), provisionerdserver.StaleInterval)
|
||||
httpapi.Write(ctx, rw, http.StatusOK, matchedProvisioners)
|
||||
}
|
||||
|
||||
// @Summary Get template version dry-run resources by job ID
|
||||
// @ID get-template-version-dry-run-resources-by-job-id
|
||||
// @Security CoderSessionToken
|
||||
@@ -814,7 +885,7 @@ func (api *API) templateVersionsByTemplate(rw http.ResponseWriter, r *http.Reque
|
||||
return err
|
||||
}
|
||||
|
||||
apiVersions = append(apiVersions, convertTemplateVersion(version, convertProvisionerJob(job), codersdk.MatchedProvisioners{}, nil))
|
||||
apiVersions = append(apiVersions, convertTemplateVersion(version, convertProvisionerJob(job), nil, nil))
|
||||
}
|
||||
|
||||
return nil
|
||||
@@ -868,8 +939,23 @@ func (api *API) templateVersionByName(rw http.ResponseWriter, r *http.Request) {
|
||||
})
|
||||
return
|
||||
}
|
||||
var matchedProvisioners *codersdk.MatchedProvisioners
|
||||
if jobs[0].ProvisionerJob.JobStatus == database.ProvisionerJobStatusPending {
|
||||
// nolint: gocritic // The user hitting this endpoint may not have
|
||||
// permission to read provisioner daemons, but we want to show them
|
||||
// information about the provisioner daemons that are available.
|
||||
provisioners, err := api.Database.GetProvisionerDaemonsByOrganization(dbauthz.AsSystemReadProvisionerDaemons(ctx), database.GetProvisionerDaemonsByOrganizationParams{
|
||||
OrganizationID: jobs[0].ProvisionerJob.OrganizationID,
|
||||
WantTags: jobs[0].ProvisionerJob.Tags,
|
||||
})
|
||||
if err != nil {
|
||||
api.Logger.Error(ctx, "failed to fetch provisioners for job id", slog.F("job_id", jobs[0].ProvisionerJob.ID), slog.Error(err))
|
||||
} else {
|
||||
matchedProvisioners = ptr.Ref(db2sdk.MatchedProvisioners(provisioners, dbtime.Now(), provisionerdserver.StaleInterval))
|
||||
}
|
||||
}
|
||||
|
||||
httpapi.Write(ctx, rw, http.StatusOK, convertTemplateVersion(templateVersion, convertProvisionerJob(jobs[0]), codersdk.MatchedProvisioners{}, nil))
|
||||
httpapi.Write(ctx, rw, http.StatusOK, convertTemplateVersion(templateVersion, convertProvisionerJob(jobs[0]), matchedProvisioners, nil))
|
||||
}
|
||||
|
||||
// @Summary Get template version by organization, template, and name
|
||||
@@ -934,7 +1020,23 @@ func (api *API) templateVersionByOrganizationTemplateAndName(rw http.ResponseWri
|
||||
return
|
||||
}
|
||||
|
||||
httpapi.Write(ctx, rw, http.StatusOK, convertTemplateVersion(templateVersion, convertProvisionerJob(jobs[0]), codersdk.MatchedProvisioners{}, nil))
|
||||
var matchedProvisioners *codersdk.MatchedProvisioners
|
||||
if jobs[0].ProvisionerJob.JobStatus == database.ProvisionerJobStatusPending {
|
||||
// nolint: gocritic // The user hitting this endpoint may not have
|
||||
// permission to read provisioner daemons, but we want to show them
|
||||
// information about the provisioner daemons that are available.
|
||||
provisioners, err := api.Database.GetProvisionerDaemonsByOrganization(dbauthz.AsSystemReadProvisionerDaemons(ctx), database.GetProvisionerDaemonsByOrganizationParams{
|
||||
OrganizationID: jobs[0].ProvisionerJob.OrganizationID,
|
||||
WantTags: jobs[0].ProvisionerJob.Tags,
|
||||
})
|
||||
if err != nil {
|
||||
api.Logger.Error(ctx, "failed to fetch provisioners for job id", slog.F("job_id", jobs[0].ProvisionerJob.ID), slog.Error(err))
|
||||
} else {
|
||||
matchedProvisioners = ptr.Ref(db2sdk.MatchedProvisioners(provisioners, dbtime.Now(), provisionerdserver.StaleInterval))
|
||||
}
|
||||
}
|
||||
|
||||
httpapi.Write(ctx, rw, http.StatusOK, convertTemplateVersion(templateVersion, convertProvisionerJob(jobs[0]), matchedProvisioners, nil))
|
||||
}
|
||||
|
||||
// @Summary Get previous template version by organization, template, and name
|
||||
@@ -1020,7 +1122,23 @@ func (api *API) previousTemplateVersionByOrganizationTemplateAndName(rw http.Res
|
||||
return
|
||||
}
|
||||
|
||||
httpapi.Write(ctx, rw, http.StatusOK, convertTemplateVersion(previousTemplateVersion, convertProvisionerJob(jobs[0]), codersdk.MatchedProvisioners{}, nil))
|
||||
var matchedProvisioners *codersdk.MatchedProvisioners
|
||||
if jobs[0].ProvisionerJob.JobStatus == database.ProvisionerJobStatusPending {
|
||||
// nolint: gocritic // The user hitting this endpoint may not have
|
||||
// permission to read provisioner daemons, but we want to show them
|
||||
// information about the provisioner daemons that are available.
|
||||
provisioners, err := api.Database.GetProvisionerDaemonsByOrganization(dbauthz.AsSystemReadProvisionerDaemons(ctx), database.GetProvisionerDaemonsByOrganizationParams{
|
||||
OrganizationID: jobs[0].ProvisionerJob.OrganizationID,
|
||||
WantTags: jobs[0].ProvisionerJob.Tags,
|
||||
})
|
||||
if err != nil {
|
||||
api.Logger.Error(ctx, "failed to fetch provisioners for job id", slog.F("job_id", jobs[0].ProvisionerJob.ID), slog.Error(err))
|
||||
} else {
|
||||
matchedProvisioners = ptr.Ref(db2sdk.MatchedProvisioners(provisioners, dbtime.Now(), provisionerdserver.StaleInterval))
|
||||
}
|
||||
}
|
||||
|
||||
httpapi.Write(ctx, rw, http.StatusOK, convertTemplateVersion(previousTemplateVersion, convertProvisionerJob(jobs[0]), matchedProvisioners, nil))
|
||||
}
|
||||
|
||||
// @Summary Archive template unused versions by template id
|
||||
@@ -1479,11 +1597,9 @@ func (api *API) postTemplateVersionsByOrganization(rw http.ResponseWriter, r *ht
|
||||
}
|
||||
|
||||
// Ensure the "owner" tag is properly applied in addition to request tags and coder_workspace_tags.
|
||||
// Tag order precedence:
|
||||
// 1) User-specified tags in the request
|
||||
// 2) Tags parsed from coder_workspace_tags data source in template file
|
||||
// 2 may clobber 1.
|
||||
tags := provisionersdk.MutateTags(apiKey.UserID, req.ProvisionerTags, parsedTags)
|
||||
// User-specified tags in the request will take precedence over tags parsed from `coder_workspace_tags`
|
||||
// data sources defined in the template file.
|
||||
tags := provisionersdk.MutateTags(apiKey.UserID, parsedTags, req.ProvisionerTags)
|
||||
|
||||
var templateVersion database.TemplateVersion
|
||||
var provisionerJob database.ProvisionerJob
|
||||
@@ -1513,27 +1629,6 @@ func (api *API) postTemplateVersionsByOrganization(rw http.ResponseWriter, r *ht
|
||||
return err
|
||||
}
|
||||
|
||||
// Check for eligible provisioners. This allows us to log a message warning deployment administrators
|
||||
// of users submitting jobs for which no provisioners are available.
|
||||
matchedProvisioners, err = checkProvisioners(ctx, tx, organization.ID, tags, api.DeploymentValues.Provisioner.DaemonPollInterval.Value())
|
||||
if err != nil {
|
||||
api.Logger.Error(ctx, "failed to check eligible provisioner daemons for job", slog.Error(err))
|
||||
} else if matchedProvisioners.Count == 0 {
|
||||
api.Logger.Warn(ctx, "no matching provisioners found for job",
|
||||
slog.F("user_id", apiKey.UserID),
|
||||
slog.F("job_id", jobID),
|
||||
slog.F("job_type", database.ProvisionerJobTypeTemplateVersionImport),
|
||||
slog.F("tags", tags),
|
||||
)
|
||||
} else if matchedProvisioners.Available == 0 {
|
||||
api.Logger.Warn(ctx, "no active provisioners found for job",
|
||||
slog.F("user_id", apiKey.UserID),
|
||||
slog.F("job_id", jobID),
|
||||
slog.F("job_type", database.ProvisionerJobTypeTemplateVersionImport),
|
||||
slog.F("tags", tags),
|
||||
)
|
||||
}
|
||||
|
||||
provisionerJob, err = tx.InsertProvisionerJob(ctx, database.InsertProvisionerJobParams{
|
||||
ID: jobID,
|
||||
CreatedAt: dbtime.Now(),
|
||||
@@ -1559,6 +1654,36 @@ func (api *API) postTemplateVersionsByOrganization(rw http.ResponseWriter, r *ht
|
||||
return err
|
||||
}
|
||||
|
||||
// Check for eligible provisioners. This allows us to return a warning to the user if they
|
||||
// submit a job for which no provisioner is available.
|
||||
// nolint: gocritic // The user hitting this endpoint may not have
|
||||
// permission to read provisioner daemons, but we want to show them
|
||||
// information about the provisioner daemons that are available.
|
||||
eligibleProvisioners, err := tx.GetProvisionerDaemonsByOrganization(dbauthz.AsSystemReadProvisionerDaemons(ctx), database.GetProvisionerDaemonsByOrganizationParams{
|
||||
OrganizationID: organization.ID,
|
||||
WantTags: provisionerJob.Tags,
|
||||
})
|
||||
if err != nil {
|
||||
// Log the error but do not return any warnings. This is purely advisory and we should not block.
|
||||
api.Logger.Error(ctx, "failed to check eligible provisioner daemons for job", slog.Error(err))
|
||||
}
|
||||
matchedProvisioners = db2sdk.MatchedProvisioners(eligibleProvisioners, provisionerJob.CreatedAt, provisionerdserver.StaleInterval)
|
||||
if matchedProvisioners.Count == 0 {
|
||||
api.Logger.Warn(ctx, "no matching provisioners found for job",
|
||||
slog.F("user_id", apiKey.UserID),
|
||||
slog.F("job_id", jobID),
|
||||
slog.F("job_type", database.ProvisionerJobTypeTemplateVersionImport),
|
||||
slog.F("tags", tags),
|
||||
)
|
||||
} else if matchedProvisioners.Available == 0 {
|
||||
api.Logger.Warn(ctx, "no active provisioners found for job",
|
||||
slog.F("user_id", apiKey.UserID),
|
||||
slog.F("job_id", jobID),
|
||||
slog.F("job_type", database.ProvisionerJobTypeTemplateVersionImport),
|
||||
slog.F("tags", tags),
|
||||
)
|
||||
}
|
||||
|
||||
var templateID uuid.NullUUID
|
||||
if req.TemplateID != uuid.Nil {
|
||||
templateID = uuid.NullUUID{
|
||||
@@ -1582,6 +1707,10 @@ func (api *API) postTemplateVersionsByOrganization(rw http.ResponseWriter, r *ht
|
||||
Readme: "",
|
||||
JobID: provisionerJob.ID,
|
||||
CreatedBy: apiKey.UserID,
|
||||
SourceExampleID: sql.NullString{
|
||||
String: req.ExampleID,
|
||||
Valid: req.ExampleID != "",
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
if database.IsUniqueViolation(err, database.UniqueTemplateVersionsTemplateIDNameKey) {
|
||||
@@ -1629,7 +1758,7 @@ func (api *API) postTemplateVersionsByOrganization(rw http.ResponseWriter, r *ht
|
||||
ProvisionerJob: provisionerJob,
|
||||
QueuePosition: 0,
|
||||
}),
|
||||
matchedProvisioners,
|
||||
&matchedProvisioners,
|
||||
warnings))
|
||||
}
|
||||
|
||||
@@ -1697,7 +1826,7 @@ func (api *API) templateVersionLogs(rw http.ResponseWriter, r *http.Request) {
|
||||
api.provisionerJobLogs(rw, r, job)
|
||||
}
|
||||
|
||||
func convertTemplateVersion(version database.TemplateVersion, job codersdk.ProvisionerJob, matchedProvisioners codersdk.MatchedProvisioners, warnings []codersdk.TemplateVersionWarning) codersdk.TemplateVersion {
|
||||
func convertTemplateVersion(version database.TemplateVersion, job codersdk.ProvisionerJob, matchedProvisioners *codersdk.MatchedProvisioners, warnings []codersdk.TemplateVersionWarning) codersdk.TemplateVersion {
|
||||
return codersdk.TemplateVersion{
|
||||
ID: version.ID,
|
||||
TemplateID: &version.TemplateID.UUID,
|
||||
@@ -1818,34 +1947,3 @@ func (api *API) publishTemplateUpdate(ctx context.Context, templateID uuid.UUID)
|
||||
slog.F("template_id", templateID), slog.Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
func checkProvisioners(ctx context.Context, store database.Store, orgID uuid.UUID, wantTags map[string]string, pollInterval time.Duration) (codersdk.MatchedProvisioners, error) {
|
||||
// Check for eligible provisioners. This allows us to return a warning to the user if they
|
||||
// submit a job for which no provisioner is available.
|
||||
eligibleProvisioners, err := store.GetProvisionerDaemonsByOrganization(ctx, database.GetProvisionerDaemonsByOrganizationParams{
|
||||
OrganizationID: orgID,
|
||||
WantTags: wantTags,
|
||||
})
|
||||
if err != nil {
|
||||
// Log the error but do not return any warnings. This is purely advisory and we should not block.
|
||||
return codersdk.MatchedProvisioners{}, xerrors.Errorf("provisioner daemons by organization: %w", err)
|
||||
}
|
||||
|
||||
threePollsAgo := time.Now().Add(-3 * pollInterval)
|
||||
mostRecentlySeen := codersdk.NullTime{}
|
||||
var matched codersdk.MatchedProvisioners
|
||||
for _, provisioner := range eligibleProvisioners {
|
||||
if !provisioner.LastSeenAt.Valid {
|
||||
continue
|
||||
}
|
||||
matched.Count++
|
||||
if provisioner.LastSeenAt.Time.After(threePollsAgo) {
|
||||
matched.Available++
|
||||
}
|
||||
if provisioner.LastSeenAt.Time.After(mostRecentlySeen.Time) {
|
||||
matched.MostRecentlySeen.Valid = true
|
||||
matched.MostRecentlySeen.Time = provisioner.LastSeenAt.Time
|
||||
}
|
||||
}
|
||||
return matched, nil
|
||||
}
|
||||
|
||||
+233
-38
@@ -16,6 +16,7 @@ import (
|
||||
"github.com/coder/coder/v2/coderd/audit"
|
||||
"github.com/coder/coder/v2/coderd/coderdtest"
|
||||
"github.com/coder/coder/v2/coderd/database"
|
||||
"github.com/coder/coder/v2/coderd/database/dbauthz"
|
||||
"github.com/coder/coder/v2/coderd/database/dbtestutil"
|
||||
"github.com/coder/coder/v2/coderd/externalauth"
|
||||
"github.com/coder/coder/v2/coderd/rbac"
|
||||
@@ -49,6 +50,12 @@ func TestTemplateVersion(t *testing.T) {
|
||||
tv, err := client.TemplateVersion(ctx, version.ID)
|
||||
authz.AssertChecked(t, policy.ActionRead, tv)
|
||||
require.NoError(t, err)
|
||||
if assert.Equal(t, tv.Job.Status, codersdk.ProvisionerJobPending) {
|
||||
assert.NotNil(t, tv.MatchedProvisioners)
|
||||
assert.Zero(t, tv.MatchedProvisioners.Available)
|
||||
assert.Zero(t, tv.MatchedProvisioners.Count)
|
||||
assert.False(t, tv.MatchedProvisioners.MostRecentlySeen.Valid)
|
||||
}
|
||||
|
||||
assert.Equal(t, "bananas", tv.Name)
|
||||
assert.Equal(t, "first try", tv.Message)
|
||||
@@ -86,8 +93,14 @@ func TestTemplateVersion(t *testing.T) {
|
||||
|
||||
client1, _ := coderdtest.CreateAnotherUser(t, client, user.OrganizationID)
|
||||
|
||||
_, err := client1.TemplateVersion(ctx, version.ID)
|
||||
tv, err := client1.TemplateVersion(ctx, version.ID)
|
||||
require.NoError(t, err)
|
||||
if assert.Equal(t, tv.Job.Status, codersdk.ProvisionerJobPending) {
|
||||
assert.NotNil(t, tv.MatchedProvisioners)
|
||||
assert.Zero(t, tv.MatchedProvisioners.Available)
|
||||
assert.Zero(t, tv.MatchedProvisioners.Count)
|
||||
assert.False(t, tv.MatchedProvisioners.MostRecentlySeen.Valid)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -134,7 +147,7 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
|
||||
t.Run("WithParameters", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
auditor := audit.NewMock()
|
||||
client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true, Auditor: auditor})
|
||||
client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{IncludeProvisionerDaemon: true, Auditor: auditor})
|
||||
user := coderdtest.CreateFirstUser(t, client)
|
||||
data, err := echo.Tar(&echo.Responses{
|
||||
Parse: echo.ParseComplete,
|
||||
@@ -157,14 +170,26 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, "bananas", version.Name)
|
||||
require.Equal(t, provisionersdk.ScopeOrganization, version.Job.Tags[provisionersdk.TagScope])
|
||||
if assert.Equal(t, version.Job.Status, codersdk.ProvisionerJobPending) {
|
||||
assert.NotNil(t, version.MatchedProvisioners)
|
||||
assert.Equal(t, version.MatchedProvisioners.Available, 1)
|
||||
assert.Equal(t, version.MatchedProvisioners.Count, 1)
|
||||
assert.True(t, version.MatchedProvisioners.MostRecentlySeen.Valid)
|
||||
}
|
||||
|
||||
require.Len(t, auditor.AuditLogs(), 2)
|
||||
assert.Equal(t, database.AuditActionCreate, auditor.AuditLogs()[1].Action)
|
||||
|
||||
admin, err := client.User(ctx, user.UserID.String())
|
||||
require.NoError(t, err)
|
||||
tvDB, err := db.GetTemplateVersionByID(dbauthz.As(ctx, coderdtest.AuthzUserSubject(admin, user.OrganizationID)), version.ID)
|
||||
require.NoError(t, err)
|
||||
require.False(t, tvDB.SourceExampleID.Valid)
|
||||
})
|
||||
|
||||
t.Run("Example", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
client := coderdtest.New(t, nil)
|
||||
client, db := coderdtest.NewWithDatabase(t, nil)
|
||||
user := coderdtest.CreateFirstUser(t, client)
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
|
||||
@@ -205,6 +230,12 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, "my-example", tv.Name)
|
||||
|
||||
admin, err := client.User(ctx, user.UserID.String())
|
||||
require.NoError(t, err)
|
||||
tvDB, err := db.GetTemplateVersionByID(dbauthz.As(ctx, coderdtest.AuthzUserSubject(admin, user.OrganizationID)), tv.ID)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, ls[0].ID, tvDB.SourceExampleID.String)
|
||||
|
||||
// ensure the template tar was uploaded correctly
|
||||
fl, ct, err := client.Download(ctx, tv.Job.FileID)
|
||||
require.NoError(t, err)
|
||||
@@ -262,6 +293,11 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
|
||||
type = string
|
||||
default = "2"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
resource "null_resource" "test" {}`,
|
||||
},
|
||||
wantTags: map[string]string{"owner": "", "scope": "organization"},
|
||||
@@ -270,18 +306,23 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
|
||||
name: "main.tf with empty workspace tags",
|
||||
files: map[string]string{
|
||||
`main.tf`: `
|
||||
variable "a" {
|
||||
type = string
|
||||
default = "1"
|
||||
}
|
||||
data "coder_parameter" "b" {
|
||||
type = string
|
||||
default = "2"
|
||||
}
|
||||
resource "null_resource" "test" {}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {}
|
||||
}`,
|
||||
variable "a" {
|
||||
type = string
|
||||
default = "1"
|
||||
}
|
||||
data "coder_parameter" "b" {
|
||||
type = string
|
||||
default = "2"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
resource "null_resource" "test" {}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {}
|
||||
}`,
|
||||
},
|
||||
wantTags: map[string]string{"owner": "", "scope": "organization"},
|
||||
},
|
||||
@@ -297,6 +338,11 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
|
||||
type = string
|
||||
default = "2"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
resource "null_resource" "test" {}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
@@ -309,29 +355,84 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
|
||||
wantTags: map[string]string{"owner": "", "scope": "organization", "foo": "bar", "a": "1", "b": "2"},
|
||||
},
|
||||
{
|
||||
name: "main.tf with workspace tags and request tags",
|
||||
name: "main.tf with request tags not clobbering workspace tags",
|
||||
files: map[string]string{
|
||||
`main.tf`: `
|
||||
variable "a" {
|
||||
type = string
|
||||
default = "1"
|
||||
}
|
||||
data "coder_parameter" "b" {
|
||||
type = string
|
||||
default = "2"
|
||||
}
|
||||
resource "null_resource" "test" {}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"foo": "bar",
|
||||
"a": var.a,
|
||||
"b": data.coder_parameter.b.value,
|
||||
// This file is, once again, the same as the above, except
|
||||
// for a slightly different comment.
|
||||
variable "a" {
|
||||
type = string
|
||||
default = "1"
|
||||
}
|
||||
}`,
|
||||
data "coder_parameter" "b" {
|
||||
type = string
|
||||
default = "2"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
resource "null_resource" "test" {}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"foo": "bar",
|
||||
"a": var.a,
|
||||
"b": data.coder_parameter.b.value,
|
||||
}
|
||||
}`,
|
||||
},
|
||||
reqTags: map[string]string{"baz": "zap", "foo": "noclobber"},
|
||||
reqTags: map[string]string{"baz": "zap"},
|
||||
wantTags: map[string]string{"owner": "", "scope": "organization", "foo": "bar", "baz": "zap", "a": "1", "b": "2"},
|
||||
},
|
||||
{
|
||||
name: "main.tf with request tags clobbering workspace tags",
|
||||
files: map[string]string{
|
||||
`main.tf`: `
|
||||
// This file is the same as the above, except for this comment.
|
||||
variable "a" {
|
||||
type = string
|
||||
default = "1"
|
||||
}
|
||||
data "coder_parameter" "b" {
|
||||
type = string
|
||||
default = "2"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
resource "null_resource" "test" {}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"foo": "bar",
|
||||
"a": var.a,
|
||||
"b": data.coder_parameter.b.value,
|
||||
}
|
||||
}`,
|
||||
},
|
||||
reqTags: map[string]string{"baz": "zap", "foo": "clobbered"},
|
||||
wantTags: map[string]string{"owner": "", "scope": "organization", "foo": "clobbered", "baz": "zap", "a": "1", "b": "2"},
|
||||
},
|
||||
// FIXME(cian): we should skip evaluating tags for which values have already been provided.
|
||||
{
|
||||
name: "main.tf with variable missing default value but value is passed in request",
|
||||
files: map[string]string{
|
||||
`main.tf`: `
|
||||
variable "a" {
|
||||
type = string
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"a": var.a,
|
||||
}
|
||||
}`,
|
||||
},
|
||||
reqTags: map[string]string{"a": "b"},
|
||||
// wantTags: map[string]string{"owner": "", "scope": "organization", "a": "b"},
|
||||
expectError: `provisioner tag "a" evaluated to an empty value`,
|
||||
},
|
||||
{
|
||||
name: "main.tf with disallowed workspace tag value",
|
||||
files: map[string]string{
|
||||
@@ -344,6 +445,11 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
|
||||
type = string
|
||||
default = "2"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
resource "null_resource" "test" {
|
||||
name = "foo"
|
||||
}
|
||||
@@ -370,6 +476,11 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
|
||||
type = string
|
||||
default = "2"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
resource "null_resource" "test" {
|
||||
name = "foo"
|
||||
}
|
||||
@@ -392,6 +503,11 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
|
||||
name: "main.tf with workspace tags that attempts to set user scope",
|
||||
files: map[string]string{
|
||||
`main.tf`: `
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
resource "null_resource" "test" {}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
@@ -406,6 +522,11 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
|
||||
name: "main.tf with workspace tags that attempt to clobber org ID",
|
||||
files: map[string]string{
|
||||
`main.tf`: `
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
resource "null_resource" "test" {}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
@@ -420,6 +541,11 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
|
||||
name: "main.tf with workspace tags that set scope=user",
|
||||
files: map[string]string{
|
||||
`main.tf`: `
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
resource "null_resource" "test" {}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
@@ -429,6 +555,19 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
|
||||
},
|
||||
wantTags: map[string]string{"owner": templateAdminUser.ID.String(), "scope": "user"},
|
||||
},
|
||||
// Ref: https://github.com/coder/coder/issues/16021
|
||||
{
|
||||
name: "main.tf with no workspace_tags and a function call in a parameter default",
|
||||
files: map[string]string{
|
||||
`main.tf`: `
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}`,
|
||||
},
|
||||
wantTags: map[string]string{"owner": "", "scope": "organization"},
|
||||
},
|
||||
} {
|
||||
tt := tt
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
@@ -458,14 +597,13 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) {
|
||||
pj, err := store.GetProvisionerJobByID(ctx, tv.Job.ID)
|
||||
require.NoError(t, err)
|
||||
require.EqualValues(t, tt.wantTags, pj.Tags)
|
||||
// Also assert that we get the expected information back from the API endpoint
|
||||
require.Zero(t, tv.MatchedProvisioners.Count)
|
||||
require.Zero(t, tv.MatchedProvisioners.Available)
|
||||
require.Zero(t, tv.MatchedProvisioners.MostRecentlySeen.Time)
|
||||
} else {
|
||||
require.ErrorContains(t, err, tt.expectError)
|
||||
}
|
||||
|
||||
// Also assert that we get the expected information back from the API endpoint
|
||||
require.Zero(t, tv.MatchedProvisioners.Count)
|
||||
require.Zero(t, tv.MatchedProvisioners.Available)
|
||||
require.Zero(t, tv.MatchedProvisioners.MostRecentlySeen.Time)
|
||||
})
|
||||
}
|
||||
})
|
||||
@@ -778,8 +916,15 @@ func TestTemplateVersionByName(t *testing.T) {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
|
||||
defer cancel()
|
||||
|
||||
_, err := client.TemplateVersionByName(ctx, template.ID, version.Name)
|
||||
tv, err := client.TemplateVersionByName(ctx, template.ID, version.Name)
|
||||
require.NoError(t, err)
|
||||
|
||||
if assert.Equal(t, tv.Job.Status, codersdk.ProvisionerJobPending) {
|
||||
assert.NotNil(t, tv.MatchedProvisioners)
|
||||
assert.Zero(t, tv.MatchedProvisioners.Available)
|
||||
assert.Zero(t, tv.MatchedProvisioners.Count)
|
||||
assert.False(t, tv.MatchedProvisioners.MostRecentlySeen.Valid)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -967,6 +1112,13 @@ func TestTemplateVersionDryRun(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, job.ID, newJob.ID)
|
||||
|
||||
// Check matched provisioners
|
||||
matched, err := client.TemplateVersionDryRunMatchedProvisioners(ctx, version.ID, job.ID)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, matched.Count)
|
||||
require.Equal(t, 1, matched.Available)
|
||||
require.NotZero(t, matched.MostRecentlySeen.Time)
|
||||
|
||||
// Stream logs
|
||||
logs, closer, err := client.TemplateVersionDryRunLogsAfter(ctx, version.ID, job.ID, 0)
|
||||
require.NoError(t, err)
|
||||
@@ -1139,6 +1291,49 @@ func TestTemplateVersionDryRun(t *testing.T) {
|
||||
require.Equal(t, http.StatusBadRequest, apiErr.StatusCode())
|
||||
})
|
||||
})
|
||||
|
||||
t.Run("Pending", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
if !dbtestutil.WillUsePostgres() {
|
||||
t.Skip("this test requires postgres")
|
||||
}
|
||||
|
||||
store, ps, db := dbtestutil.NewDBWithSQLDB(t)
|
||||
client, closer := coderdtest.NewWithProvisionerCloser(t, &coderdtest.Options{
|
||||
Database: store,
|
||||
Pubsub: ps,
|
||||
IncludeProvisionerDaemon: true,
|
||||
})
|
||||
defer closer.Close()
|
||||
|
||||
owner := coderdtest.CreateFirstUser(t, client)
|
||||
version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, &echo.Responses{
|
||||
Parse: echo.ParseComplete,
|
||||
ProvisionApply: echo.ApplyComplete,
|
||||
})
|
||||
version = coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
|
||||
require.Equal(t, codersdk.ProvisionerJobSucceeded, version.Job.Status)
|
||||
|
||||
templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin())
|
||||
ctx := testutil.Context(t, testutil.WaitShort)
|
||||
|
||||
_, err := db.Exec("DELETE FROM provisioner_daemons")
|
||||
require.NoError(t, err)
|
||||
|
||||
job, err := templateAdmin.CreateTemplateVersionDryRun(ctx, version.ID, codersdk.CreateTemplateVersionDryRunRequest{
|
||||
WorkspaceName: "test",
|
||||
RichParameterValues: []codersdk.WorkspaceBuildParameter{},
|
||||
UserVariableValues: []codersdk.VariableValue{},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, codersdk.ProvisionerJobPending, job.Status)
|
||||
|
||||
matched, err := templateAdmin.TemplateVersionDryRunMatchedProvisioners(ctx, version.ID, job.ID)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 0, matched.Count)
|
||||
require.Equal(t, 0, matched.Available)
|
||||
require.Zero(t, matched.MostRecentlySeen.Time)
|
||||
})
|
||||
}
|
||||
|
||||
// TestPaginatedTemplateVersions creates a list of template versions and paginate.
|
||||
|
||||
@@ -39,6 +39,7 @@ import (
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
"github.com/coder/coder/v2/codersdk/agentsdk"
|
||||
"github.com/coder/coder/v2/codersdk/workspacesdk"
|
||||
"github.com/coder/coder/v2/codersdk/wsjson"
|
||||
"github.com/coder/coder/v2/tailnet"
|
||||
"github.com/coder/coder/v2/tailnet/proto"
|
||||
)
|
||||
@@ -396,11 +397,9 @@ func (api *API) workspaceAgentLogs(rw http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
go httpapi.Heartbeat(ctx, conn)
|
||||
|
||||
ctx, wsNetConn := codersdk.WebsocketNetConn(ctx, conn, websocket.MessageText)
|
||||
defer wsNetConn.Close() // Also closes conn.
|
||||
encoder := wsjson.NewEncoder[[]codersdk.WorkspaceAgentLog](conn, websocket.MessageText)
|
||||
defer encoder.Close(websocket.StatusNormalClosure)
|
||||
|
||||
// The Go stdlib JSON encoder appends a newline character after message write.
|
||||
encoder := json.NewEncoder(wsNetConn)
|
||||
err = encoder.Encode(convertWorkspaceAgentLogs(logs))
|
||||
if err != nil {
|
||||
return
|
||||
@@ -740,16 +739,8 @@ func (api *API) derpMapUpdates(rw http.ResponseWriter, r *http.Request) {
|
||||
})
|
||||
return
|
||||
}
|
||||
ctx, nconn := codersdk.WebsocketNetConn(ctx, ws, websocket.MessageBinary)
|
||||
defer nconn.Close()
|
||||
|
||||
// Slurp all packets from the connection into io.Discard so pongs get sent
|
||||
// by the websocket package. We don't do any reads ourselves so this is
|
||||
// necessary.
|
||||
go func() {
|
||||
_, _ = io.Copy(io.Discard, nconn)
|
||||
_ = nconn.Close()
|
||||
}()
|
||||
encoder := wsjson.NewEncoder[*tailcfg.DERPMap](ws, websocket.MessageBinary)
|
||||
defer encoder.Close(websocket.StatusGoingAway)
|
||||
|
||||
go func(ctx context.Context) {
|
||||
// TODO(mafredri): Is this too frequent? Use separate ping disconnect timeout?
|
||||
@@ -767,7 +758,7 @@ func (api *API) derpMapUpdates(rw http.ResponseWriter, r *http.Request) {
|
||||
err := ws.Ping(ctx)
|
||||
cancel()
|
||||
if err != nil {
|
||||
_ = nconn.Close()
|
||||
_ = ws.Close(websocket.StatusGoingAway, "ping failed")
|
||||
return
|
||||
}
|
||||
}
|
||||
@@ -780,9 +771,8 @@ func (api *API) derpMapUpdates(rw http.ResponseWriter, r *http.Request) {
|
||||
for {
|
||||
derpMap := api.DERPMap()
|
||||
if lastDERPMap == nil || !tailnet.CompareDERPMaps(lastDERPMap, derpMap) {
|
||||
err := json.NewEncoder(nconn).Encode(derpMap)
|
||||
err := encoder.Encode(derpMap)
|
||||
if err != nil {
|
||||
_ = nconn.Close()
|
||||
return
|
||||
}
|
||||
lastDERPMap = derpMap
|
||||
|
||||
+61
-27
@@ -27,6 +27,7 @@ import (
|
||||
"github.com/coder/coder/v2/coderd/database/provisionerjobs"
|
||||
"github.com/coder/coder/v2/coderd/httpapi"
|
||||
"github.com/coder/coder/v2/coderd/httpmw"
|
||||
"github.com/coder/coder/v2/coderd/provisionerdserver"
|
||||
"github.com/coder/coder/v2/coderd/rbac"
|
||||
"github.com/coder/coder/v2/coderd/rbac/policy"
|
||||
"github.com/coder/coder/v2/coderd/wsbuilder"
|
||||
@@ -85,6 +86,7 @@ func (api *API) workspaceBuild(rw http.ResponseWriter, r *http.Request) {
|
||||
data.scripts,
|
||||
data.logSources,
|
||||
data.templateVersions[0],
|
||||
nil,
|
||||
)
|
||||
if err != nil {
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
@@ -200,6 +202,7 @@ func (api *API) workspaceBuilds(rw http.ResponseWriter, r *http.Request) {
|
||||
data.scripts,
|
||||
data.logSources,
|
||||
data.templateVersions,
|
||||
data.provisionerDaemons,
|
||||
)
|
||||
if err != nil {
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
@@ -289,6 +292,7 @@ func (api *API) workspaceBuildByBuildNumber(rw http.ResponseWriter, r *http.Requ
|
||||
data.scripts,
|
||||
data.logSources,
|
||||
data.templateVersions[0],
|
||||
data.provisionerDaemons,
|
||||
)
|
||||
if err != nil {
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
@@ -352,7 +356,7 @@ func (api *API) postWorkspaceBuilds(rw http.ResponseWriter, r *http.Request) {
|
||||
builder = builder.State(createBuild.ProvisionerState)
|
||||
}
|
||||
|
||||
workspaceBuild, provisionerJob, err := builder.Build(
|
||||
workspaceBuild, provisionerJob, provisionerDaemons, err := builder.Build(
|
||||
ctx,
|
||||
api.Database,
|
||||
func(action policy.Action, object rbac.Objecter) bool {
|
||||
@@ -384,10 +388,12 @@ func (api *API) postWorkspaceBuilds(rw http.ResponseWriter, r *http.Request) {
|
||||
})
|
||||
return
|
||||
}
|
||||
err = provisionerjobs.PostJob(api.Pubsub, *provisionerJob)
|
||||
if err != nil {
|
||||
// Client probably doesn't care about this error, so just log it.
|
||||
api.Logger.Error(ctx, "failed to post provisioner job to pubsub", slog.Error(err))
|
||||
|
||||
if provisionerJob != nil {
|
||||
if err := provisionerjobs.PostJob(api.Pubsub, *provisionerJob); err != nil {
|
||||
// Client probably doesn't care about this error, so just log it.
|
||||
api.Logger.Error(ctx, "failed to post provisioner job to pubsub", slog.Error(err))
|
||||
}
|
||||
}
|
||||
|
||||
apiBuild, err := api.convertWorkspaceBuild(
|
||||
@@ -404,6 +410,7 @@ func (api *API) postWorkspaceBuilds(rw http.ResponseWriter, r *http.Request) {
|
||||
[]database.WorkspaceAgentScript{},
|
||||
[]database.WorkspaceAgentLogSource{},
|
||||
database.TemplateVersion{},
|
||||
provisionerDaemons,
|
||||
)
|
||||
if err != nil {
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
@@ -638,14 +645,15 @@ func (api *API) workspaceBuildTimings(rw http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
type workspaceBuildsData struct {
|
||||
jobs []database.GetProvisionerJobsByIDsWithQueuePositionRow
|
||||
templateVersions []database.TemplateVersion
|
||||
resources []database.WorkspaceResource
|
||||
metadata []database.WorkspaceResourceMetadatum
|
||||
agents []database.WorkspaceAgent
|
||||
apps []database.WorkspaceApp
|
||||
scripts []database.WorkspaceAgentScript
|
||||
logSources []database.WorkspaceAgentLogSource
|
||||
jobs []database.GetProvisionerJobsByIDsWithQueuePositionRow
|
||||
templateVersions []database.TemplateVersion
|
||||
resources []database.WorkspaceResource
|
||||
metadata []database.WorkspaceResourceMetadatum
|
||||
agents []database.WorkspaceAgent
|
||||
apps []database.WorkspaceApp
|
||||
scripts []database.WorkspaceAgentScript
|
||||
logSources []database.WorkspaceAgentLogSource
|
||||
provisionerDaemons []database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow
|
||||
}
|
||||
|
||||
func (api *API) workspaceBuildsData(ctx context.Context, workspaceBuilds []database.WorkspaceBuild) (workspaceBuildsData, error) {
|
||||
@@ -657,6 +665,17 @@ func (api *API) workspaceBuildsData(ctx context.Context, workspaceBuilds []datab
|
||||
if err != nil && !errors.Is(err, sql.ErrNoRows) {
|
||||
return workspaceBuildsData{}, xerrors.Errorf("get provisioner jobs: %w", err)
|
||||
}
|
||||
pendingJobIDs := []uuid.UUID{}
|
||||
for _, job := range jobs {
|
||||
if job.ProvisionerJob.JobStatus == database.ProvisionerJobStatusPending {
|
||||
pendingJobIDs = append(pendingJobIDs, job.ProvisionerJob.ID)
|
||||
}
|
||||
}
|
||||
|
||||
pendingJobProvisioners, err := api.Database.GetEligibleProvisionerDaemonsByProvisionerJobIDs(ctx, pendingJobIDs)
|
||||
if err != nil && !errors.Is(err, sql.ErrNoRows) {
|
||||
return workspaceBuildsData{}, xerrors.Errorf("get provisioner daemons: %w", err)
|
||||
}
|
||||
|
||||
templateVersionIDs := make([]uuid.UUID, 0, len(workspaceBuilds))
|
||||
for _, build := range workspaceBuilds {
|
||||
@@ -677,8 +696,9 @@ func (api *API) workspaceBuildsData(ctx context.Context, workspaceBuilds []datab
|
||||
|
||||
if len(resources) == 0 {
|
||||
return workspaceBuildsData{
|
||||
jobs: jobs,
|
||||
templateVersions: templateVersions,
|
||||
jobs: jobs,
|
||||
templateVersions: templateVersions,
|
||||
provisionerDaemons: pendingJobProvisioners,
|
||||
}, nil
|
||||
}
|
||||
|
||||
@@ -701,10 +721,11 @@ func (api *API) workspaceBuildsData(ctx context.Context, workspaceBuilds []datab
|
||||
|
||||
if len(resources) == 0 {
|
||||
return workspaceBuildsData{
|
||||
jobs: jobs,
|
||||
templateVersions: templateVersions,
|
||||
resources: resources,
|
||||
metadata: metadata,
|
||||
jobs: jobs,
|
||||
templateVersions: templateVersions,
|
||||
resources: resources,
|
||||
metadata: metadata,
|
||||
provisionerDaemons: pendingJobProvisioners,
|
||||
}, nil
|
||||
}
|
||||
|
||||
@@ -741,14 +762,15 @@ func (api *API) workspaceBuildsData(ctx context.Context, workspaceBuilds []datab
|
||||
}
|
||||
|
||||
return workspaceBuildsData{
|
||||
jobs: jobs,
|
||||
templateVersions: templateVersions,
|
||||
resources: resources,
|
||||
metadata: metadata,
|
||||
agents: agents,
|
||||
apps: apps,
|
||||
scripts: scripts,
|
||||
logSources: logSources,
|
||||
jobs: jobs,
|
||||
templateVersions: templateVersions,
|
||||
resources: resources,
|
||||
metadata: metadata,
|
||||
agents: agents,
|
||||
apps: apps,
|
||||
scripts: scripts,
|
||||
logSources: logSources,
|
||||
provisionerDaemons: pendingJobProvisioners,
|
||||
}, nil
|
||||
}
|
||||
|
||||
@@ -763,6 +785,7 @@ func (api *API) convertWorkspaceBuilds(
|
||||
agentScripts []database.WorkspaceAgentScript,
|
||||
agentLogSources []database.WorkspaceAgentLogSource,
|
||||
templateVersions []database.TemplateVersion,
|
||||
provisionerDaemons []database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow,
|
||||
) ([]codersdk.WorkspaceBuild, error) {
|
||||
workspaceByID := map[uuid.UUID]database.Workspace{}
|
||||
for _, workspace := range workspaces {
|
||||
@@ -804,6 +827,7 @@ func (api *API) convertWorkspaceBuilds(
|
||||
agentScripts,
|
||||
agentLogSources,
|
||||
templateVersion,
|
||||
provisionerDaemons,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("converting workspace build: %w", err)
|
||||
@@ -826,6 +850,7 @@ func (api *API) convertWorkspaceBuild(
|
||||
agentScripts []database.WorkspaceAgentScript,
|
||||
agentLogSources []database.WorkspaceAgentLogSource,
|
||||
templateVersion database.TemplateVersion,
|
||||
provisionerDaemons []database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow,
|
||||
) (codersdk.WorkspaceBuild, error) {
|
||||
resourcesByJobID := map[uuid.UUID][]database.WorkspaceResource{}
|
||||
for _, resource := range workspaceResources {
|
||||
@@ -851,6 +876,14 @@ func (api *API) convertWorkspaceBuild(
|
||||
for _, logSource := range agentLogSources {
|
||||
logSourcesByAgentID[logSource.WorkspaceAgentID] = append(logSourcesByAgentID[logSource.WorkspaceAgentID], logSource)
|
||||
}
|
||||
provisionerDaemonsForThisWorkspaceBuild := []database.ProvisionerDaemon{}
|
||||
for _, provisionerDaemon := range provisionerDaemons {
|
||||
if provisionerDaemon.JobID != job.ProvisionerJob.ID {
|
||||
continue
|
||||
}
|
||||
provisionerDaemonsForThisWorkspaceBuild = append(provisionerDaemonsForThisWorkspaceBuild, provisionerDaemon.ProvisionerDaemon)
|
||||
}
|
||||
matchedProvisioners := db2sdk.MatchedProvisioners(provisionerDaemonsForThisWorkspaceBuild, job.ProvisionerJob.CreatedAt, provisionerdserver.StaleInterval)
|
||||
|
||||
resources := resourcesByJobID[job.ProvisionerJob.ID]
|
||||
apiResources := make([]codersdk.WorkspaceResource, 0)
|
||||
@@ -918,6 +951,7 @@ func (api *API) convertWorkspaceBuild(
|
||||
Resources: apiResources,
|
||||
Status: codersdk.ConvertWorkspaceStatus(apiJob.Status, transition),
|
||||
DailyCost: build.DailyCost,
|
||||
MatchedProvisioners: &matchedProvisioners,
|
||||
}, nil
|
||||
}
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ import (
|
||||
"github.com/stretchr/testify/require"
|
||||
"go.opentelemetry.io/otel"
|
||||
"go.opentelemetry.io/otel/propagation"
|
||||
"golang.org/x/exp/slices"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"cdr.dev/slog"
|
||||
@@ -1097,6 +1098,12 @@ func TestPostWorkspaceBuild(t *testing.T) {
|
||||
Transition: codersdk.WorkspaceTransitionStart,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
if assert.NotNil(t, build.MatchedProvisioners) {
|
||||
require.Equal(t, 1, build.MatchedProvisioners.Count)
|
||||
require.Equal(t, 1, build.MatchedProvisioners.Available)
|
||||
require.NotZero(t, build.MatchedProvisioners.MostRecentlySeen.Time)
|
||||
}
|
||||
|
||||
coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, build.ID)
|
||||
|
||||
require.Eventually(t, func() bool {
|
||||
@@ -1124,6 +1131,12 @@ func TestPostWorkspaceBuild(t *testing.T) {
|
||||
Transition: codersdk.WorkspaceTransitionStart,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
if assert.NotNil(t, build.MatchedProvisioners) {
|
||||
require.Equal(t, 1, build.MatchedProvisioners.Count)
|
||||
require.Equal(t, 1, build.MatchedProvisioners.Available)
|
||||
require.NotZero(t, build.MatchedProvisioners.MostRecentlySeen.Time)
|
||||
}
|
||||
|
||||
require.Equal(t, workspace.LatestBuild.BuildNumber+1, build.BuildNumber)
|
||||
})
|
||||
|
||||
@@ -1150,6 +1163,12 @@ func TestPostWorkspaceBuild(t *testing.T) {
|
||||
ProvisionerState: wantState,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
if assert.NotNil(t, build.MatchedProvisioners) {
|
||||
require.Equal(t, 1, build.MatchedProvisioners.Count)
|
||||
require.Equal(t, 1, build.MatchedProvisioners.Available)
|
||||
require.NotZero(t, build.MatchedProvisioners.MostRecentlySeen.Time)
|
||||
}
|
||||
|
||||
gotState, err := client.WorkspaceBuildState(ctx, build.ID)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, wantState, gotState)
|
||||
@@ -1173,6 +1192,12 @@ func TestPostWorkspaceBuild(t *testing.T) {
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, workspace.LatestBuild.BuildNumber+1, build.BuildNumber)
|
||||
if assert.NotNil(t, build.MatchedProvisioners) {
|
||||
require.Equal(t, 1, build.MatchedProvisioners.Count)
|
||||
require.Equal(t, 1, build.MatchedProvisioners.Available)
|
||||
require.NotZero(t, build.MatchedProvisioners.MostRecentlySeen.Time)
|
||||
}
|
||||
|
||||
coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, build.ID)
|
||||
|
||||
res, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{
|
||||
@@ -1181,6 +1206,102 @@ func TestPostWorkspaceBuild(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
require.Len(t, res.Workspaces, 0)
|
||||
})
|
||||
|
||||
t.Run("NoProvisionersAvailable", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
if !dbtestutil.WillUsePostgres() {
|
||||
t.Skip("this test requires postgres")
|
||||
}
|
||||
// Given: a coderd instance with a provisioner daemon
|
||||
store, ps, db := dbtestutil.NewDBWithSQLDB(t)
|
||||
client, closeDaemon := coderdtest.NewWithProvisionerCloser(t, &coderdtest.Options{
|
||||
Database: store,
|
||||
Pubsub: ps,
|
||||
IncludeProvisionerDaemon: true,
|
||||
})
|
||||
defer closeDaemon.Close()
|
||||
// Given: a user, template, and workspace
|
||||
user := coderdtest.CreateFirstUser(t, client)
|
||||
version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil)
|
||||
coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
|
||||
template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
|
||||
workspace := coderdtest.CreateWorkspace(t, client, template.ID)
|
||||
coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID)
|
||||
|
||||
// Stop the provisioner daemon.
|
||||
require.NoError(t, closeDaemon.Close())
|
||||
ctx := testutil.Context(t, testutil.WaitLong)
|
||||
// Given: no provisioner daemons exist.
|
||||
_, err := db.ExecContext(ctx, `DELETE FROM provisioner_daemons;`)
|
||||
require.NoError(t, err)
|
||||
|
||||
// When: a new workspace build is created
|
||||
build, err := client.CreateWorkspaceBuild(ctx, workspace.ID, codersdk.CreateWorkspaceBuildRequest{
|
||||
TemplateVersionID: template.ActiveVersionID,
|
||||
Transition: codersdk.WorkspaceTransitionStart,
|
||||
})
|
||||
// Then: the request should succeed.
|
||||
require.NoError(t, err)
|
||||
// Then: the provisioner job should remain pending.
|
||||
require.Equal(t, codersdk.ProvisionerJobPending, build.Job.Status)
|
||||
// Then: the response should indicate no provisioners are available.
|
||||
if assert.NotNil(t, build.MatchedProvisioners) {
|
||||
assert.Zero(t, build.MatchedProvisioners.Count)
|
||||
assert.Zero(t, build.MatchedProvisioners.Available)
|
||||
assert.Zero(t, build.MatchedProvisioners.MostRecentlySeen.Time)
|
||||
assert.False(t, build.MatchedProvisioners.MostRecentlySeen.Valid)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("AllProvisionersStale", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
if !dbtestutil.WillUsePostgres() {
|
||||
t.Skip("this test requires postgres")
|
||||
}
|
||||
// Given: a coderd instance with a provisioner daemon
|
||||
store, ps, db := dbtestutil.NewDBWithSQLDB(t)
|
||||
client, closeDaemon := coderdtest.NewWithProvisionerCloser(t, &coderdtest.Options{
|
||||
Database: store,
|
||||
Pubsub: ps,
|
||||
IncludeProvisionerDaemon: true,
|
||||
})
|
||||
defer closeDaemon.Close()
|
||||
// Given: a user, template, and workspace
|
||||
user := coderdtest.CreateFirstUser(t, client)
|
||||
version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil)
|
||||
coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
|
||||
template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
|
||||
workspace := coderdtest.CreateWorkspace(t, client, template.ID)
|
||||
coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID)
|
||||
|
||||
ctx := testutil.Context(t, testutil.WaitLong)
|
||||
// Given: all provisioner daemons are stale
|
||||
// First stop the provisioner
|
||||
require.NoError(t, closeDaemon.Close())
|
||||
newLastSeenAt := dbtime.Now().Add(-time.Hour)
|
||||
// Update the last seen at for all provisioner daemons. We have to use the
|
||||
// SQL db directly because store.UpdateProvisionerDaemonLastSeenAt has a
|
||||
// built-in check to prevent updating the last seen at to a time in the past.
|
||||
_, err := db.ExecContext(ctx, `UPDATE provisioner_daemons SET last_seen_at = $1;`, newLastSeenAt)
|
||||
require.NoError(t, err)
|
||||
|
||||
// When: a new workspace build is created
|
||||
build, err := client.CreateWorkspaceBuild(ctx, workspace.ID, codersdk.CreateWorkspaceBuildRequest{
|
||||
TemplateVersionID: template.ActiveVersionID,
|
||||
Transition: codersdk.WorkspaceTransitionStart,
|
||||
})
|
||||
// Then: the request should succeed
|
||||
require.NoError(t, err)
|
||||
// Then: the provisioner job should remain pending
|
||||
require.Equal(t, codersdk.ProvisionerJobPending, build.Job.Status)
|
||||
// Then: the response should indicate no provisioners are available
|
||||
if assert.NotNil(t, build.MatchedProvisioners) {
|
||||
assert.Zero(t, build.MatchedProvisioners.Available)
|
||||
assert.Equal(t, 1, build.MatchedProvisioners.Count)
|
||||
assert.Equal(t, newLastSeenAt.UTC(), build.MatchedProvisioners.MostRecentlySeen.Time.UTC())
|
||||
assert.True(t, build.MatchedProvisioners.MostRecentlySeen.Valid)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestWorkspaceBuildTimings(t *testing.T) {
|
||||
@@ -1301,6 +1422,47 @@ func TestWorkspaceBuildTimings(t *testing.T) {
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("MultipleTimingsForSameAgentScript", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
// Given: a build with multiple timings for the same script
|
||||
build := makeBuild(t)
|
||||
resource := dbgen.WorkspaceResource(t, db, database.WorkspaceResource{
|
||||
JobID: build.JobID,
|
||||
})
|
||||
agent := dbgen.WorkspaceAgent(t, db, database.WorkspaceAgent{
|
||||
ResourceID: resource.ID,
|
||||
})
|
||||
script := dbgen.WorkspaceAgentScript(t, db, database.WorkspaceAgentScript{
|
||||
WorkspaceAgentID: agent.ID,
|
||||
})
|
||||
timings := make([]database.WorkspaceAgentScriptTiming, 3)
|
||||
scriptStartedAt := dbtime.Now()
|
||||
for i := range timings {
|
||||
timings[i] = dbgen.WorkspaceAgentScriptTiming(t, db, database.WorkspaceAgentScriptTiming{
|
||||
StartedAt: scriptStartedAt,
|
||||
EndedAt: scriptStartedAt.Add(1 * time.Minute),
|
||||
ScriptID: script.ID,
|
||||
})
|
||||
|
||||
// Add an hour to the previous "started at" so we can
|
||||
// reliably differentiate the scripts from each other.
|
||||
scriptStartedAt = scriptStartedAt.Add(1 * time.Hour)
|
||||
}
|
||||
|
||||
// When: fetching timings for the build
|
||||
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
|
||||
t.Cleanup(cancel)
|
||||
res, err := client.WorkspaceBuildTimings(ctx, build.ID)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Then: return a response with the first agent script timing
|
||||
require.Len(t, res.AgentScriptTimings, 1)
|
||||
|
||||
require.Equal(t, timings[0].StartedAt.UnixMilli(), res.AgentScriptTimings[0].StartedAt.UnixMilli())
|
||||
require.Equal(t, timings[0].EndedAt.UnixMilli(), res.AgentScriptTimings[0].EndedAt.UnixMilli())
|
||||
})
|
||||
|
||||
t.Run("AgentScriptTimings", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
@@ -1312,10 +1474,10 @@ func TestWorkspaceBuildTimings(t *testing.T) {
|
||||
agent := dbgen.WorkspaceAgent(t, db, database.WorkspaceAgent{
|
||||
ResourceID: resource.ID,
|
||||
})
|
||||
script := dbgen.WorkspaceAgentScript(t, db, database.WorkspaceAgentScript{
|
||||
scripts := dbgen.WorkspaceAgentScripts(t, db, 5, database.WorkspaceAgentScript{
|
||||
WorkspaceAgentID: agent.ID,
|
||||
})
|
||||
agentScriptTimings := dbgen.WorkspaceAgentScriptTimings(t, db, script, 5)
|
||||
agentScriptTimings := dbgen.WorkspaceAgentScriptTimings(t, db, scripts)
|
||||
|
||||
// When: fetching timings for the build
|
||||
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
|
||||
@@ -1325,6 +1487,12 @@ func TestWorkspaceBuildTimings(t *testing.T) {
|
||||
|
||||
// Then: return a response with the expected timings
|
||||
require.Len(t, res.AgentScriptTimings, 5)
|
||||
slices.SortFunc(res.AgentScriptTimings, func(a, b codersdk.AgentScriptTiming) int {
|
||||
return a.StartedAt.Compare(b.StartedAt)
|
||||
})
|
||||
slices.SortFunc(agentScriptTimings, func(a, b database.WorkspaceAgentScriptTiming) int {
|
||||
return a.StartedAt.Compare(b.StartedAt)
|
||||
})
|
||||
for i := range res.AgentScriptTimings {
|
||||
timingRes := res.AgentScriptTimings[i]
|
||||
genTiming := agentScriptTimings[i]
|
||||
|
||||
@@ -593,8 +593,7 @@ func createWorkspace(
|
||||
}},
|
||||
})
|
||||
return
|
||||
}
|
||||
if err != nil && !errors.Is(err, sql.ErrNoRows) {
|
||||
} else if !errors.Is(err, sql.ErrNoRows) {
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
Message: fmt.Sprintf("Internal error fetching workspace by name %q.", req.Name),
|
||||
Detail: err.Error(),
|
||||
@@ -603,8 +602,9 @@ func createWorkspace(
|
||||
}
|
||||
|
||||
var (
|
||||
provisionerJob *database.ProvisionerJob
|
||||
workspaceBuild *database.WorkspaceBuild
|
||||
provisionerJob *database.ProvisionerJob
|
||||
workspaceBuild *database.WorkspaceBuild
|
||||
provisionerDaemons []database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow
|
||||
)
|
||||
err = api.Database.InTx(func(db database.Store) error {
|
||||
now := dbtime.Now()
|
||||
@@ -645,7 +645,7 @@ func createWorkspace(
|
||||
builder = builder.VersionID(req.TemplateVersionID)
|
||||
}
|
||||
|
||||
workspaceBuild, provisionerJob, err = builder.Build(
|
||||
workspaceBuild, provisionerJob, provisionerDaemons, err = builder.Build(
|
||||
ctx,
|
||||
db,
|
||||
func(action policy.Action, object rbac.Objecter) bool {
|
||||
@@ -655,6 +655,7 @@ func createWorkspace(
|
||||
)
|
||||
return err
|
||||
}, nil)
|
||||
|
||||
var bldErr wsbuilder.BuildError
|
||||
if xerrors.As(err, &bldErr) {
|
||||
httpapi.Write(ctx, rw, bldErr.Status, codersdk.Response{
|
||||
@@ -675,6 +676,7 @@ func createWorkspace(
|
||||
// Client probably doesn't care about this error, so just log it.
|
||||
api.Logger.Error(ctx, "failed to post provisioner job to pubsub", slog.Error(err))
|
||||
}
|
||||
|
||||
auditReq.New = workspace.WorkspaceTable()
|
||||
|
||||
api.Telemetry.Report(&telemetry.Snapshot{
|
||||
@@ -696,6 +698,7 @@ func createWorkspace(
|
||||
[]database.WorkspaceAgentScript{},
|
||||
[]database.WorkspaceAgentLogSource{},
|
||||
database.TemplateVersion{},
|
||||
provisionerDaemons,
|
||||
)
|
||||
if err != nil {
|
||||
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
|
||||
@@ -1816,6 +1819,7 @@ func (api *API) workspaceData(ctx context.Context, workspaces []database.Workspa
|
||||
data.scripts,
|
||||
data.logSources,
|
||||
data.templateVersions,
|
||||
data.provisionerDaemons,
|
||||
)
|
||||
if err != nil {
|
||||
return workspaceData{}, xerrors.Errorf("convert workspace builds: %w", err)
|
||||
|
||||
@@ -766,6 +766,94 @@ func TestPostWorkspacesByOrganization(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
require.EqualValues(t, exp, *ws.TTLMillis)
|
||||
})
|
||||
|
||||
t.Run("NoProvisionersAvailable", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
if !dbtestutil.WillUsePostgres() {
|
||||
t.Skip("this test requires postgres")
|
||||
}
|
||||
// Given: a coderd instance with a provisioner daemon
|
||||
store, ps, db := dbtestutil.NewDBWithSQLDB(t)
|
||||
client, closeDaemon := coderdtest.NewWithProvisionerCloser(t, &coderdtest.Options{
|
||||
Database: store,
|
||||
Pubsub: ps,
|
||||
IncludeProvisionerDaemon: true,
|
||||
})
|
||||
defer closeDaemon.Close()
|
||||
|
||||
// Given: a user, template, and workspace
|
||||
user := coderdtest.CreateFirstUser(t, client)
|
||||
version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil)
|
||||
coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
|
||||
template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
|
||||
|
||||
// Given: all the provisioner daemons disappear
|
||||
ctx := testutil.Context(t, testutil.WaitLong)
|
||||
_, err := db.ExecContext(ctx, `DELETE FROM provisioner_daemons;`)
|
||||
require.NoError(t, err)
|
||||
|
||||
// When: a new workspace is created
|
||||
ws, err := client.CreateUserWorkspace(ctx, codersdk.Me, codersdk.CreateWorkspaceRequest{
|
||||
TemplateID: template.ID,
|
||||
Name: "testing",
|
||||
})
|
||||
// Then: the request succeeds
|
||||
require.NoError(t, err)
|
||||
// Then: the workspace build is pending
|
||||
require.Equal(t, codersdk.ProvisionerJobPending, ws.LatestBuild.Job.Status)
|
||||
// Then: the workspace build has no matched provisioners
|
||||
if assert.NotNil(t, ws.LatestBuild.MatchedProvisioners) {
|
||||
assert.Zero(t, ws.LatestBuild.MatchedProvisioners.Count)
|
||||
assert.Zero(t, ws.LatestBuild.MatchedProvisioners.Available)
|
||||
assert.Zero(t, ws.LatestBuild.MatchedProvisioners.MostRecentlySeen.Time)
|
||||
assert.False(t, ws.LatestBuild.MatchedProvisioners.MostRecentlySeen.Valid)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("AllProvisionersStale", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
if !dbtestutil.WillUsePostgres() {
|
||||
t.Skip("this test requires postgres")
|
||||
}
|
||||
|
||||
// Given: a coderd instance with a provisioner daemon
|
||||
store, ps, db := dbtestutil.NewDBWithSQLDB(t)
|
||||
client, closeDaemon := coderdtest.NewWithProvisionerCloser(t, &coderdtest.Options{
|
||||
Database: store,
|
||||
Pubsub: ps,
|
||||
IncludeProvisionerDaemon: true,
|
||||
})
|
||||
defer closeDaemon.Close()
|
||||
|
||||
// Given: a user, template, and workspace
|
||||
user := coderdtest.CreateFirstUser(t, client)
|
||||
version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil)
|
||||
coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
|
||||
template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
|
||||
|
||||
// Given: all the provisioner daemons have not been seen for a while
|
||||
ctx := testutil.Context(t, testutil.WaitLong)
|
||||
newLastSeenAt := dbtime.Now().Add(-time.Hour)
|
||||
_, err := db.ExecContext(ctx, `UPDATE provisioner_daemons SET last_seen_at = $1;`, newLastSeenAt)
|
||||
require.NoError(t, err)
|
||||
|
||||
// When: a new workspace is created
|
||||
ws, err := client.CreateUserWorkspace(ctx, codersdk.Me, codersdk.CreateWorkspaceRequest{
|
||||
TemplateID: template.ID,
|
||||
Name: "testing",
|
||||
})
|
||||
// Then: the request succeeds
|
||||
require.NoError(t, err)
|
||||
// Then: the workspace build is pending
|
||||
require.Equal(t, codersdk.ProvisionerJobPending, ws.LatestBuild.Job.Status)
|
||||
// Then: we can see that there are some provisioners that are stale
|
||||
if assert.NotNil(t, ws.LatestBuild.MatchedProvisioners) {
|
||||
assert.Equal(t, 1, ws.LatestBuild.MatchedProvisioners.Count)
|
||||
assert.Zero(t, ws.LatestBuild.MatchedProvisioners.Available)
|
||||
assert.Equal(t, newLastSeenAt.UTC(), ws.LatestBuild.MatchedProvisioners.MostRecentlySeen.Time.UTC())
|
||||
assert.True(t, ws.LatestBuild.MatchedProvisioners.MostRecentlySeen.Valid)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestWorkspaceByOwnerAndName(t *testing.T) {
|
||||
@@ -3669,10 +3757,10 @@ func TestWorkspaceTimings(t *testing.T) {
|
||||
agent := dbgen.WorkspaceAgent(t, db, database.WorkspaceAgent{
|
||||
ResourceID: resource.ID,
|
||||
})
|
||||
script := dbgen.WorkspaceAgentScript(t, db, database.WorkspaceAgentScript{
|
||||
scripts := dbgen.WorkspaceAgentScripts(t, db, 3, database.WorkspaceAgentScript{
|
||||
WorkspaceAgentID: agent.ID,
|
||||
})
|
||||
dbgen.WorkspaceAgentScriptTimings(t, db, script, 3)
|
||||
dbgen.WorkspaceAgentScriptTimings(t, db, scripts)
|
||||
|
||||
// When: fetching the timings
|
||||
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
|
||||
|
||||
@@ -12,9 +12,9 @@ import (
|
||||
|
||||
"github.com/hashicorp/hcl/v2"
|
||||
"github.com/hashicorp/hcl/v2/hclsyntax"
|
||||
"github.com/zclconf/go-cty/cty"
|
||||
|
||||
"github.com/coder/coder/v2/coderd/rbac/policy"
|
||||
"github.com/coder/coder/v2/provisioner/terraform/tfparse"
|
||||
"github.com/coder/coder/v2/provisionersdk"
|
||||
|
||||
"github.com/google/uuid"
|
||||
@@ -24,6 +24,7 @@ import (
|
||||
"github.com/coder/coder/v2/coderd/audit"
|
||||
"github.com/coder/coder/v2/coderd/database"
|
||||
"github.com/coder/coder/v2/coderd/database/db2sdk"
|
||||
"github.com/coder/coder/v2/coderd/database/dbauthz"
|
||||
"github.com/coder/coder/v2/coderd/database/dbtime"
|
||||
"github.com/coder/coder/v2/coderd/httpapi"
|
||||
"github.com/coder/coder/v2/coderd/provisionerdserver"
|
||||
@@ -63,6 +64,7 @@ type Builder struct {
|
||||
templateVersion *database.TemplateVersion
|
||||
templateVersionJob *database.ProvisionerJob
|
||||
templateVersionParameters *[]database.TemplateVersionParameter
|
||||
templateVersionVariables *[]database.TemplateVersionVariable
|
||||
templateVersionWorkspaceTags *[]database.TemplateVersionWorkspaceTag
|
||||
lastBuild *database.WorkspaceBuild
|
||||
lastBuildErr *error
|
||||
@@ -213,12 +215,12 @@ func (b *Builder) Build(
|
||||
authFunc func(action policy.Action, object rbac.Objecter) bool,
|
||||
auditBaggage audit.WorkspaceBuildBaggage,
|
||||
) (
|
||||
*database.WorkspaceBuild, *database.ProvisionerJob, error,
|
||||
*database.WorkspaceBuild, *database.ProvisionerJob, []database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow, error,
|
||||
) {
|
||||
var err error
|
||||
b.ctx, err = audit.BaggageToContext(ctx, auditBaggage)
|
||||
if err != nil {
|
||||
return nil, nil, xerrors.Errorf("create audit baggage: %w", err)
|
||||
return nil, nil, nil, xerrors.Errorf("create audit baggage: %w", err)
|
||||
}
|
||||
|
||||
// Run the build in a transaction with RepeatableRead isolation, and retries.
|
||||
@@ -227,16 +229,17 @@ func (b *Builder) Build(
|
||||
// later reads are consistent with earlier ones.
|
||||
var workspaceBuild *database.WorkspaceBuild
|
||||
var provisionerJob *database.ProvisionerJob
|
||||
var provisionerDaemons []database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow
|
||||
err = database.ReadModifyUpdate(store, func(tx database.Store) error {
|
||||
var err error
|
||||
b.store = tx
|
||||
workspaceBuild, provisionerJob, err = b.buildTx(authFunc)
|
||||
workspaceBuild, provisionerJob, provisionerDaemons, err = b.buildTx(authFunc)
|
||||
return err
|
||||
})
|
||||
if err != nil {
|
||||
return nil, nil, xerrors.Errorf("build tx: %w", err)
|
||||
return nil, nil, nil, xerrors.Errorf("build tx: %w", err)
|
||||
}
|
||||
return workspaceBuild, provisionerJob, nil
|
||||
return workspaceBuild, provisionerJob, provisionerDaemons, nil
|
||||
}
|
||||
|
||||
// buildTx contains the business logic of computing a new build. Attributes of the new database objects are computed
|
||||
@@ -246,35 +249,35 @@ func (b *Builder) Build(
|
||||
//
|
||||
// In order to utilize this cache, the functions that compute build attributes use a pointer receiver type.
|
||||
func (b *Builder) buildTx(authFunc func(action policy.Action, object rbac.Objecter) bool) (
|
||||
*database.WorkspaceBuild, *database.ProvisionerJob, error,
|
||||
*database.WorkspaceBuild, *database.ProvisionerJob, []database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow, error,
|
||||
) {
|
||||
if authFunc != nil {
|
||||
err := b.authorize(authFunc)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
return nil, nil, nil, err
|
||||
}
|
||||
}
|
||||
err := b.checkTemplateVersionMatchesTemplate()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
return nil, nil, nil, err
|
||||
}
|
||||
err = b.checkTemplateJobStatus()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
return nil, nil, nil, err
|
||||
}
|
||||
err = b.checkRunningBuild()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
return nil, nil, nil, err
|
||||
}
|
||||
|
||||
template, err := b.getTemplate()
|
||||
if err != nil {
|
||||
return nil, nil, BuildError{http.StatusInternalServerError, "failed to fetch template", err}
|
||||
return nil, nil, nil, BuildError{http.StatusInternalServerError, "failed to fetch template", err}
|
||||
}
|
||||
|
||||
templateVersionJob, err := b.getTemplateVersionJob()
|
||||
if err != nil {
|
||||
return nil, nil, BuildError{
|
||||
return nil, nil, nil, BuildError{
|
||||
http.StatusInternalServerError, "failed to fetch template version job", err,
|
||||
}
|
||||
}
|
||||
@@ -294,7 +297,7 @@ func (b *Builder) buildTx(authFunc func(action policy.Action, object rbac.Object
|
||||
LogLevel: b.logLevel,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, nil, BuildError{
|
||||
return nil, nil, nil, BuildError{
|
||||
http.StatusInternalServerError,
|
||||
"marshal provision job",
|
||||
err,
|
||||
@@ -302,12 +305,12 @@ func (b *Builder) buildTx(authFunc func(action policy.Action, object rbac.Object
|
||||
}
|
||||
traceMetadataRaw, err := json.Marshal(tracing.MetadataFromContext(b.ctx))
|
||||
if err != nil {
|
||||
return nil, nil, BuildError{http.StatusInternalServerError, "marshal metadata", err}
|
||||
return nil, nil, nil, BuildError{http.StatusInternalServerError, "marshal metadata", err}
|
||||
}
|
||||
|
||||
tags, err := b.getProvisionerTags()
|
||||
if err != nil {
|
||||
return nil, nil, err // already wrapped BuildError
|
||||
return nil, nil, nil, err // already wrapped BuildError
|
||||
}
|
||||
|
||||
now := dbtime.Now()
|
||||
@@ -329,20 +332,32 @@ func (b *Builder) buildTx(authFunc func(action policy.Action, object rbac.Object
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return nil, nil, BuildError{http.StatusInternalServerError, "insert provisioner job", err}
|
||||
return nil, nil, nil, BuildError{http.StatusInternalServerError, "insert provisioner job", err}
|
||||
}
|
||||
|
||||
// nolint:gocritic // The user performing this request may not have permission
|
||||
// to read all provisioner daemons. We need to retrieve the eligible
|
||||
// provisioner daemons for this job to show in the UI if there is no
|
||||
// matching provisioner daemon.
|
||||
provisionerDaemons, err := b.store.GetEligibleProvisionerDaemonsByProvisionerJobIDs(dbauthz.AsSystemReadProvisionerDaemons(b.ctx), []uuid.UUID{provisionerJob.ID})
|
||||
if err != nil {
|
||||
// NOTE: we do **not** want to fail a workspace build if we fail to
|
||||
// retrieve provisioner daemons. This is just to show in the UI if there
|
||||
// is no matching provisioner daemon for the job.
|
||||
provisionerDaemons = []database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow{}
|
||||
}
|
||||
|
||||
templateVersionID, err := b.getTemplateVersionID()
|
||||
if err != nil {
|
||||
return nil, nil, BuildError{http.StatusInternalServerError, "compute template version ID", err}
|
||||
return nil, nil, nil, BuildError{http.StatusInternalServerError, "compute template version ID", err}
|
||||
}
|
||||
buildNum, err := b.getBuildNumber()
|
||||
if err != nil {
|
||||
return nil, nil, BuildError{http.StatusInternalServerError, "compute build number", err}
|
||||
return nil, nil, nil, BuildError{http.StatusInternalServerError, "compute build number", err}
|
||||
}
|
||||
state, err := b.getState()
|
||||
if err != nil {
|
||||
return nil, nil, BuildError{http.StatusInternalServerError, "compute build state", err}
|
||||
return nil, nil, nil, BuildError{http.StatusInternalServerError, "compute build state", err}
|
||||
}
|
||||
|
||||
var workspaceBuild database.WorkspaceBuild
|
||||
@@ -393,10 +408,10 @@ func (b *Builder) buildTx(authFunc func(action policy.Action, object rbac.Object
|
||||
return nil
|
||||
}, nil)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
return nil, nil, nil, err
|
||||
}
|
||||
|
||||
return &workspaceBuild, &provisionerJob, nil
|
||||
return &workspaceBuild, &provisionerJob, provisionerDaemons, nil
|
||||
}
|
||||
|
||||
func (b *Builder) getTemplate() (*database.Template, error) {
|
||||
@@ -603,6 +618,22 @@ func (b *Builder) getTemplateVersionParameters() ([]database.TemplateVersionPara
|
||||
return tvp, nil
|
||||
}
|
||||
|
||||
func (b *Builder) getTemplateVersionVariables() ([]database.TemplateVersionVariable, error) {
|
||||
if b.templateVersionVariables != nil {
|
||||
return *b.templateVersionVariables, nil
|
||||
}
|
||||
tvID, err := b.getTemplateVersionID()
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("get template version ID to get variables: %w", err)
|
||||
}
|
||||
tvs, err := b.store.GetTemplateVersionVariables(b.ctx, tvID)
|
||||
if err != nil && !xerrors.Is(err, sql.ErrNoRows) {
|
||||
return nil, xerrors.Errorf("get template version %s variables: %w", tvID, err)
|
||||
}
|
||||
b.templateVersionVariables = &tvs
|
||||
return tvs, nil
|
||||
}
|
||||
|
||||
// verifyNoLegacyParameters verifies that initiator can't start the workspace build
|
||||
// if it uses legacy parameters (database.ParameterSchemas).
|
||||
func (b *Builder) verifyNoLegacyParameters() error {
|
||||
@@ -664,17 +695,40 @@ func (b *Builder) getProvisionerTags() (map[string]string, error) {
|
||||
tags[name] = value
|
||||
}
|
||||
|
||||
// Step 2: Mutate workspace tags
|
||||
// Step 2: Mutate workspace tags:
|
||||
// - Get workspace tags from the template version job
|
||||
// - Get template version variables from the template version as they can be
|
||||
// referenced in workspace tags
|
||||
// - Get parameters from the workspace build as they can also be referenced
|
||||
// in workspace tags
|
||||
// - Evaluate workspace tags given the above inputs
|
||||
workspaceTags, err := b.getTemplateVersionWorkspaceTags()
|
||||
if err != nil {
|
||||
return nil, BuildError{http.StatusInternalServerError, "failed to fetch template version workspace tags", err}
|
||||
}
|
||||
tvs, err := b.getTemplateVersionVariables()
|
||||
if err != nil {
|
||||
return nil, BuildError{http.StatusInternalServerError, "failed to fetch template version variables", err}
|
||||
}
|
||||
varsM := make(map[string]string)
|
||||
for _, tv := range tvs {
|
||||
// FIXME: do this in Terraform? This is a bit of a hack.
|
||||
if tv.Value == "" {
|
||||
varsM[tv.Name] = tv.DefaultValue
|
||||
} else {
|
||||
varsM[tv.Name] = tv.Value
|
||||
}
|
||||
}
|
||||
parameterNames, parameterValues, err := b.getParameters()
|
||||
if err != nil {
|
||||
return nil, err // already wrapped BuildError
|
||||
}
|
||||
paramsM := make(map[string]string)
|
||||
for i, name := range parameterNames {
|
||||
paramsM[name] = parameterValues[i]
|
||||
}
|
||||
|
||||
evalCtx := buildParametersEvalContext(parameterNames, parameterValues)
|
||||
evalCtx := tfparse.BuildEvalContext(varsM, paramsM)
|
||||
for _, workspaceTag := range workspaceTags {
|
||||
expr, diags := hclsyntax.ParseExpression([]byte(workspaceTag.Value), "expression.hcl", hcl.InitialPos)
|
||||
if diags.HasErrors() {
|
||||
@@ -687,7 +741,7 @@ func (b *Builder) getProvisionerTags() (map[string]string, error) {
|
||||
}
|
||||
|
||||
// Do not use "val.AsString()" as it can panic
|
||||
str, err := ctyValueString(val)
|
||||
str, err := tfparse.CtyValueString(val)
|
||||
if err != nil {
|
||||
return nil, BuildError{http.StatusBadRequest, "failed to marshal cty.Value as string", err}
|
||||
}
|
||||
@@ -696,44 +750,6 @@ func (b *Builder) getProvisionerTags() (map[string]string, error) {
|
||||
return tags, nil
|
||||
}
|
||||
|
||||
func buildParametersEvalContext(names, values []string) *hcl.EvalContext {
|
||||
m := map[string]cty.Value{}
|
||||
for i, name := range names {
|
||||
m[name] = cty.MapVal(map[string]cty.Value{
|
||||
"value": cty.StringVal(values[i]),
|
||||
})
|
||||
}
|
||||
|
||||
if len(m) == 0 {
|
||||
return nil // otherwise, panic: must not call MapVal with empty map
|
||||
}
|
||||
|
||||
return &hcl.EvalContext{
|
||||
Variables: map[string]cty.Value{
|
||||
"data": cty.MapVal(map[string]cty.Value{
|
||||
"coder_parameter": cty.MapVal(m),
|
||||
}),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func ctyValueString(val cty.Value) (string, error) {
|
||||
switch val.Type() {
|
||||
case cty.Bool:
|
||||
if val.True() {
|
||||
return "true", nil
|
||||
} else {
|
||||
return "false", nil
|
||||
}
|
||||
case cty.Number:
|
||||
return val.AsBigFloat().String(), nil
|
||||
case cty.String:
|
||||
return val.AsString(), nil
|
||||
default:
|
||||
return "", xerrors.Errorf("only primitive types are supported - bool, number, and string")
|
||||
}
|
||||
}
|
||||
|
||||
func (b *Builder) getTemplateVersionWorkspaceTags() ([]database.TemplateVersionWorkspaceTag, error) {
|
||||
if b.templateVersionWorkspaceTags != nil {
|
||||
return *b.templateVersionWorkspaceTags, nil
|
||||
|
||||
@@ -58,9 +58,11 @@ func TestBuilder_NoOptions(t *testing.T) {
|
||||
withTemplate,
|
||||
withInactiveVersion(nil),
|
||||
withLastBuildFound,
|
||||
withTemplateVersionVariables(inactiveVersionID, nil),
|
||||
withRichParameters(nil),
|
||||
withParameterSchemas(inactiveJobID, nil),
|
||||
withWorkspaceTags(inactiveVersionID, nil),
|
||||
withProvisionerDaemons([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow{}),
|
||||
|
||||
// Outputs
|
||||
expectProvisionerJob(func(job database.InsertProvisionerJobParams) {
|
||||
@@ -94,7 +96,8 @@ func TestBuilder_NoOptions(t *testing.T) {
|
||||
|
||||
ws := database.Workspace{ID: workspaceID, TemplateID: templateID, OwnerID: userID}
|
||||
uut := wsbuilder.New(ws, database.WorkspaceTransitionStart)
|
||||
_, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
// nolint: dogsled
|
||||
_, _, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
req.NoError(err)
|
||||
}
|
||||
|
||||
@@ -111,9 +114,11 @@ func TestBuilder_Initiator(t *testing.T) {
|
||||
withTemplate,
|
||||
withInactiveVersion(nil),
|
||||
withLastBuildFound,
|
||||
withTemplateVersionVariables(inactiveVersionID, nil),
|
||||
withRichParameters(nil),
|
||||
withParameterSchemas(inactiveJobID, nil),
|
||||
withWorkspaceTags(inactiveVersionID, nil),
|
||||
withProvisionerDaemons([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow{}),
|
||||
|
||||
// Outputs
|
||||
expectProvisionerJob(func(job database.InsertProvisionerJobParams) {
|
||||
@@ -130,7 +135,8 @@ func TestBuilder_Initiator(t *testing.T) {
|
||||
|
||||
ws := database.Workspace{ID: workspaceID, TemplateID: templateID, OwnerID: userID}
|
||||
uut := wsbuilder.New(ws, database.WorkspaceTransitionStart).Initiator(otherUserID)
|
||||
_, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
// nolint: dogsled
|
||||
_, _, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
req.NoError(err)
|
||||
}
|
||||
|
||||
@@ -154,9 +160,11 @@ func TestBuilder_Baggage(t *testing.T) {
|
||||
withTemplate,
|
||||
withInactiveVersion(nil),
|
||||
withLastBuildFound,
|
||||
withTemplateVersionVariables(inactiveVersionID, nil),
|
||||
withRichParameters(nil),
|
||||
withParameterSchemas(inactiveJobID, nil),
|
||||
withWorkspaceTags(inactiveVersionID, nil),
|
||||
withProvisionerDaemons([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow{}),
|
||||
|
||||
// Outputs
|
||||
expectProvisionerJob(func(job database.InsertProvisionerJobParams) {
|
||||
@@ -172,7 +180,8 @@ func TestBuilder_Baggage(t *testing.T) {
|
||||
|
||||
ws := database.Workspace{ID: workspaceID, TemplateID: templateID, OwnerID: userID}
|
||||
uut := wsbuilder.New(ws, database.WorkspaceTransitionStart).Initiator(otherUserID)
|
||||
_, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{IP: "127.0.0.1"})
|
||||
// nolint: dogsled
|
||||
_, _, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{IP: "127.0.0.1"})
|
||||
req.NoError(err)
|
||||
}
|
||||
|
||||
@@ -189,9 +198,11 @@ func TestBuilder_Reason(t *testing.T) {
|
||||
withTemplate,
|
||||
withInactiveVersion(nil),
|
||||
withLastBuildFound,
|
||||
withTemplateVersionVariables(inactiveVersionID, nil),
|
||||
withRichParameters(nil),
|
||||
withParameterSchemas(inactiveJobID, nil),
|
||||
withWorkspaceTags(inactiveVersionID, nil),
|
||||
withProvisionerDaemons([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow{}),
|
||||
|
||||
// Outputs
|
||||
expectProvisionerJob(func(_ database.InsertProvisionerJobParams) {
|
||||
@@ -207,7 +218,8 @@ func TestBuilder_Reason(t *testing.T) {
|
||||
|
||||
ws := database.Workspace{ID: workspaceID, TemplateID: templateID, OwnerID: userID}
|
||||
uut := wsbuilder.New(ws, database.WorkspaceTransitionStart).Reason(database.BuildReasonAutostart)
|
||||
_, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
// nolint: dogsled
|
||||
_, _, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
req.NoError(err)
|
||||
}
|
||||
|
||||
@@ -224,8 +236,10 @@ func TestBuilder_ActiveVersion(t *testing.T) {
|
||||
withTemplate,
|
||||
withActiveVersion(nil),
|
||||
withLastBuildNotFound,
|
||||
withTemplateVersionVariables(activeVersionID, nil),
|
||||
withParameterSchemas(activeJobID, nil),
|
||||
withWorkspaceTags(activeVersionID, nil),
|
||||
withProvisionerDaemons([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow{}),
|
||||
// previous rich parameters are not queried because there is no previous build.
|
||||
|
||||
// Outputs
|
||||
@@ -247,7 +261,8 @@ func TestBuilder_ActiveVersion(t *testing.T) {
|
||||
|
||||
ws := database.Workspace{ID: workspaceID, TemplateID: templateID, OwnerID: userID}
|
||||
uut := wsbuilder.New(ws, database.WorkspaceTransitionStart).ActiveVersion()
|
||||
_, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
// nolint: dogsled
|
||||
_, _, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
req.NoError(err)
|
||||
}
|
||||
|
||||
@@ -286,6 +301,14 @@ func TestWorkspaceBuildWithTags(t *testing.T) {
|
||||
Key: "is_debug_build",
|
||||
Value: `data.coder_parameter.is_debug_build.value == "true" ? "in-debug-mode" : "no-debug"`,
|
||||
},
|
||||
{
|
||||
Key: "variable_tag",
|
||||
Value: `var.tag`,
|
||||
},
|
||||
{
|
||||
Key: "another_variable_tag",
|
||||
Value: `var.tag2`,
|
||||
},
|
||||
}
|
||||
|
||||
richParameters := []database.TemplateVersionParameter{
|
||||
@@ -297,6 +320,11 @@ func TestWorkspaceBuildWithTags(t *testing.T) {
|
||||
{Name: "number_of_oranges", Type: "number", Description: "This is fifth parameter", Mutable: false, DefaultValue: "6", Options: json.RawMessage("[]")},
|
||||
}
|
||||
|
||||
templateVersionVariables := []database.TemplateVersionVariable{
|
||||
{Name: "tag", Description: "This is a variable tag", TemplateVersionID: inactiveVersionID, Type: "string", DefaultValue: "default-value", Value: "my-value"},
|
||||
{Name: "tag2", Description: "This is another variable tag", TemplateVersionID: inactiveVersionID, Type: "string", DefaultValue: "default-value-2", Value: ""},
|
||||
}
|
||||
|
||||
buildParameters := []codersdk.WorkspaceBuildParameter{
|
||||
{Name: "project", Value: "foobar-foobaz"},
|
||||
{Name: "is_debug_build", Value: "true"},
|
||||
@@ -311,22 +339,26 @@ func TestWorkspaceBuildWithTags(t *testing.T) {
|
||||
withTemplate,
|
||||
withInactiveVersion(richParameters),
|
||||
withLastBuildFound,
|
||||
withTemplateVersionVariables(inactiveVersionID, templateVersionVariables),
|
||||
withRichParameters(nil),
|
||||
withParameterSchemas(inactiveJobID, nil),
|
||||
withWorkspaceTags(inactiveVersionID, workspaceTags),
|
||||
withProvisionerDaemons([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow{}),
|
||||
|
||||
// Outputs
|
||||
expectProvisionerJob(func(job database.InsertProvisionerJobParams) {
|
||||
asrt.Len(job.Tags, 10)
|
||||
asrt.Len(job.Tags, 12)
|
||||
|
||||
expected := database.StringMap{
|
||||
"actually_no": "false",
|
||||
"cluster_tag": "best_developers",
|
||||
"fruits_tag": "10",
|
||||
"is_debug_build": "in-debug-mode",
|
||||
"project_tag": "foobar-foobaz+12345",
|
||||
"team_tag": "godzilla",
|
||||
"yes_or_no": "true",
|
||||
"actually_no": "false",
|
||||
"cluster_tag": "best_developers",
|
||||
"fruits_tag": "10",
|
||||
"is_debug_build": "in-debug-mode",
|
||||
"project_tag": "foobar-foobaz+12345",
|
||||
"team_tag": "godzilla",
|
||||
"yes_or_no": "true",
|
||||
"variable_tag": "my-value",
|
||||
"another_variable_tag": "default-value-2",
|
||||
|
||||
"scope": "user",
|
||||
"version": "inactive",
|
||||
@@ -343,7 +375,8 @@ func TestWorkspaceBuildWithTags(t *testing.T) {
|
||||
|
||||
ws := database.Workspace{ID: workspaceID, TemplateID: templateID, OwnerID: userID}
|
||||
uut := wsbuilder.New(ws, database.WorkspaceTransitionStart).RichParameterValues(buildParameters)
|
||||
_, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
// nolint: dogsled
|
||||
_, _, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
req.NoError(err)
|
||||
}
|
||||
|
||||
@@ -401,9 +434,11 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) {
|
||||
withTemplate,
|
||||
withInactiveVersion(richParameters),
|
||||
withLastBuildFound,
|
||||
withTemplateVersionVariables(inactiveVersionID, nil),
|
||||
withRichParameters(initialBuildParameters),
|
||||
withParameterSchemas(inactiveJobID, nil),
|
||||
withWorkspaceTags(inactiveVersionID, nil),
|
||||
withProvisionerDaemons([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow{}),
|
||||
|
||||
// Outputs
|
||||
expectProvisionerJob(func(job database.InsertProvisionerJobParams) {}),
|
||||
@@ -422,7 +457,8 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) {
|
||||
|
||||
ws := database.Workspace{ID: workspaceID, TemplateID: templateID, OwnerID: userID}
|
||||
uut := wsbuilder.New(ws, database.WorkspaceTransitionStart).RichParameterValues(nextBuildParameters)
|
||||
_, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
// nolint: dogsled
|
||||
_, _, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
req.NoError(err)
|
||||
})
|
||||
t.Run("UsePreviousParameterValues", func(t *testing.T) {
|
||||
@@ -445,9 +481,11 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) {
|
||||
withTemplate,
|
||||
withInactiveVersion(richParameters),
|
||||
withLastBuildFound,
|
||||
withTemplateVersionVariables(inactiveVersionID, nil),
|
||||
withRichParameters(initialBuildParameters),
|
||||
withParameterSchemas(inactiveJobID, nil),
|
||||
withWorkspaceTags(inactiveVersionID, nil),
|
||||
withProvisionerDaemons([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow{}),
|
||||
|
||||
// Outputs
|
||||
expectProvisionerJob(func(job database.InsertProvisionerJobParams) {}),
|
||||
@@ -466,7 +504,8 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) {
|
||||
|
||||
ws := database.Workspace{ID: workspaceID, TemplateID: templateID, OwnerID: userID}
|
||||
uut := wsbuilder.New(ws, database.WorkspaceTransitionStart).RichParameterValues(nextBuildParameters)
|
||||
_, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
// nolint: dogsled
|
||||
_, _, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
req.NoError(err)
|
||||
})
|
||||
|
||||
@@ -495,6 +534,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) {
|
||||
withTemplate,
|
||||
withInactiveVersion(richParameters),
|
||||
withLastBuildFound,
|
||||
withTemplateVersionVariables(inactiveVersionID, nil),
|
||||
withRichParameters(nil),
|
||||
withParameterSchemas(inactiveJobID, schemas),
|
||||
withWorkspaceTags(inactiveVersionID, nil),
|
||||
@@ -502,7 +542,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) {
|
||||
|
||||
ws := database.Workspace{ID: workspaceID, TemplateID: templateID, OwnerID: userID}
|
||||
uut := wsbuilder.New(ws, database.WorkspaceTransitionStart)
|
||||
_, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
_, _, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
bldErr := wsbuilder.BuildError{}
|
||||
req.ErrorAs(err, &bldErr)
|
||||
asrt.Equal(http.StatusBadRequest, bldErr.Status)
|
||||
@@ -526,6 +566,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) {
|
||||
withTemplate,
|
||||
withInactiveVersion(richParameters),
|
||||
withLastBuildFound,
|
||||
withTemplateVersionVariables(inactiveVersionID, nil),
|
||||
withRichParameters(initialBuildParameters),
|
||||
withParameterSchemas(inactiveJobID, nil),
|
||||
withWorkspaceTags(inactiveVersionID, nil),
|
||||
@@ -536,7 +577,8 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) {
|
||||
|
||||
ws := database.Workspace{ID: workspaceID, TemplateID: templateID, OwnerID: userID}
|
||||
uut := wsbuilder.New(ws, database.WorkspaceTransitionStart).RichParameterValues(nextBuildParameters)
|
||||
_, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
// nolint: dogsled
|
||||
_, _, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
bldErr := wsbuilder.BuildError{}
|
||||
req.ErrorAs(err, &bldErr)
|
||||
asrt.Equal(http.StatusBadRequest, bldErr.Status)
|
||||
@@ -576,9 +618,11 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) {
|
||||
withTemplate,
|
||||
withActiveVersion(version2params),
|
||||
withLastBuildFound,
|
||||
withTemplateVersionVariables(activeVersionID, nil),
|
||||
withRichParameters(initialBuildParameters),
|
||||
withParameterSchemas(activeJobID, nil),
|
||||
withWorkspaceTags(activeVersionID, nil),
|
||||
withProvisionerDaemons([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow{}),
|
||||
|
||||
// Outputs
|
||||
expectProvisionerJob(func(job database.InsertProvisionerJobParams) {}),
|
||||
@@ -599,7 +643,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) {
|
||||
uut := wsbuilder.New(ws, database.WorkspaceTransitionStart).
|
||||
RichParameterValues(nextBuildParameters).
|
||||
VersionID(activeVersionID)
|
||||
_, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
_, _, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
req.NoError(err)
|
||||
})
|
||||
|
||||
@@ -637,9 +681,11 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) {
|
||||
withTemplate,
|
||||
withActiveVersion(version2params),
|
||||
withLastBuildFound,
|
||||
withTemplateVersionVariables(activeVersionID, nil),
|
||||
withRichParameters(initialBuildParameters),
|
||||
withParameterSchemas(activeJobID, nil),
|
||||
withWorkspaceTags(activeVersionID, nil),
|
||||
withProvisionerDaemons([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow{}),
|
||||
|
||||
// Outputs
|
||||
expectProvisionerJob(func(job database.InsertProvisionerJobParams) {}),
|
||||
@@ -660,7 +706,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) {
|
||||
uut := wsbuilder.New(ws, database.WorkspaceTransitionStart).
|
||||
RichParameterValues(nextBuildParameters).
|
||||
VersionID(activeVersionID)
|
||||
_, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
_, _, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
req.NoError(err)
|
||||
})
|
||||
|
||||
@@ -696,9 +742,11 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) {
|
||||
withTemplate,
|
||||
withActiveVersion(version2params),
|
||||
withLastBuildFound,
|
||||
withTemplateVersionVariables(activeVersionID, nil),
|
||||
withRichParameters(initialBuildParameters),
|
||||
withParameterSchemas(activeJobID, nil),
|
||||
withWorkspaceTags(activeVersionID, nil),
|
||||
withProvisionerDaemons([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow{}),
|
||||
|
||||
// Outputs
|
||||
expectProvisionerJob(func(job database.InsertProvisionerJobParams) {}),
|
||||
@@ -719,7 +767,8 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) {
|
||||
uut := wsbuilder.New(ws, database.WorkspaceTransitionStart).
|
||||
RichParameterValues(nextBuildParameters).
|
||||
VersionID(activeVersionID)
|
||||
_, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
// nolint: dogsled
|
||||
_, _, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{})
|
||||
req.NoError(err)
|
||||
})
|
||||
}
|
||||
@@ -900,6 +949,18 @@ func withParameterSchemas(jobID uuid.UUID, schemas []database.ParameterSchema) f
|
||||
}
|
||||
}
|
||||
|
||||
func withTemplateVersionVariables(versionID uuid.UUID, params []database.TemplateVersionVariable) func(mTx *dbmock.MockStore) {
|
||||
return func(mTx *dbmock.MockStore) {
|
||||
c := mTx.EXPECT().GetTemplateVersionVariables(gomock.Any(), versionID).
|
||||
Times(1)
|
||||
if len(params) > 0 {
|
||||
c.Return(params, nil)
|
||||
} else {
|
||||
c.Return(nil, sql.ErrNoRows)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func withRichParameters(params []database.WorkspaceBuildParameter) func(mTx *dbmock.MockStore) {
|
||||
return func(mTx *dbmock.MockStore) {
|
||||
c := mTx.EXPECT().GetWorkspaceBuildParameters(gomock.Any(), lastBuildID).
|
||||
@@ -987,3 +1048,9 @@ func expectBuildParameters(
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
func withProvisionerDaemons(provisionerDaemons []database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow) func(mTx *dbmock.MockStore) {
|
||||
return func(mTx *dbmock.MockStore) {
|
||||
mTx.EXPECT().GetEligibleProvisionerDaemonsByProvisionerJobIDs(gomock.Any(), gomock.Any()).Return(provisionerDaemons, nil)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
|
||||
"github.com/coder/coder/v2/buildinfo"
|
||||
"github.com/coder/coder/v2/codersdk/drpc"
|
||||
"github.com/coder/coder/v2/codersdk/wsjson"
|
||||
"github.com/coder/coder/v2/provisionerd/proto"
|
||||
"github.com/coder/coder/v2/provisionerd/runner"
|
||||
)
|
||||
@@ -53,6 +54,7 @@ type ProvisionerDaemon struct {
|
||||
|
||||
// MatchedProvisioners represents the number of provisioner daemons
|
||||
// available to take a job at a specific point in time.
|
||||
// Introduced in Coder version 2.18.0.
|
||||
type MatchedProvisioners struct {
|
||||
// Count is the number of provisioner daemons that matched the given
|
||||
// tags. If the count is 0, it means no provisioner daemons matched the
|
||||
@@ -161,36 +163,8 @@ func (c *Client) provisionerJobLogsAfter(ctx context.Context, path string, after
|
||||
}
|
||||
return nil, nil, ReadBodyAsError(res)
|
||||
}
|
||||
logs := make(chan ProvisionerJobLog)
|
||||
closed := make(chan struct{})
|
||||
go func() {
|
||||
defer close(closed)
|
||||
defer close(logs)
|
||||
defer conn.Close(websocket.StatusGoingAway, "")
|
||||
var log ProvisionerJobLog
|
||||
for {
|
||||
msgType, msg, err := conn.Read(ctx)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
if msgType != websocket.MessageText {
|
||||
return
|
||||
}
|
||||
err = json.Unmarshal(msg, &log)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return
|
||||
case logs <- log:
|
||||
}
|
||||
}
|
||||
}()
|
||||
return logs, closeFunc(func() error {
|
||||
<-closed
|
||||
return nil
|
||||
}), nil
|
||||
d := wsjson.NewDecoder[ProvisionerJobLog](conn, websocket.MessageText, c.logger)
|
||||
return d.Chan(), d, nil
|
||||
}
|
||||
|
||||
// ServeProvisionerDaemonRequest are the parameters to call ServeProvisionerDaemon with
|
||||
|
||||
@@ -32,7 +32,7 @@ type TemplateVersion struct {
|
||||
Archived bool `json:"archived"`
|
||||
|
||||
Warnings []TemplateVersionWarning `json:"warnings,omitempty" enums:"DEPRECATED_PARAMETERS"`
|
||||
MatchedProvisioners MatchedProvisioners `json:"matched_provisioners,omitempty"`
|
||||
MatchedProvisioners *MatchedProvisioners `json:"matched_provisioners,omitempty"`
|
||||
}
|
||||
|
||||
type TemplateVersionExternalAuth struct {
|
||||
@@ -224,6 +224,22 @@ func (c *Client) TemplateVersionDryRun(ctx context.Context, version, job uuid.UU
|
||||
return j, json.NewDecoder(res.Body).Decode(&j)
|
||||
}
|
||||
|
||||
// TemplateVersionDryRunMatchedProvisioners returns the matched provisioners for a
|
||||
// template version dry-run job.
|
||||
func (c *Client) TemplateVersionDryRunMatchedProvisioners(ctx context.Context, version, job uuid.UUID) (MatchedProvisioners, error) {
|
||||
res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/templateversions/%s/dry-run/%s/matched-provisioners", version, job), nil)
|
||||
if err != nil {
|
||||
return MatchedProvisioners{}, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
if res.StatusCode != http.StatusOK {
|
||||
return MatchedProvisioners{}, ReadBodyAsError(res)
|
||||
}
|
||||
|
||||
var matched MatchedProvisioners
|
||||
return matched, json.NewDecoder(res.Body).Decode(&matched)
|
||||
}
|
||||
|
||||
// TemplateVersionDryRunResources returns the resources of a finished template
|
||||
// version dry-run job.
|
||||
func (c *Client) TemplateVersionDryRunResources(ctx context.Context, version, job uuid.UUID) ([]WorkspaceResource, error) {
|
||||
|
||||
@@ -15,6 +15,7 @@ import (
|
||||
"nhooyr.io/websocket"
|
||||
|
||||
"github.com/coder/coder/v2/coderd/tracing"
|
||||
"github.com/coder/coder/v2/codersdk/wsjson"
|
||||
)
|
||||
|
||||
type WorkspaceAgentStatus string
|
||||
@@ -454,30 +455,6 @@ func (c *Client) WorkspaceAgentLogsAfter(ctx context.Context, agentID uuid.UUID,
|
||||
}
|
||||
return nil, nil, ReadBodyAsError(res)
|
||||
}
|
||||
logChunks := make(chan []WorkspaceAgentLog, 1)
|
||||
closed := make(chan struct{})
|
||||
ctx, wsNetConn := WebsocketNetConn(ctx, conn, websocket.MessageText)
|
||||
decoder := json.NewDecoder(wsNetConn)
|
||||
go func() {
|
||||
defer close(closed)
|
||||
defer close(logChunks)
|
||||
defer conn.Close(websocket.StatusGoingAway, "")
|
||||
for {
|
||||
var logs []WorkspaceAgentLog
|
||||
err = decoder.Decode(&logs)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return
|
||||
case logChunks <- logs:
|
||||
}
|
||||
}
|
||||
}()
|
||||
return logChunks, closeFunc(func() error {
|
||||
_ = wsNetConn.Close()
|
||||
<-closed
|
||||
return nil
|
||||
}), nil
|
||||
d := wsjson.NewDecoder[[]WorkspaceAgentLog](conn, websocket.MessageText, c.logger)
|
||||
return d.Chan(), d, nil
|
||||
}
|
||||
|
||||
+22
-21
@@ -51,27 +51,28 @@ const (
|
||||
// WorkspaceBuild is an at-point representation of a workspace state.
|
||||
// BuildNumbers start at 1 and increase by 1 for each subsequent build
|
||||
type WorkspaceBuild struct {
|
||||
ID uuid.UUID `json:"id" format:"uuid"`
|
||||
CreatedAt time.Time `json:"created_at" format:"date-time"`
|
||||
UpdatedAt time.Time `json:"updated_at" format:"date-time"`
|
||||
WorkspaceID uuid.UUID `json:"workspace_id" format:"uuid"`
|
||||
WorkspaceName string `json:"workspace_name"`
|
||||
WorkspaceOwnerID uuid.UUID `json:"workspace_owner_id" format:"uuid"`
|
||||
WorkspaceOwnerName string `json:"workspace_owner_name"`
|
||||
WorkspaceOwnerAvatarURL string `json:"workspace_owner_avatar_url"`
|
||||
TemplateVersionID uuid.UUID `json:"template_version_id" format:"uuid"`
|
||||
TemplateVersionName string `json:"template_version_name"`
|
||||
BuildNumber int32 `json:"build_number"`
|
||||
Transition WorkspaceTransition `json:"transition" enums:"start,stop,delete"`
|
||||
InitiatorID uuid.UUID `json:"initiator_id" format:"uuid"`
|
||||
InitiatorUsername string `json:"initiator_name"`
|
||||
Job ProvisionerJob `json:"job"`
|
||||
Reason BuildReason `db:"reason" json:"reason" enums:"initiator,autostart,autostop"`
|
||||
Resources []WorkspaceResource `json:"resources"`
|
||||
Deadline NullTime `json:"deadline,omitempty" format:"date-time"`
|
||||
MaxDeadline NullTime `json:"max_deadline,omitempty" format:"date-time"`
|
||||
Status WorkspaceStatus `json:"status" enums:"pending,starting,running,stopping,stopped,failed,canceling,canceled,deleting,deleted"`
|
||||
DailyCost int32 `json:"daily_cost"`
|
||||
ID uuid.UUID `json:"id" format:"uuid"`
|
||||
CreatedAt time.Time `json:"created_at" format:"date-time"`
|
||||
UpdatedAt time.Time `json:"updated_at" format:"date-time"`
|
||||
WorkspaceID uuid.UUID `json:"workspace_id" format:"uuid"`
|
||||
WorkspaceName string `json:"workspace_name"`
|
||||
WorkspaceOwnerID uuid.UUID `json:"workspace_owner_id" format:"uuid"`
|
||||
WorkspaceOwnerName string `json:"workspace_owner_name"`
|
||||
WorkspaceOwnerAvatarURL string `json:"workspace_owner_avatar_url"`
|
||||
TemplateVersionID uuid.UUID `json:"template_version_id" format:"uuid"`
|
||||
TemplateVersionName string `json:"template_version_name"`
|
||||
BuildNumber int32 `json:"build_number"`
|
||||
Transition WorkspaceTransition `json:"transition" enums:"start,stop,delete"`
|
||||
InitiatorID uuid.UUID `json:"initiator_id" format:"uuid"`
|
||||
InitiatorUsername string `json:"initiator_name"`
|
||||
Job ProvisionerJob `json:"job"`
|
||||
Reason BuildReason `db:"reason" json:"reason" enums:"initiator,autostart,autostop"`
|
||||
Resources []WorkspaceResource `json:"resources"`
|
||||
Deadline NullTime `json:"deadline,omitempty" format:"date-time"`
|
||||
MaxDeadline NullTime `json:"max_deadline,omitempty" format:"date-time"`
|
||||
Status WorkspaceStatus `json:"status" enums:"pending,starting,running,stopping,stopped,failed,canceling,canceled,deleting,deleted"`
|
||||
DailyCost int32 `json:"daily_cost"`
|
||||
MatchedProvisioners *MatchedProvisioners `json:"matched_provisioners,omitempty"`
|
||||
}
|
||||
|
||||
// WorkspaceResource describes resources used to create a workspace, for instance:
|
||||
|
||||
@@ -0,0 +1,75 @@
|
||||
package wsjson
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"sync/atomic"
|
||||
|
||||
"nhooyr.io/websocket"
|
||||
|
||||
"cdr.dev/slog"
|
||||
)
|
||||
|
||||
type Decoder[T any] struct {
|
||||
conn *websocket.Conn
|
||||
typ websocket.MessageType
|
||||
ctx context.Context
|
||||
cancel context.CancelFunc
|
||||
chanCalled atomic.Bool
|
||||
logger slog.Logger
|
||||
}
|
||||
|
||||
// Chan starts the decoder reading from the websocket and returns a channel for reading the
|
||||
// resulting values. The chan T is closed if the underlying websocket is closed, or we encounter an
|
||||
// error. We also close the underlying websocket if we encounter an error reading or decoding.
|
||||
func (d *Decoder[T]) Chan() <-chan T {
|
||||
if !d.chanCalled.CompareAndSwap(false, true) {
|
||||
panic("chan called more than once")
|
||||
}
|
||||
values := make(chan T, 1)
|
||||
go func() {
|
||||
defer close(values)
|
||||
defer d.conn.Close(websocket.StatusGoingAway, "")
|
||||
for {
|
||||
// we don't use d.ctx here because it only gets canceled after closing the connection
|
||||
// and a "connection closed" type error is more clear than context canceled.
|
||||
typ, b, err := d.conn.Read(context.Background())
|
||||
if err != nil {
|
||||
// might be benign like EOF, so just log at debug
|
||||
d.logger.Debug(d.ctx, "error reading from websocket", slog.Error(err))
|
||||
return
|
||||
}
|
||||
if typ != d.typ {
|
||||
d.logger.Error(d.ctx, "websocket type mismatch while decoding")
|
||||
return
|
||||
}
|
||||
var value T
|
||||
err = json.Unmarshal(b, &value)
|
||||
if err != nil {
|
||||
d.logger.Error(d.ctx, "error unmarshalling", slog.Error(err))
|
||||
return
|
||||
}
|
||||
select {
|
||||
case values <- value:
|
||||
// OK
|
||||
case <-d.ctx.Done():
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
return values
|
||||
}
|
||||
|
||||
// nolint: revive // complains that Encoder has the same function name
|
||||
func (d *Decoder[T]) Close() error {
|
||||
err := d.conn.Close(websocket.StatusNormalClosure, "")
|
||||
d.cancel()
|
||||
return err
|
||||
}
|
||||
|
||||
// NewDecoder creates a JSON-over-websocket decoder for type T, which must be deserializable from
|
||||
// JSON.
|
||||
func NewDecoder[T any](conn *websocket.Conn, typ websocket.MessageType, logger slog.Logger) *Decoder[T] {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
return &Decoder[T]{conn: conn, ctx: ctx, cancel: cancel, typ: typ, logger: logger}
|
||||
}
|
||||
@@ -0,0 +1,42 @@
|
||||
package wsjson
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
|
||||
"golang.org/x/xerrors"
|
||||
"nhooyr.io/websocket"
|
||||
)
|
||||
|
||||
type Encoder[T any] struct {
|
||||
conn *websocket.Conn
|
||||
typ websocket.MessageType
|
||||
}
|
||||
|
||||
func (e *Encoder[T]) Encode(v T) error {
|
||||
w, err := e.conn.Writer(context.Background(), e.typ)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("get websocket writer: %w", err)
|
||||
}
|
||||
defer w.Close()
|
||||
j := json.NewEncoder(w)
|
||||
err = j.Encode(v)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("encode json: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *Encoder[T]) Close(c websocket.StatusCode) error {
|
||||
return e.conn.Close(c, "")
|
||||
}
|
||||
|
||||
// NewEncoder creates a JSON-over websocket encoder for the type T, which must be JSON-serializable.
|
||||
// You may then call Encode() to send objects over the websocket. Creating an Encoder closes the
|
||||
// websocket for reading, turning it into a unidirectional write stream of JSON-encoded objects.
|
||||
func NewEncoder[T any](conn *websocket.Conn, typ websocket.MessageType) *Encoder[T] {
|
||||
// Here we close the websocket for reading, so that the websocket library will handle pings and
|
||||
// close frames.
|
||||
_ = conn.CloseRead(context.Background())
|
||||
return &Encoder[T]{conn: conn, typ: typ}
|
||||
}
|
||||
@@ -24,7 +24,7 @@ We track the following resources:
|
||||
| OAuth2ProviderAppSecret<br><i></i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>app_id</td><td>false</td></tr><tr><td>created_at</td><td>false</td></tr><tr><td>display_secret</td><td>false</td></tr><tr><td>hashed_secret</td><td>false</td></tr><tr><td>id</td><td>false</td></tr><tr><td>last_used_at</td><td>false</td></tr><tr><td>secret_prefix</td><td>false</td></tr></tbody></table> |
|
||||
| Organization<br><i></i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>created_at</td><td>false</td></tr><tr><td>description</td><td>true</td></tr><tr><td>display_name</td><td>true</td></tr><tr><td>icon</td><td>true</td></tr><tr><td>id</td><td>false</td></tr><tr><td>is_default</td><td>true</td></tr><tr><td>name</td><td>true</td></tr><tr><td>updated_at</td><td>true</td></tr></tbody></table> |
|
||||
| Template<br><i>write, delete</i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>active_version_id</td><td>true</td></tr><tr><td>activity_bump</td><td>true</td></tr><tr><td>allow_user_autostart</td><td>true</td></tr><tr><td>allow_user_autostop</td><td>true</td></tr><tr><td>allow_user_cancel_workspace_jobs</td><td>true</td></tr><tr><td>autostart_block_days_of_week</td><td>true</td></tr><tr><td>autostop_requirement_days_of_week</td><td>true</td></tr><tr><td>autostop_requirement_weeks</td><td>true</td></tr><tr><td>created_at</td><td>false</td></tr><tr><td>created_by</td><td>true</td></tr><tr><td>created_by_avatar_url</td><td>false</td></tr><tr><td>created_by_username</td><td>false</td></tr><tr><td>default_ttl</td><td>true</td></tr><tr><td>deleted</td><td>false</td></tr><tr><td>deprecated</td><td>true</td></tr><tr><td>description</td><td>true</td></tr><tr><td>display_name</td><td>true</td></tr><tr><td>failure_ttl</td><td>true</td></tr><tr><td>group_acl</td><td>true</td></tr><tr><td>icon</td><td>true</td></tr><tr><td>id</td><td>true</td></tr><tr><td>max_port_sharing_level</td><td>true</td></tr><tr><td>name</td><td>true</td></tr><tr><td>organization_display_name</td><td>false</td></tr><tr><td>organization_icon</td><td>false</td></tr><tr><td>organization_id</td><td>false</td></tr><tr><td>organization_name</td><td>false</td></tr><tr><td>provisioner</td><td>true</td></tr><tr><td>require_active_version</td><td>true</td></tr><tr><td>time_til_dormant</td><td>true</td></tr><tr><td>time_til_dormant_autodelete</td><td>true</td></tr><tr><td>updated_at</td><td>false</td></tr><tr><td>user_acl</td><td>true</td></tr></tbody></table> |
|
||||
| TemplateVersion<br><i>create, write</i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>archived</td><td>true</td></tr><tr><td>created_at</td><td>false</td></tr><tr><td>created_by</td><td>true</td></tr><tr><td>created_by_avatar_url</td><td>false</td></tr><tr><td>created_by_username</td><td>false</td></tr><tr><td>external_auth_providers</td><td>false</td></tr><tr><td>id</td><td>true</td></tr><tr><td>job_id</td><td>false</td></tr><tr><td>message</td><td>false</td></tr><tr><td>name</td><td>true</td></tr><tr><td>organization_id</td><td>false</td></tr><tr><td>readme</td><td>true</td></tr><tr><td>template_id</td><td>true</td></tr><tr><td>updated_at</td><td>false</td></tr></tbody></table> |
|
||||
| TemplateVersion<br><i>create, write</i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>archived</td><td>true</td></tr><tr><td>created_at</td><td>false</td></tr><tr><td>created_by</td><td>true</td></tr><tr><td>created_by_avatar_url</td><td>false</td></tr><tr><td>created_by_username</td><td>false</td></tr><tr><td>external_auth_providers</td><td>false</td></tr><tr><td>id</td><td>true</td></tr><tr><td>job_id</td><td>false</td></tr><tr><td>message</td><td>false</td></tr><tr><td>name</td><td>true</td></tr><tr><td>organization_id</td><td>false</td></tr><tr><td>readme</td><td>true</td></tr><tr><td>source_example_id</td><td>false</td></tr><tr><td>template_id</td><td>true</td></tr><tr><td>updated_at</td><td>false</td></tr></tbody></table> |
|
||||
| User<br><i>create, write, delete</i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>avatar_url</td><td>false</td></tr><tr><td>created_at</td><td>false</td></tr><tr><td>deleted</td><td>true</td></tr><tr><td>email</td><td>true</td></tr><tr><td>github_com_user_id</td><td>false</td></tr><tr><td>hashed_one_time_passcode</td><td>false</td></tr><tr><td>hashed_password</td><td>true</td></tr><tr><td>id</td><td>true</td></tr><tr><td>last_seen_at</td><td>false</td></tr><tr><td>login_type</td><td>true</td></tr><tr><td>name</td><td>true</td></tr><tr><td>one_time_passcode_expires_at</td><td>true</td></tr><tr><td>quiet_hours_schedule</td><td>true</td></tr><tr><td>rbac_roles</td><td>true</td></tr><tr><td>status</td><td>true</td></tr><tr><td>theme_preference</td><td>false</td></tr><tr><td>updated_at</td><td>false</td></tr><tr><td>username</td><td>true</td></tr></tbody></table> |
|
||||
| WorkspaceBuild<br><i>start, stop</i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>build_number</td><td>false</td></tr><tr><td>created_at</td><td>false</td></tr><tr><td>daily_cost</td><td>false</td></tr><tr><td>deadline</td><td>false</td></tr><tr><td>id</td><td>false</td></tr><tr><td>initiator_by_avatar_url</td><td>false</td></tr><tr><td>initiator_by_username</td><td>false</td></tr><tr><td>initiator_id</td><td>false</td></tr><tr><td>job_id</td><td>false</td></tr><tr><td>max_deadline</td><td>false</td></tr><tr><td>provisioner_state</td><td>false</td></tr><tr><td>reason</td><td>false</td></tr><tr><td>template_version_id</td><td>true</td></tr><tr><td>transition</td><td>false</td></tr><tr><td>updated_at</td><td>false</td></tr><tr><td>workspace_id</td><td>false</td></tr></tbody></table> |
|
||||
| WorkspaceProxy<br><i></i> | <table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>created_at</td><td>true</td></tr><tr><td>deleted</td><td>false</td></tr><tr><td>derp_enabled</td><td>true</td></tr><tr><td>derp_only</td><td>true</td></tr><tr><td>display_name</td><td>true</td></tr><tr><td>icon</td><td>true</td></tr><tr><td>id</td><td>true</td></tr><tr><td>name</td><td>true</td></tr><tr><td>region_id</td><td>true</td></tr><tr><td>token_hashed_secret</td><td>true</td></tr><tr><td>updated_at</td><td>false</td></tr><tr><td>url</td><td>true</td></tr><tr><td>version</td><td>true</td></tr><tr><td>wildcard_hostname</td><td>true</td></tr></tbody></table> |
|
||||
|
||||
Generated
+29
@@ -52,6 +52,11 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam
|
||||
},
|
||||
"worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
|
||||
},
|
||||
"matched_provisioners": {
|
||||
"available": 0,
|
||||
"count": 0,
|
||||
"most_recently_seen": "2019-08-24T14:15:22Z"
|
||||
},
|
||||
"max_deadline": "2019-08-24T14:15:22Z",
|
||||
"reason": "initiator",
|
||||
"resources": [
|
||||
@@ -237,6 +242,11 @@ curl -X GET http://coder-server:8080/api/v2/workspacebuilds/{workspacebuild} \
|
||||
},
|
||||
"worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
|
||||
},
|
||||
"matched_provisioners": {
|
||||
"available": 0,
|
||||
"count": 0,
|
||||
"most_recently_seen": "2019-08-24T14:15:22Z"
|
||||
},
|
||||
"max_deadline": "2019-08-24T14:15:22Z",
|
||||
"reason": "initiator",
|
||||
"resources": [
|
||||
@@ -856,6 +866,11 @@ curl -X GET http://coder-server:8080/api/v2/workspacebuilds/{workspacebuild}/sta
|
||||
},
|
||||
"worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
|
||||
},
|
||||
"matched_provisioners": {
|
||||
"available": 0,
|
||||
"count": 0,
|
||||
"most_recently_seen": "2019-08-24T14:15:22Z"
|
||||
},
|
||||
"max_deadline": "2019-08-24T14:15:22Z",
|
||||
"reason": "initiator",
|
||||
"resources": [
|
||||
@@ -1114,6 +1129,11 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace}/builds \
|
||||
},
|
||||
"worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
|
||||
},
|
||||
"matched_provisioners": {
|
||||
"available": 0,
|
||||
"count": 0,
|
||||
"most_recently_seen": "2019-08-24T14:15:22Z"
|
||||
},
|
||||
"max_deadline": "2019-08-24T14:15:22Z",
|
||||
"reason": "initiator",
|
||||
"resources": [
|
||||
@@ -1277,6 +1297,10 @@ Status Code **200**
|
||||
| `»» tags` | object | false | | |
|
||||
| `»»» [any property]` | string | false | | |
|
||||
| `»» worker_id` | string(uuid) | false | | |
|
||||
| `» matched_provisioners` | [codersdk.MatchedProvisioners](schemas.md#codersdkmatchedprovisioners) | false | | |
|
||||
| `»» available` | integer | false | | Available is the number of provisioner daemons that are available to take jobs. This may be less than the count if some provisioners are busy or have been stopped. |
|
||||
| `»» count` | integer | false | | Count is the number of provisioner daemons that matched the given tags. If the count is 0, it means no provisioner daemons matched the requested tags. |
|
||||
| `»» most_recently_seen` | string(date-time) | false | | Most recently seen is the most recently seen time of the set of matched provisioners. If no provisioners matched, this field will be null. |
|
||||
| `» max_deadline` | string(date-time) | false | | |
|
||||
| `» reason` | [codersdk.BuildReason](schemas.md#codersdkbuildreason) | false | | |
|
||||
| `» resources` | array | false | | |
|
||||
@@ -1500,6 +1524,11 @@ curl -X POST http://coder-server:8080/api/v2/workspaces/{workspace}/builds \
|
||||
},
|
||||
"worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
|
||||
},
|
||||
"matched_provisioners": {
|
||||
"available": 0,
|
||||
"count": 0,
|
||||
"most_recently_seen": "2019-08-24T14:15:22Z"
|
||||
},
|
||||
"max_deadline": "2019-08-24T14:15:22Z",
|
||||
"reason": "initiator",
|
||||
"resources": [
|
||||
|
||||
Generated
+16
@@ -6602,6 +6602,11 @@ If the schedule is empty, the user will be updated to use the default schedule.|
|
||||
},
|
||||
"worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
|
||||
},
|
||||
"matched_provisioners": {
|
||||
"available": 0,
|
||||
"count": 0,
|
||||
"most_recently_seen": "2019-08-24T14:15:22Z"
|
||||
},
|
||||
"max_deadline": "2019-08-24T14:15:22Z",
|
||||
"reason": "initiator",
|
||||
"resources": [
|
||||
@@ -7300,6 +7305,11 @@ If the schedule is empty, the user will be updated to use the default schedule.|
|
||||
},
|
||||
"worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
|
||||
},
|
||||
"matched_provisioners": {
|
||||
"available": 0,
|
||||
"count": 0,
|
||||
"most_recently_seen": "2019-08-24T14:15:22Z"
|
||||
},
|
||||
"max_deadline": "2019-08-24T14:15:22Z",
|
||||
"reason": "initiator",
|
||||
"resources": [
|
||||
@@ -7439,6 +7449,7 @@ If the schedule is empty, the user will be updated to use the default schedule.|
|
||||
| `initiator_id` | string | false | | |
|
||||
| `initiator_name` | string | false | | |
|
||||
| `job` | [codersdk.ProvisionerJob](#codersdkprovisionerjob) | false | | |
|
||||
| `matched_provisioners` | [codersdk.MatchedProvisioners](#codersdkmatchedprovisioners) | false | | |
|
||||
| `max_deadline` | string | false | | |
|
||||
| `reason` | [codersdk.BuildReason](#codersdkbuildreason) | false | | |
|
||||
| `resources` | array of [codersdk.WorkspaceResource](#codersdkworkspaceresource) | false | | |
|
||||
@@ -7926,6 +7937,11 @@ If the schedule is empty, the user will be updated to use the default schedule.|
|
||||
},
|
||||
"worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
|
||||
},
|
||||
"matched_provisioners": {
|
||||
"available": 0,
|
||||
"count": 0,
|
||||
"most_recently_seen": "2019-08-24T14:15:22Z"
|
||||
},
|
||||
"max_deadline": "2019-08-24T14:15:22Z",
|
||||
"reason": "initiator",
|
||||
"resources": [
|
||||
|
||||
Generated
+40
@@ -1944,6 +1944,46 @@ Status Code **200**
|
||||
|
||||
To perform this operation, you must be authenticated. [Learn more](authentication.md).
|
||||
|
||||
## Get template version dry-run matched provisioners
|
||||
|
||||
### Code samples
|
||||
|
||||
```shell
|
||||
# Example request using curl
|
||||
curl -X GET http://coder-server:8080/api/v2/templateversions/{templateversion}/dry-run/{jobID}/matched-provisioners \
|
||||
-H 'Accept: application/json' \
|
||||
-H 'Coder-Session-Token: API_KEY'
|
||||
```
|
||||
|
||||
`GET /templateversions/{templateversion}/dry-run/{jobID}/matched-provisioners`
|
||||
|
||||
### Parameters
|
||||
|
||||
| Name | In | Type | Required | Description |
|
||||
| ----------------- | ---- | ------------ | -------- | ------------------- |
|
||||
| `templateversion` | path | string(uuid) | true | Template version ID |
|
||||
| `jobID` | path | string(uuid) | true | Job ID |
|
||||
|
||||
### Example responses
|
||||
|
||||
> 200 Response
|
||||
|
||||
```json
|
||||
{
|
||||
"available": 0,
|
||||
"count": 0,
|
||||
"most_recently_seen": "2019-08-24T14:15:22Z"
|
||||
}
|
||||
```
|
||||
|
||||
### Responses
|
||||
|
||||
| Status | Meaning | Description | Schema |
|
||||
| ------ | ------------------------------------------------------- | ----------- | ---------------------------------------------------------------------- |
|
||||
| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.MatchedProvisioners](schemas.md#codersdkmatchedprovisioners) |
|
||||
|
||||
To perform this operation, you must be authenticated. [Learn more](authentication.md).
|
||||
|
||||
## Get template version dry-run resources by job ID
|
||||
|
||||
### Code samples
|
||||
|
||||
Generated
+30
@@ -91,6 +91,11 @@ of the template will be used.
|
||||
},
|
||||
"worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
|
||||
},
|
||||
"matched_provisioners": {
|
||||
"available": 0,
|
||||
"count": 0,
|
||||
"most_recently_seen": "2019-08-24T14:15:22Z"
|
||||
},
|
||||
"max_deadline": "2019-08-24T14:15:22Z",
|
||||
"reason": "initiator",
|
||||
"resources": [
|
||||
@@ -309,6 +314,11 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam
|
||||
},
|
||||
"worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
|
||||
},
|
||||
"matched_provisioners": {
|
||||
"available": 0,
|
||||
"count": 0,
|
||||
"most_recently_seen": "2019-08-24T14:15:22Z"
|
||||
},
|
||||
"max_deadline": "2019-08-24T14:15:22Z",
|
||||
"reason": "initiator",
|
||||
"resources": [
|
||||
@@ -551,6 +561,11 @@ of the template will be used.
|
||||
},
|
||||
"worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
|
||||
},
|
||||
"matched_provisioners": {
|
||||
"available": 0,
|
||||
"count": 0,
|
||||
"most_recently_seen": "2019-08-24T14:15:22Z"
|
||||
},
|
||||
"max_deadline": "2019-08-24T14:15:22Z",
|
||||
"reason": "initiator",
|
||||
"resources": [
|
||||
@@ -772,6 +787,11 @@ curl -X GET http://coder-server:8080/api/v2/workspaces \
|
||||
},
|
||||
"worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
|
||||
},
|
||||
"matched_provisioners": {
|
||||
"available": 0,
|
||||
"count": 0,
|
||||
"most_recently_seen": "2019-08-24T14:15:22Z"
|
||||
},
|
||||
"max_deadline": "2019-08-24T14:15:22Z",
|
||||
"reason": "initiator",
|
||||
"resources": [
|
||||
@@ -987,6 +1007,11 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace} \
|
||||
},
|
||||
"worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
|
||||
},
|
||||
"matched_provisioners": {
|
||||
"available": 0,
|
||||
"count": 0,
|
||||
"most_recently_seen": "2019-08-24T14:15:22Z"
|
||||
},
|
||||
"max_deadline": "2019-08-24T14:15:22Z",
|
||||
"reason": "initiator",
|
||||
"resources": [
|
||||
@@ -1321,6 +1346,11 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/dormant \
|
||||
},
|
||||
"worker_id": "ae5fa6f7-c55b-40c1-b40a-b36ac467652b"
|
||||
},
|
||||
"matched_provisioners": {
|
||||
"available": 0,
|
||||
"count": 0,
|
||||
"most_recently_seen": "2019-08-24T14:15:22Z"
|
||||
},
|
||||
"max_deadline": "2019-08-24T14:15:22Z",
|
||||
"reason": "initiator",
|
||||
"resources": [
|
||||
|
||||
@@ -127,6 +127,7 @@ var auditableResourcesTypes = map[any]map[string]Action{
|
||||
"created_by_avatar_url": ActionIgnore,
|
||||
"created_by_username": ActionIgnore,
|
||||
"archived": ActionTrack,
|
||||
"source_example_id": ActionIgnore, // Never changes.
|
||||
},
|
||||
&database.User{}: {
|
||||
"id": ActionTrack,
|
||||
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"crypto/tls"
|
||||
"io"
|
||||
"net/http"
|
||||
"os/exec"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
@@ -16,7 +17,8 @@ import (
|
||||
"github.com/moby/moby/pkg/namesgenerator"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"cdr.dev/slog"
|
||||
|
||||
"github.com/coder/coder/v2/coderd/coderdtest"
|
||||
"github.com/coder/coder/v2/coderd/database"
|
||||
@@ -28,6 +30,7 @@ import (
|
||||
"github.com/coder/coder/v2/enterprise/coderd/license"
|
||||
"github.com/coder/coder/v2/enterprise/dbcrypt"
|
||||
"github.com/coder/coder/v2/provisioner/echo"
|
||||
"github.com/coder/coder/v2/provisioner/terraform"
|
||||
"github.com/coder/coder/v2/provisionerd"
|
||||
provisionerdproto "github.com/coder/coder/v2/provisionerd/proto"
|
||||
"github.com/coder/coder/v2/provisionersdk"
|
||||
@@ -304,14 +307,31 @@ func CreateOrganization(t *testing.T, client *codersdk.Client, opts CreateOrgani
|
||||
return org
|
||||
}
|
||||
|
||||
// NewExternalProvisionerDaemon runs an external provisioner daemon in a
|
||||
// goroutine and returns a closer to stop it. The echo provisioner is used
|
||||
// here. This is the default provisioner for tests and should be fine for
|
||||
// most use cases. If you need to test terraform-specific behaviors, use
|
||||
// NewExternalProvisionerDaemonTerraform instead.
|
||||
func NewExternalProvisionerDaemon(t testing.TB, client *codersdk.Client, org uuid.UUID, tags map[string]string) io.Closer {
|
||||
t.Helper()
|
||||
return newExternalProvisionerDaemon(t, client, org, tags, codersdk.ProvisionerTypeEcho)
|
||||
}
|
||||
|
||||
// NewExternalProvisionerDaemonTerraform runs an external provisioner daemon in
|
||||
// a goroutine and returns a closer to stop it. The terraform provisioner is
|
||||
// used here. Avoid using this unless you need to test terraform-specific
|
||||
// behaviors!
|
||||
func NewExternalProvisionerDaemonTerraform(t testing.TB, client *codersdk.Client, org uuid.UUID, tags map[string]string) io.Closer {
|
||||
t.Helper()
|
||||
return newExternalProvisionerDaemon(t, client, org, tags, codersdk.ProvisionerTypeTerraform)
|
||||
}
|
||||
|
||||
// nolint // This function is a helper for tests and should not be linted.
|
||||
func newExternalProvisionerDaemon(t testing.TB, client *codersdk.Client, org uuid.UUID, tags map[string]string, provisionerType codersdk.ProvisionerType) io.Closer {
|
||||
t.Helper()
|
||||
|
||||
// Without this check, the provisioner will silently fail.
|
||||
entitlements, err := client.Entitlements(context.Background())
|
||||
if err != nil {
|
||||
// AGPL instances will throw this error. They cannot use external
|
||||
// provisioners.
|
||||
t.Errorf("external provisioners requires a license with entitlements. The client failed to fetch the entitlements, is this an enterprise instance of coderd?")
|
||||
t.FailNow()
|
||||
return nil
|
||||
@@ -319,42 +339,67 @@ func NewExternalProvisionerDaemon(t testing.TB, client *codersdk.Client, org uui
|
||||
|
||||
feature := entitlements.Features[codersdk.FeatureExternalProvisionerDaemons]
|
||||
if !feature.Enabled || feature.Entitlement != codersdk.EntitlementEntitled {
|
||||
require.NoError(t, xerrors.Errorf("external provisioner daemons require an entitled license"))
|
||||
t.Errorf("external provisioner daemons require an entitled license")
|
||||
t.FailNow()
|
||||
return nil
|
||||
}
|
||||
|
||||
echoClient, echoServer := drpc.MemTransportPipe()
|
||||
provisionerClient, provisionerSrv := drpc.MemTransportPipe()
|
||||
ctx, cancelFunc := context.WithCancel(context.Background())
|
||||
serveDone := make(chan struct{})
|
||||
t.Cleanup(func() {
|
||||
_ = echoClient.Close()
|
||||
_ = echoServer.Close()
|
||||
_ = provisionerClient.Close()
|
||||
_ = provisionerSrv.Close()
|
||||
cancelFunc()
|
||||
<-serveDone
|
||||
})
|
||||
go func() {
|
||||
defer close(serveDone)
|
||||
err := echo.Serve(ctx, &provisionersdk.ServeOptions{
|
||||
Listener: echoServer,
|
||||
WorkDirectory: t.TempDir(),
|
||||
})
|
||||
assert.NoError(t, err)
|
||||
}()
|
||||
|
||||
switch provisionerType {
|
||||
case codersdk.ProvisionerTypeTerraform:
|
||||
// Ensure the Terraform binary is present in the path.
|
||||
// If not, we fail this test rather than downloading it.
|
||||
terraformPath, err := exec.LookPath("terraform")
|
||||
require.NoError(t, err, "terraform binary not found in PATH")
|
||||
t.Logf("using Terraform binary at %s", terraformPath)
|
||||
|
||||
go func() {
|
||||
defer close(serveDone)
|
||||
assert.NoError(t, terraform.Serve(ctx, &terraform.ServeOptions{
|
||||
BinaryPath: terraformPath,
|
||||
CachePath: t.TempDir(),
|
||||
ServeOptions: &provisionersdk.ServeOptions{
|
||||
Listener: provisionerSrv,
|
||||
WorkDirectory: t.TempDir(),
|
||||
},
|
||||
}))
|
||||
}()
|
||||
case codersdk.ProvisionerTypeEcho:
|
||||
go func() {
|
||||
defer close(serveDone)
|
||||
assert.NoError(t, echo.Serve(ctx, &provisionersdk.ServeOptions{
|
||||
Listener: provisionerSrv,
|
||||
WorkDirectory: t.TempDir(),
|
||||
}))
|
||||
}()
|
||||
default:
|
||||
t.Fatalf("unsupported provisioner type: %s", provisionerType)
|
||||
return nil
|
||||
}
|
||||
|
||||
daemon := provisionerd.New(func(ctx context.Context) (provisionerdproto.DRPCProvisionerDaemonClient, error) {
|
||||
return client.ServeProvisionerDaemon(ctx, codersdk.ServeProvisionerDaemonRequest{
|
||||
ID: uuid.New(),
|
||||
Name: t.Name(),
|
||||
Organization: org,
|
||||
Provisioners: []codersdk.ProvisionerType{codersdk.ProvisionerTypeEcho},
|
||||
Provisioners: []codersdk.ProvisionerType{provisionerType},
|
||||
Tags: tags,
|
||||
})
|
||||
}, &provisionerd.Options{
|
||||
Logger: testutil.Logger(t).Named("provisionerd"),
|
||||
Logger: testutil.Logger(t).Named("provisionerd").Leveled(slog.LevelDebug),
|
||||
UpdateInterval: 250 * time.Millisecond,
|
||||
ForceCancelInterval: 5 * time.Second,
|
||||
Connector: provisionerd.LocalProvisioners{
|
||||
string(database.ProvisionerTypeEcho): sdkproto.NewDRPCProvisionerClient(echoClient),
|
||||
string(provisionerType): sdkproto.NewDRPCProvisionerClient(provisionerClient),
|
||||
},
|
||||
})
|
||||
closer := coderdtest.NewProvisionerDaemonCloser(daemon)
|
||||
|
||||
@@ -0,0 +1,81 @@
|
||||
package coderd_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/coder/coder/v2/coderd/coderdtest"
|
||||
"github.com/coder/coder/v2/coderd/rbac"
|
||||
"github.com/coder/coder/v2/codersdk"
|
||||
"github.com/coder/coder/v2/codersdk/agentsdk"
|
||||
"github.com/coder/coder/v2/enterprise/coderd/coderdenttest"
|
||||
"github.com/coder/coder/v2/enterprise/coderd/license"
|
||||
"github.com/coder/coder/v2/provisioner/echo"
|
||||
"github.com/coder/coder/v2/testutil"
|
||||
)
|
||||
|
||||
// TestAgentGitSSHKeyCustomRoles tests that the agent can fetch its git ssh key when
|
||||
// the user has a custom role in a second workspace.
|
||||
func TestAgentGitSSHKeyCustomRoles(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
owner, _ := coderdenttest.New(t, &coderdenttest.Options{
|
||||
Options: &coderdtest.Options{
|
||||
IncludeProvisionerDaemon: true,
|
||||
},
|
||||
LicenseOptions: &coderdenttest.LicenseOptions{
|
||||
Features: license.Features{
|
||||
codersdk.FeatureCustomRoles: 1,
|
||||
codersdk.FeatureMultipleOrganizations: 1,
|
||||
codersdk.FeatureExternalProvisionerDaemons: 1,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// When custom roles exist in a second organization
|
||||
org := coderdenttest.CreateOrganization(t, owner, coderdenttest.CreateOrganizationOptions{
|
||||
IncludeProvisionerDaemon: true,
|
||||
})
|
||||
|
||||
ctx := testutil.Context(t, testutil.WaitShort)
|
||||
//nolint:gocritic // required to make orgs
|
||||
newRole, err := owner.CreateOrganizationRole(ctx, codersdk.Role{
|
||||
Name: "custom",
|
||||
OrganizationID: org.ID.String(),
|
||||
DisplayName: "",
|
||||
SitePermissions: nil,
|
||||
OrganizationPermissions: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{
|
||||
codersdk.ResourceTemplate: {codersdk.ActionRead, codersdk.ActionCreate, codersdk.ActionUpdate},
|
||||
}),
|
||||
UserPermissions: nil,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Create the new user
|
||||
client, _ := coderdtest.CreateAnotherUser(t, owner, org.ID, rbac.RoleIdentifier{Name: newRole.Name, OrganizationID: org.ID})
|
||||
|
||||
// Create the workspace + agent
|
||||
authToken := uuid.NewString()
|
||||
version := coderdtest.CreateTemplateVersion(t, client, org.ID, &echo.Responses{
|
||||
Parse: echo.ParseComplete,
|
||||
ProvisionPlan: echo.PlanComplete,
|
||||
ProvisionApply: echo.ProvisionApplyWithAgent(authToken),
|
||||
})
|
||||
project := coderdtest.CreateTemplate(t, client, org.ID, version.ID)
|
||||
coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
|
||||
workspace := coderdtest.CreateWorkspace(t, client, project.ID)
|
||||
coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID)
|
||||
|
||||
agentClient := agentsdk.New(client.URL)
|
||||
agentClient.SetSessionToken(authToken)
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
|
||||
defer cancel()
|
||||
|
||||
agentKey, err := agentClient.GitSSHKey(ctx)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, agentKey.PrivateKey)
|
||||
}
|
||||
@@ -109,7 +109,7 @@ func TestWorkspaceBuild(t *testing.T) {
|
||||
|
||||
for _, c := range cases {
|
||||
t.Run(c.Name, func(t *testing.T) {
|
||||
_, err = c.Client.CreateWorkspace(ctx, owner.OrganizationID, codersdk.Me, codersdk.CreateWorkspaceRequest{
|
||||
_, err = c.Client.CreateUserWorkspace(ctx, codersdk.Me, codersdk.CreateWorkspaceRequest{
|
||||
TemplateVersionID: oldVersion.ID,
|
||||
Name: "abc123",
|
||||
AutomaticUpdates: codersdk.AutomaticUpdatesNever,
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
package coderd_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"sync/atomic"
|
||||
"testing"
|
||||
@@ -17,8 +20,10 @@ import (
|
||||
"github.com/coder/coder/v2/coderd/autobuild"
|
||||
"github.com/coder/coder/v2/coderd/coderdtest"
|
||||
"github.com/coder/coder/v2/coderd/database"
|
||||
"github.com/coder/coder/v2/coderd/database/dbauthz"
|
||||
"github.com/coder/coder/v2/coderd/database/dbfake"
|
||||
"github.com/coder/coder/v2/coderd/database/dbtestutil"
|
||||
"github.com/coder/coder/v2/coderd/httpmw"
|
||||
"github.com/coder/coder/v2/coderd/notifications"
|
||||
"github.com/coder/coder/v2/coderd/rbac"
|
||||
agplschedule "github.com/coder/coder/v2/coderd/schedule"
|
||||
@@ -31,6 +36,7 @@ import (
|
||||
"github.com/coder/coder/v2/enterprise/coderd/license"
|
||||
"github.com/coder/coder/v2/enterprise/coderd/schedule"
|
||||
"github.com/coder/coder/v2/provisioner/echo"
|
||||
"github.com/coder/coder/v2/provisionersdk"
|
||||
"github.com/coder/coder/v2/testutil"
|
||||
)
|
||||
|
||||
@@ -1176,6 +1182,193 @@ func TestTemplateDoesNotAllowUserAutostop(t *testing.T) {
|
||||
})
|
||||
}
|
||||
|
||||
// TestWorkspaceTagsTerraform tests that a workspace can be created with tags.
|
||||
// This is an end-to-end-style test, meaning that we actually run the
|
||||
// real Terraform provisioner and validate that the workspace is created
|
||||
// successfully. The workspace itself does not specify any resources, and
|
||||
// this is fine.
|
||||
func TestWorkspaceTagsTerraform(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
mainTfTemplate := `
|
||||
terraform {
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
}
|
||||
}
|
||||
}
|
||||
provider "coder" {}
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
%s
|
||||
`
|
||||
|
||||
for _, tc := range []struct {
|
||||
name string
|
||||
// tags to apply to the external provisioner
|
||||
provisionerTags map[string]string
|
||||
// tags to apply to the create template version request
|
||||
createTemplateVersionRequestTags map[string]string
|
||||
// the coder_workspace_tags bit of main.tf.
|
||||
// you can add more stuff here if you need
|
||||
tfWorkspaceTags string
|
||||
skipCreateWorkspace bool
|
||||
}{
|
||||
{
|
||||
name: "no tags",
|
||||
tfWorkspaceTags: ``,
|
||||
},
|
||||
{
|
||||
name: "empty tags",
|
||||
tfWorkspaceTags: `
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {}
|
||||
}
|
||||
`,
|
||||
},
|
||||
{
|
||||
name: "static tag",
|
||||
provisionerTags: map[string]string{"foo": "bar"},
|
||||
tfWorkspaceTags: `
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"foo" = "bar"
|
||||
}
|
||||
}`,
|
||||
},
|
||||
{
|
||||
name: "tag variable",
|
||||
provisionerTags: map[string]string{"foo": "bar"},
|
||||
tfWorkspaceTags: `
|
||||
variable "foo" {
|
||||
default = "bar"
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"foo" = var.foo
|
||||
}
|
||||
}`,
|
||||
},
|
||||
{
|
||||
name: "tag param",
|
||||
provisionerTags: map[string]string{"foo": "bar"},
|
||||
tfWorkspaceTags: `
|
||||
data "coder_parameter" "foo" {
|
||||
name = "foo"
|
||||
type = "string"
|
||||
default = "bar"
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"foo" = data.coder_parameter.foo.value
|
||||
}
|
||||
}`,
|
||||
},
|
||||
{
|
||||
name: "tag param with default from var",
|
||||
provisionerTags: map[string]string{"foo": "bar"},
|
||||
tfWorkspaceTags: `
|
||||
variable "foo" {
|
||||
type = string
|
||||
default = "bar"
|
||||
}
|
||||
data "coder_parameter" "foo" {
|
||||
name = "foo"
|
||||
type = "string"
|
||||
default = var.foo
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"foo" = data.coder_parameter.foo.value
|
||||
}
|
||||
}`,
|
||||
},
|
||||
{
|
||||
name: "override no tags",
|
||||
provisionerTags: map[string]string{"foo": "baz"},
|
||||
createTemplateVersionRequestTags: map[string]string{"foo": "baz"},
|
||||
tfWorkspaceTags: ``,
|
||||
},
|
||||
{
|
||||
name: "override empty tags",
|
||||
provisionerTags: map[string]string{"foo": "baz"},
|
||||
createTemplateVersionRequestTags: map[string]string{"foo": "baz"},
|
||||
tfWorkspaceTags: `
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {}
|
||||
}`,
|
||||
},
|
||||
{
|
||||
name: "overrides static tag from request",
|
||||
provisionerTags: map[string]string{"foo": "baz"},
|
||||
createTemplateVersionRequestTags: map[string]string{"foo": "baz"},
|
||||
tfWorkspaceTags: `
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"foo" = "bar"
|
||||
}
|
||||
}`,
|
||||
// When we go to create the workspace, there won't be any provisioner
|
||||
// matching tag foo=bar.
|
||||
skipCreateWorkspace: true,
|
||||
},
|
||||
} {
|
||||
tc := tc
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
ctx := testutil.Context(t, testutil.WaitSuperLong)
|
||||
|
||||
client, owner := coderdenttest.New(t, &coderdenttest.Options{
|
||||
Options: &coderdtest.Options{
|
||||
// We intentionally do not run a built-in provisioner daemon here.
|
||||
IncludeProvisionerDaemon: false,
|
||||
},
|
||||
LicenseOptions: &coderdenttest.LicenseOptions{
|
||||
Features: license.Features{
|
||||
codersdk.FeatureExternalProvisionerDaemons: 1,
|
||||
},
|
||||
},
|
||||
})
|
||||
templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin())
|
||||
member, memberUser := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID)
|
||||
|
||||
_ = coderdenttest.NewExternalProvisionerDaemonTerraform(t, client, owner.OrganizationID, tc.provisionerTags)
|
||||
|
||||
// Creating a template as a template admin must succeed
|
||||
templateFiles := map[string]string{"main.tf": fmt.Sprintf(mainTfTemplate, tc.tfWorkspaceTags)}
|
||||
tarBytes := testutil.CreateTar(t, templateFiles)
|
||||
fi, err := templateAdmin.Upload(ctx, "application/x-tar", bytes.NewReader(tarBytes))
|
||||
require.NoError(t, err, "failed to upload file")
|
||||
tv, err := templateAdmin.CreateTemplateVersion(ctx, owner.OrganizationID, codersdk.CreateTemplateVersionRequest{
|
||||
Name: testutil.GetRandomName(t),
|
||||
FileID: fi.ID,
|
||||
StorageMethod: codersdk.ProvisionerStorageMethodFile,
|
||||
Provisioner: codersdk.ProvisionerTypeTerraform,
|
||||
ProvisionerTags: tc.createTemplateVersionRequestTags,
|
||||
})
|
||||
require.NoError(t, err, "failed to create template version")
|
||||
coderdtest.AwaitTemplateVersionJobCompleted(t, templateAdmin, tv.ID)
|
||||
tpl := coderdtest.CreateTemplate(t, templateAdmin, owner.OrganizationID, tv.ID)
|
||||
|
||||
if !tc.skipCreateWorkspace {
|
||||
// Creating a workspace as a non-privileged user must succeed
|
||||
ws, err := member.CreateUserWorkspace(ctx, memberUser.Username, codersdk.CreateWorkspaceRequest{
|
||||
TemplateID: tpl.ID,
|
||||
Name: coderdtest.RandomUsername(t),
|
||||
})
|
||||
require.NoError(t, err, "failed to create workspace")
|
||||
coderdtest.AwaitWorkspaceBuildJobCompleted(t, member, ws.LatestBuild.ID)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Blocked by autostart requirements
|
||||
func TestExecutorAutostartBlocked(t *testing.T) {
|
||||
t.Parallel()
|
||||
@@ -1522,6 +1715,214 @@ func TestAdminViewAllWorkspaces(t *testing.T) {
|
||||
require.Equal(t, 0, len(memberViewWorkspaces.Workspaces), "member in other org should see 0 workspaces")
|
||||
}
|
||||
|
||||
func TestWorkspaceByOwnerAndName(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
t.Run("Matching Provisioner", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
|
||||
defer cancel()
|
||||
|
||||
client, db, userResponse := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{
|
||||
LicenseOptions: &coderdenttest.LicenseOptions{
|
||||
Features: license.Features{
|
||||
codersdk.FeatureExternalProvisionerDaemons: 1,
|
||||
},
|
||||
},
|
||||
})
|
||||
userSubject, _, err := httpmw.UserRBACSubject(ctx, db, userResponse.UserID, rbac.ExpandableScope(rbac.ScopeAll))
|
||||
require.NoError(t, err)
|
||||
user, err := client.User(ctx, userSubject.ID)
|
||||
require.NoError(t, err)
|
||||
username := user.Username
|
||||
|
||||
_ = coderdenttest.NewExternalProvisionerDaemon(t, client, userResponse.OrganizationID, map[string]string{
|
||||
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
|
||||
})
|
||||
|
||||
version := coderdtest.CreateTemplateVersion(t, client, userResponse.OrganizationID, nil)
|
||||
coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
|
||||
template := coderdtest.CreateTemplate(t, client, userResponse.OrganizationID, version.ID)
|
||||
workspace := coderdtest.CreateWorkspace(t, client, template.ID)
|
||||
|
||||
// Pending builds should show matching provisioners
|
||||
require.Equal(t, workspace.LatestBuild.Status, codersdk.WorkspaceStatusPending)
|
||||
require.Equal(t, workspace.LatestBuild.MatchedProvisioners.Count, 1)
|
||||
require.Equal(t, workspace.LatestBuild.MatchedProvisioners.Available, 1)
|
||||
|
||||
// Completed builds should not show matching provisioners, because no provisioner daemon can
|
||||
// be eligible to process a job that is already completed.
|
||||
completedBuild := coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID)
|
||||
require.Equal(t, completedBuild.Status, codersdk.WorkspaceStatusRunning)
|
||||
require.Equal(t, completedBuild.MatchedProvisioners.Count, 0)
|
||||
require.Equal(t, completedBuild.MatchedProvisioners.Available, 0)
|
||||
|
||||
ws, err := client.WorkspaceByOwnerAndName(ctx, username, workspace.Name, codersdk.WorkspaceOptions{})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify the workspace details
|
||||
require.Equal(t, workspace.ID, ws.ID)
|
||||
require.Equal(t, workspace.Name, ws.Name)
|
||||
require.Equal(t, workspace.TemplateID, ws.TemplateID)
|
||||
require.Equal(t, completedBuild.Status, ws.LatestBuild.Status)
|
||||
require.Equal(t, ws.LatestBuild.MatchedProvisioners.Count, 0)
|
||||
require.Equal(t, ws.LatestBuild.MatchedProvisioners.Available, 0)
|
||||
|
||||
// Verify that the provisioner daemon is registered in the database
|
||||
//nolint:gocritic // unit testing
|
||||
daemons, err := db.GetProvisionerDaemons(dbauthz.AsSystemRestricted(ctx))
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, len(daemons))
|
||||
require.Equal(t, provisionersdk.ScopeOrganization, daemons[0].Tags[provisionersdk.TagScope])
|
||||
})
|
||||
|
||||
t.Run("No Matching Provisioner", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
|
||||
defer cancel()
|
||||
|
||||
client, db, userResponse := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{
|
||||
LicenseOptions: &coderdenttest.LicenseOptions{
|
||||
Features: license.Features{
|
||||
codersdk.FeatureExternalProvisionerDaemons: 1,
|
||||
},
|
||||
},
|
||||
})
|
||||
userSubject, _, err := httpmw.UserRBACSubject(ctx, db, userResponse.UserID, rbac.ExpandableScope(rbac.ScopeAll))
|
||||
require.NoError(t, err)
|
||||
user, err := client.User(ctx, userSubject.ID)
|
||||
require.NoError(t, err)
|
||||
username := user.Username
|
||||
|
||||
closer := coderdenttest.NewExternalProvisionerDaemon(t, client, userResponse.OrganizationID, map[string]string{
|
||||
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
|
||||
})
|
||||
|
||||
version := coderdtest.CreateTemplateVersion(t, client, userResponse.OrganizationID, nil)
|
||||
coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
|
||||
template := coderdtest.CreateTemplate(t, client, userResponse.OrganizationID, version.ID)
|
||||
|
||||
// nolint:gocritic // unit testing
|
||||
daemons, err := db.GetProvisionerDaemons(dbauthz.AsSystemRestricted(ctx))
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, len(daemons), 1)
|
||||
|
||||
// Simulate a provisioner daemon failure:
|
||||
err = closer.Close()
|
||||
require.NoError(t, err)
|
||||
|
||||
// Simulate it's subsequent deletion from the database:
|
||||
|
||||
// nolint:gocritic // unit testing
|
||||
_, err = db.UpsertProvisionerDaemon(dbauthz.AsSystemRestricted(ctx), database.UpsertProvisionerDaemonParams{
|
||||
Name: daemons[0].Name,
|
||||
OrganizationID: daemons[0].OrganizationID,
|
||||
Tags: daemons[0].Tags,
|
||||
Provisioners: daemons[0].Provisioners,
|
||||
Version: daemons[0].Version,
|
||||
APIVersion: daemons[0].APIVersion,
|
||||
KeyID: daemons[0].KeyID,
|
||||
// Simulate the passing of time such that the provisioner daemon is considered stale
|
||||
// and will be deleted:
|
||||
CreatedAt: time.Now().Add(-time.Hour * 24 * 8),
|
||||
LastSeenAt: sql.NullTime{
|
||||
Time: time.Now().Add(-time.Hour * 24 * 8),
|
||||
Valid: true,
|
||||
},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
// nolint:gocritic // unit testing
|
||||
err = db.DeleteOldProvisionerDaemons(dbauthz.AsSystemRestricted(ctx))
|
||||
require.NoError(t, err)
|
||||
|
||||
// Create a workspace that will not be able to provision due to a lack of provisioner daemons:
|
||||
workspace := coderdtest.CreateWorkspace(t, client, template.ID)
|
||||
|
||||
require.Equal(t, workspace.LatestBuild.Status, codersdk.WorkspaceStatusPending)
|
||||
require.Equal(t, workspace.LatestBuild.MatchedProvisioners.Count, 0)
|
||||
require.Equal(t, workspace.LatestBuild.MatchedProvisioners.Available, 0)
|
||||
|
||||
// nolint:gocritic // unit testing
|
||||
_, err = client.WorkspaceByOwnerAndName(dbauthz.As(ctx, userSubject), username, workspace.Name, codersdk.WorkspaceOptions{})
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, workspace.LatestBuild.Status, codersdk.WorkspaceStatusPending)
|
||||
require.Equal(t, workspace.LatestBuild.MatchedProvisioners.Count, 0)
|
||||
require.Equal(t, workspace.LatestBuild.MatchedProvisioners.Available, 0)
|
||||
})
|
||||
|
||||
t.Run("Unavailable Provisioner", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
|
||||
defer cancel()
|
||||
|
||||
client, db, userResponse := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{
|
||||
LicenseOptions: &coderdenttest.LicenseOptions{
|
||||
Features: license.Features{
|
||||
codersdk.FeatureExternalProvisionerDaemons: 1,
|
||||
},
|
||||
},
|
||||
})
|
||||
userSubject, _, err := httpmw.UserRBACSubject(ctx, db, userResponse.UserID, rbac.ExpandableScope(rbac.ScopeAll))
|
||||
require.NoError(t, err)
|
||||
user, err := client.User(ctx, userSubject.ID)
|
||||
require.NoError(t, err)
|
||||
username := user.Username
|
||||
|
||||
closer := coderdenttest.NewExternalProvisionerDaemon(t, client, userResponse.OrganizationID, map[string]string{
|
||||
provisionersdk.TagScope: provisionersdk.ScopeOrganization,
|
||||
})
|
||||
|
||||
version := coderdtest.CreateTemplateVersion(t, client, userResponse.OrganizationID, nil)
|
||||
coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
|
||||
template := coderdtest.CreateTemplate(t, client, userResponse.OrganizationID, version.ID)
|
||||
|
||||
// nolint:gocritic // unit testing
|
||||
daemons, err := db.GetProvisionerDaemons(dbauthz.AsSystemRestricted(ctx))
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, len(daemons), 1)
|
||||
|
||||
// Simulate a provisioner daemon failure:
|
||||
err = closer.Close()
|
||||
require.NoError(t, err)
|
||||
|
||||
// nolint:gocritic // unit testing
|
||||
_, err = db.UpsertProvisionerDaemon(dbauthz.AsSystemRestricted(ctx), database.UpsertProvisionerDaemonParams{
|
||||
Name: daemons[0].Name,
|
||||
OrganizationID: daemons[0].OrganizationID,
|
||||
Tags: daemons[0].Tags,
|
||||
Provisioners: daemons[0].Provisioners,
|
||||
Version: daemons[0].Version,
|
||||
APIVersion: daemons[0].APIVersion,
|
||||
KeyID: daemons[0].KeyID,
|
||||
// Simulate the passing of time such that the provisioner daemon, though not stale, has been
|
||||
// has been inactive for a while:
|
||||
CreatedAt: time.Now().Add(-time.Hour * 24 * 2),
|
||||
LastSeenAt: sql.NullTime{
|
||||
Time: time.Now().Add(-time.Hour * 24 * 2),
|
||||
Valid: true,
|
||||
},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Create a workspace that will not be able to provision due to a lack of provisioner daemons:
|
||||
workspace := coderdtest.CreateWorkspace(t, client, template.ID)
|
||||
|
||||
require.Equal(t, workspace.LatestBuild.Status, codersdk.WorkspaceStatusPending)
|
||||
require.Equal(t, workspace.LatestBuild.MatchedProvisioners.Count, 1)
|
||||
require.Equal(t, workspace.LatestBuild.MatchedProvisioners.Available, 0)
|
||||
|
||||
// nolint:gocritic // unit testing
|
||||
_, err = client.WorkspaceByOwnerAndName(dbauthz.As(ctx, userSubject), username, workspace.Name, codersdk.WorkspaceOptions{})
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, workspace.LatestBuild.Status, codersdk.WorkspaceStatusPending)
|
||||
require.Equal(t, workspace.LatestBuild.MatchedProvisioners.Count, 1)
|
||||
require.Equal(t, workspace.LatestBuild.MatchedProvisioners.Available, 0)
|
||||
})
|
||||
}
|
||||
|
||||
func must[T any](value T, err error) T {
|
||||
if err != nil {
|
||||
panic(err)
|
||||
|
||||
@@ -3,6 +3,8 @@ package dbcrypt
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
@@ -89,3 +91,35 @@ func TestCiphersBackwardCompatibility(t *testing.T) {
|
||||
require.NoError(t, err, "decryption should succeed")
|
||||
require.Equal(t, msg, string(decrypted), "decrypted message should match original message")
|
||||
}
|
||||
|
||||
// If you're looking here, you're probably in trouble.
|
||||
// Here's what you need to do:
|
||||
// 1. Get the current CODER_EXTERNAL_TOKEN_ENCRYPTION_KEYS environment variable.
|
||||
// 2. Run the following command:
|
||||
// ENCRYPT_ME="<value to encrypt>" CODER_EXTERNAL_TOKEN_ENCRYPTION_KEYS="<secret keys here>" go test -v -count=1 ./enterprise/dbcrypt -test.run='^TestHelpMeEncryptSomeValue$'
|
||||
// 3. Copy the value from the test output and do what you need with it.
|
||||
func TestHelpMeEncryptSomeValue(t *testing.T) {
|
||||
t.Parallel()
|
||||
t.Skip("this only exists if you need to encrypt a value with dbcrypt, it does not actually test anything")
|
||||
|
||||
valueToEncrypt := os.Getenv("ENCRYPT_ME")
|
||||
t.Logf("valueToEncrypt: %q", valueToEncrypt)
|
||||
keys := os.Getenv("CODER_EXTERNAL_TOKEN_ENCRYPTION_KEYS")
|
||||
require.NotEmpty(t, keys, "Set the CODER_EXTERNAL_TOKEN_ENCRYPTION_KEYS environment variable to use this")
|
||||
|
||||
base64Keys := strings.Split(keys, ",")
|
||||
activeKey := base64Keys[0]
|
||||
|
||||
decodedKey, err := base64.StdEncoding.DecodeString(activeKey)
|
||||
require.NoError(t, err, "the active key should be valid base64")
|
||||
|
||||
cipher, err := cipherAES256(decodedKey)
|
||||
require.NoError(t, err)
|
||||
|
||||
t.Logf("cipher digest: %+v", cipher.HexDigest())
|
||||
|
||||
encryptedEmptyString, err := cipher.Encrypt([]byte(valueToEncrypt))
|
||||
require.NoError(t, err)
|
||||
|
||||
t.Logf("encrypted and base64-encoded: %q", base64.StdEncoding.EncodeToString(encryptedEmptyString))
|
||||
}
|
||||
|
||||
@@ -261,6 +261,21 @@ func (db *dbCrypt) UpdateExternalAuthLink(ctx context.Context, params database.U
|
||||
return link, nil
|
||||
}
|
||||
|
||||
func (db *dbCrypt) UpdateExternalAuthLinkRefreshToken(ctx context.Context, params database.UpdateExternalAuthLinkRefreshTokenParams) error {
|
||||
// We would normally use a sql.NullString here, but sqlc does not want to make
|
||||
// a params struct with a nullable string.
|
||||
var digest sql.NullString
|
||||
if params.OAuthRefreshTokenKeyID != "" {
|
||||
digest.String = params.OAuthRefreshTokenKeyID
|
||||
digest.Valid = true
|
||||
}
|
||||
if err := db.encryptField(¶ms.OAuthRefreshToken, &digest); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return db.Store.UpdateExternalAuthLinkRefreshToken(ctx, params)
|
||||
}
|
||||
|
||||
func (db *dbCrypt) GetCryptoKeys(ctx context.Context) ([]database.CryptoKey, error) {
|
||||
keys, err := db.Store.GetCryptoKeys(ctx)
|
||||
if err != nil {
|
||||
|
||||
@@ -17,6 +17,7 @@ import (
|
||||
"github.com/coder/coder/v2/coderd/database/dbgen"
|
||||
"github.com/coder/coder/v2/coderd/database/dbmock"
|
||||
"github.com/coder/coder/v2/coderd/database/dbtestutil"
|
||||
"github.com/coder/coder/v2/coderd/database/dbtime"
|
||||
)
|
||||
|
||||
func TestUserLinks(t *testing.T) {
|
||||
@@ -96,6 +97,31 @@ func TestUserLinks(t *testing.T) {
|
||||
require.EqualValues(t, expectedClaims, rawLink.Claims)
|
||||
})
|
||||
|
||||
t.Run("UpdateExternalAuthLinkRefreshToken", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
db, crypt, ciphers := setup(t)
|
||||
user := dbgen.User(t, crypt, database.User{})
|
||||
link := dbgen.ExternalAuthLink(t, crypt, database.ExternalAuthLink{
|
||||
UserID: user.ID,
|
||||
})
|
||||
|
||||
err := crypt.UpdateExternalAuthLinkRefreshToken(ctx, database.UpdateExternalAuthLinkRefreshTokenParams{
|
||||
OAuthRefreshToken: "",
|
||||
OAuthRefreshTokenKeyID: link.OAuthRefreshTokenKeyID.String,
|
||||
UpdatedAt: dbtime.Now(),
|
||||
ProviderID: link.ProviderID,
|
||||
UserID: link.UserID,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
rawLink, err := db.GetExternalAuthLink(ctx, database.GetExternalAuthLinkParams{
|
||||
ProviderID: link.ProviderID,
|
||||
UserID: link.UserID,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
requireEncryptedEquals(t, ciphers[0], rawLink.OAuthRefreshToken, "")
|
||||
})
|
||||
|
||||
t.Run("GetUserLinkByLinkedID", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
t.Run("OK", func(t *testing.T) {
|
||||
|
||||
@@ -3,6 +3,9 @@ terraform {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
}
|
||||
cloudinit = {
|
||||
source = "hashicorp/cloudinit"
|
||||
}
|
||||
aws = {
|
||||
source = "hashicorp/aws"
|
||||
}
|
||||
|
||||
@@ -184,7 +184,7 @@ resource "docker_container" "workspace" {
|
||||
ip = "host-gateway"
|
||||
}
|
||||
volumes {
|
||||
container_path = "/home/${local.username}"
|
||||
container_path = "/home/coder"
|
||||
volume_name = docker_volume.home_volume.name
|
||||
read_only = false
|
||||
}
|
||||
|
||||
@@ -174,15 +174,15 @@ require (
|
||||
go.uber.org/atomic v1.11.0
|
||||
go.uber.org/goleak v1.3.1-0.20240429205332-517bace7cc29
|
||||
go4.org/netipx v0.0.0-20230728180743-ad4cb58a6516
|
||||
golang.org/x/crypto v0.29.0
|
||||
golang.org/x/crypto v0.31.0
|
||||
golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa
|
||||
golang.org/x/mod v0.22.0
|
||||
golang.org/x/net v0.31.0
|
||||
golang.org/x/oauth2 v0.24.0
|
||||
golang.org/x/sync v0.9.0
|
||||
golang.org/x/sys v0.27.0
|
||||
golang.org/x/term v0.26.0
|
||||
golang.org/x/text v0.20.0
|
||||
golang.org/x/sync v0.10.0
|
||||
golang.org/x/sys v0.28.0
|
||||
golang.org/x/term v0.27.0
|
||||
golang.org/x/text v0.21.0
|
||||
golang.org/x/tools v0.27.0
|
||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da
|
||||
google.golang.org/api v0.209.0
|
||||
|
||||
@@ -1058,8 +1058,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y
|
||||
golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw=
|
||||
golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw=
|
||||
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
|
||||
golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ=
|
||||
golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg=
|
||||
golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
|
||||
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa h1:ELnwvuAXPNtPk1TJRuGkI9fDTwym6AYBu0qzT8AcHdI=
|
||||
golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ=
|
||||
@@ -1106,8 +1106,8 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ=
|
||||
golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
|
||||
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@@ -1149,8 +1149,8 @@ golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s=
|
||||
golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
|
||||
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
@@ -1158,8 +1158,8 @@ golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
|
||||
golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU=
|
||||
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
|
||||
golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU=
|
||||
golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E=
|
||||
golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q=
|
||||
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
@@ -1170,8 +1170,8 @@ golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug=
|
||||
golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4=
|
||||
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
|
||||
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg=
|
||||
golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
|
||||
|
||||
@@ -26,7 +26,7 @@ func (s *server) Parse(sess *provisionersdk.Session, _ *proto.ParseRequest, _ <-
|
||||
return provisionersdk.ParseErrorf("load module: %s", formatDiagnostics(sess.WorkDirectory, diags))
|
||||
}
|
||||
|
||||
workspaceTags, err := parser.WorkspaceTags(ctx)
|
||||
workspaceTags, _, err := parser.WorkspaceTags(ctx)
|
||||
if err != nil {
|
||||
return provisionersdk.ParseErrorf("can't load workspace tags: %v", err)
|
||||
}
|
||||
|
||||
@@ -80,10 +80,12 @@ func New(workdir string, opts ...Option) (*Parser, tfconfig.Diagnostics) {
|
||||
}
|
||||
|
||||
// WorkspaceTags looks for all coder_workspace_tags datasource in the module
|
||||
// and returns the raw values for the tags. Use
|
||||
func (p *Parser) WorkspaceTags(ctx context.Context) (map[string]string, error) {
|
||||
// and returns the raw values for the tags. It also returns the set of
|
||||
// variables referenced by any expressions in the raw values of tags.
|
||||
func (p *Parser) WorkspaceTags(ctx context.Context) (map[string]string, map[string]struct{}, error) {
|
||||
tags := map[string]string{}
|
||||
var skipped []string
|
||||
skipped := []string{}
|
||||
requiredVars := map[string]struct{}{}
|
||||
for _, dataResource := range p.module.DataResources {
|
||||
if dataResource.Type != "coder_workspace_tags" {
|
||||
skipped = append(skipped, strings.Join([]string{"data", dataResource.Type, dataResource.Name}, "."))
|
||||
@@ -99,13 +101,13 @@ func (p *Parser) WorkspaceTags(ctx context.Context) (map[string]string, error) {
|
||||
// We know in which HCL file is the data resource defined.
|
||||
file, diags = p.underlying.ParseHCLFile(dataResource.Pos.Filename)
|
||||
if diags.HasErrors() {
|
||||
return nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error())
|
||||
return nil, nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error())
|
||||
}
|
||||
|
||||
// Parse root to find "coder_workspace_tags".
|
||||
content, _, diags := file.Body.PartialContent(rootTemplateSchema)
|
||||
if diags.HasErrors() {
|
||||
return nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error())
|
||||
return nil, nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error())
|
||||
}
|
||||
|
||||
// Iterate over blocks to locate the exact "coder_workspace_tags" data resource.
|
||||
@@ -117,7 +119,7 @@ func (p *Parser) WorkspaceTags(ctx context.Context) (map[string]string, error) {
|
||||
// Parse "coder_workspace_tags" to find all key-value tags.
|
||||
resContent, _, diags := block.Body.PartialContent(coderWorkspaceTagsSchema)
|
||||
if diags.HasErrors() {
|
||||
return nil, xerrors.Errorf(`can't parse the resource coder_workspace_tags: %s`, diags.Error())
|
||||
return nil, nil, xerrors.Errorf(`can't parse the resource coder_workspace_tags: %s`, diags.Error())
|
||||
}
|
||||
|
||||
if resContent == nil {
|
||||
@@ -125,54 +127,106 @@ func (p *Parser) WorkspaceTags(ctx context.Context) (map[string]string, error) {
|
||||
}
|
||||
|
||||
if _, ok := resContent.Attributes["tags"]; !ok {
|
||||
return nil, xerrors.Errorf(`"tags" attribute is required by coder_workspace_tags`)
|
||||
return nil, nil, xerrors.Errorf(`"tags" attribute is required by coder_workspace_tags`)
|
||||
}
|
||||
|
||||
expr := resContent.Attributes["tags"].Expr
|
||||
tagsExpr, ok := expr.(*hclsyntax.ObjectConsExpr)
|
||||
if !ok {
|
||||
return nil, xerrors.Errorf(`"tags" attribute is expected to be a key-value map`)
|
||||
return nil, nil, xerrors.Errorf(`"tags" attribute is expected to be a key-value map`)
|
||||
}
|
||||
|
||||
// Parse key-value entries in "coder_workspace_tags"
|
||||
for _, tagItem := range tagsExpr.Items {
|
||||
key, err := previewFileContent(tagItem.KeyExpr.Range())
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("can't preview the resource file: %v", err)
|
||||
return nil, nil, xerrors.Errorf("can't preview the resource file: %v", err)
|
||||
}
|
||||
key = strings.Trim(key, `"`)
|
||||
|
||||
value, err := previewFileContent(tagItem.ValueExpr.Range())
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("can't preview the resource file: %v", err)
|
||||
return nil, nil, xerrors.Errorf("can't preview the resource file: %v", err)
|
||||
}
|
||||
|
||||
if _, ok := tags[key]; ok {
|
||||
return nil, xerrors.Errorf(`workspace tag %q is defined multiple times`, key)
|
||||
return nil, nil, xerrors.Errorf(`workspace tag %q is defined multiple times`, key)
|
||||
}
|
||||
tags[key] = value
|
||||
|
||||
// Find values referenced by the expression.
|
||||
refVars := referencedVariablesExpr(tagItem.ValueExpr)
|
||||
for _, refVar := range refVars {
|
||||
requiredVars[refVar] = struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
p.logger.Debug(ctx, "found workspace tags", slog.F("tags", maps.Keys(tags)), slog.F("skipped", skipped))
|
||||
return tags, nil
|
||||
|
||||
requiredVarNames := maps.Keys(requiredVars)
|
||||
slices.Sort(requiredVarNames)
|
||||
p.logger.Debug(ctx, "found workspace tags", slog.F("tags", maps.Keys(tags)), slog.F("skipped", skipped), slog.F("required_vars", requiredVarNames))
|
||||
return tags, requiredVars, nil
|
||||
}
|
||||
|
||||
// referencedVariablesExpr determines the variables referenced in expr
|
||||
// and returns the names of those variables.
|
||||
func referencedVariablesExpr(expr hclsyntax.Expression) (names []string) {
|
||||
var parts []string
|
||||
for _, expVar := range expr.Variables() {
|
||||
for _, tr := range expVar {
|
||||
switch v := tr.(type) {
|
||||
case hcl.TraverseRoot:
|
||||
parts = append(parts, v.Name)
|
||||
case hcl.TraverseAttr:
|
||||
parts = append(parts, v.Name)
|
||||
default: // skip
|
||||
}
|
||||
}
|
||||
|
||||
cleaned := cleanupTraversalName(parts)
|
||||
names = append(names, strings.Join(cleaned, "."))
|
||||
}
|
||||
return names
|
||||
}
|
||||
|
||||
// cleanupTraversalName chops off extraneous pieces of the traversal.
|
||||
// for example:
|
||||
// - var.foo -> unchanged
|
||||
// - data.coder_parameter.bar.value -> data.coder_parameter.bar
|
||||
// - null_resource.baz.zap -> null_resource.baz
|
||||
func cleanupTraversalName(parts []string) []string {
|
||||
if len(parts) == 0 {
|
||||
return parts
|
||||
}
|
||||
if len(parts) > 3 && parts[0] == "data" {
|
||||
return parts[:3]
|
||||
}
|
||||
if len(parts) > 2 {
|
||||
return parts[:2]
|
||||
}
|
||||
return parts
|
||||
}
|
||||
|
||||
func (p *Parser) WorkspaceTagDefaults(ctx context.Context) (map[string]string, error) {
|
||||
// This only gets us the expressions. We need to evaluate them.
|
||||
// Example: var.region -> "us"
|
||||
tags, err := p.WorkspaceTags(ctx)
|
||||
tags, requiredVars, err := p.WorkspaceTags(ctx)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("extract workspace tags: %w", err)
|
||||
}
|
||||
|
||||
if len(tags) == 0 {
|
||||
return map[string]string{}, nil
|
||||
}
|
||||
|
||||
// To evaluate the expressions, we need to load the default values for
|
||||
// variables and parameters.
|
||||
varsDefaults, err := p.VariableDefaults(ctx)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("load variable defaults: %w", err)
|
||||
}
|
||||
paramsDefaults, err := p.CoderParameterDefaults(ctx)
|
||||
paramsDefaults, err := p.CoderParameterDefaults(ctx, varsDefaults, requiredVars)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("load parameter defaults: %w", err)
|
||||
}
|
||||
@@ -247,10 +301,10 @@ func WriteArchive(bs []byte, mimetype string, path string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// VariableDefaults returns the default values for all variables passed to it.
|
||||
// VariableDefaults returns the default values for all variables in the module.
|
||||
func (p *Parser) VariableDefaults(ctx context.Context) (map[string]string, error) {
|
||||
// iterate through vars to get the default values for all
|
||||
// variables.
|
||||
// required variables.
|
||||
m := make(map[string]string)
|
||||
for _, v := range p.module.Variables {
|
||||
if v == nil {
|
||||
@@ -268,7 +322,7 @@ func (p *Parser) VariableDefaults(ctx context.Context) (map[string]string, error
|
||||
|
||||
// CoderParameterDefaults returns the default values of all coder_parameter data sources
|
||||
// in the parsed module.
|
||||
func (p *Parser) CoderParameterDefaults(ctx context.Context) (map[string]string, error) {
|
||||
func (p *Parser) CoderParameterDefaults(ctx context.Context, varsDefaults map[string]string, names map[string]struct{}) (map[string]string, error) {
|
||||
defaultsM := make(map[string]string)
|
||||
var (
|
||||
skipped []string
|
||||
@@ -281,12 +335,18 @@ func (p *Parser) CoderParameterDefaults(ctx context.Context) (map[string]string,
|
||||
continue
|
||||
}
|
||||
|
||||
if dataResource.Type != "coder_parameter" {
|
||||
skipped = append(skipped, strings.Join([]string{"data", dataResource.Type, dataResource.Name}, "."))
|
||||
if !strings.HasSuffix(dataResource.Pos.Filename, ".tf") {
|
||||
continue
|
||||
}
|
||||
|
||||
if !strings.HasSuffix(dataResource.Pos.Filename, ".tf") {
|
||||
needle := strings.Join([]string{"data", dataResource.Type, dataResource.Name}, ".")
|
||||
if dataResource.Type != "coder_parameter" {
|
||||
skipped = append(skipped, needle)
|
||||
continue
|
||||
}
|
||||
|
||||
if _, found := names[needle]; !found {
|
||||
skipped = append(skipped, needle)
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -316,6 +376,7 @@ func (p *Parser) CoderParameterDefaults(ctx context.Context) (map[string]string,
|
||||
}
|
||||
|
||||
if _, ok := resContent.Attributes["default"]; !ok {
|
||||
p.logger.Warn(ctx, "coder_parameter data source does not have a default value", slog.F("name", dataResource.Name))
|
||||
defaultsM[dataResource.Name] = ""
|
||||
} else {
|
||||
expr := resContent.Attributes["default"].Expr
|
||||
@@ -323,7 +384,20 @@ func (p *Parser) CoderParameterDefaults(ctx context.Context) (map[string]string,
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("can't preview the resource file: %v", err)
|
||||
}
|
||||
defaultsM[dataResource.Name] = strings.Trim(value, `"`)
|
||||
// Issue #15795: the "default" value could also be an expression we need
|
||||
// to evaluate.
|
||||
// TODO: should we support coder_parameter default values that reference other coder_parameter data sources?
|
||||
evalCtx := BuildEvalContext(varsDefaults, nil)
|
||||
val, diags := expr.Value(evalCtx)
|
||||
if diags.HasErrors() {
|
||||
return nil, xerrors.Errorf("failed to evaluate coder_parameter %q default value %q: %s", dataResource.Name, value, diags.Error())
|
||||
}
|
||||
// Do not use "val.AsString()" as it can panic
|
||||
strVal, err := CtyValueString(val)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("failed to marshal coder_parameter %q default value %q as string: %s", dataResource.Name, value, err)
|
||||
}
|
||||
defaultsM[dataResource.Name] = strings.Trim(strVal, `"`)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -341,7 +415,7 @@ func evaluateWorkspaceTags(varsDefaults, paramsDefaults, workspaceTags map[strin
|
||||
}
|
||||
// We only add variables and coder_parameter data sources. Anything else will be
|
||||
// undefined and will raise a Terraform error.
|
||||
evalCtx := buildEvalContext(varsDefaults, paramsDefaults)
|
||||
evalCtx := BuildEvalContext(varsDefaults, paramsDefaults)
|
||||
tags := make(map[string]string)
|
||||
for workspaceTagKey, workspaceTagValue := range workspaceTags {
|
||||
expr, diags := hclsyntax.ParseExpression([]byte(workspaceTagValue), "expression.hcl", hcl.InitialPos)
|
||||
@@ -355,7 +429,7 @@ func evaluateWorkspaceTags(varsDefaults, paramsDefaults, workspaceTags map[strin
|
||||
}
|
||||
|
||||
// Do not use "val.AsString()" as it can panic
|
||||
str, err := ctyValueString(val)
|
||||
str, err := CtyValueString(val)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("failed to marshal workspace tag key %q value %q as string: %s", workspaceTagKey, workspaceTagValue, err)
|
||||
}
|
||||
@@ -381,16 +455,17 @@ func validWorkspaceTagValues(tags map[string]string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func buildEvalContext(varDefaults map[string]string, paramDefaults map[string]string) *hcl.EvalContext {
|
||||
// BuildEvalContext builds an evaluation context for the given variable and parameter defaults.
|
||||
func BuildEvalContext(vars map[string]string, params map[string]string) *hcl.EvalContext {
|
||||
varDefaultsM := map[string]cty.Value{}
|
||||
for varName, varDefault := range varDefaults {
|
||||
for varName, varDefault := range vars {
|
||||
varDefaultsM[varName] = cty.MapVal(map[string]cty.Value{
|
||||
"value": cty.StringVal(varDefault),
|
||||
})
|
||||
}
|
||||
|
||||
paramDefaultsM := map[string]cty.Value{}
|
||||
for paramName, paramDefault := range paramDefaults {
|
||||
for paramName, paramDefault := range params {
|
||||
paramDefaultsM[paramName] = cty.MapVal(map[string]cty.Value{
|
||||
"value": cty.StringVal(paramDefault),
|
||||
})
|
||||
@@ -482,7 +557,10 @@ func compareSourcePos(x, y tfconfig.SourcePos) bool {
|
||||
return x.Line < y.Line
|
||||
}
|
||||
|
||||
func ctyValueString(val cty.Value) (string, error) {
|
||||
// CtyValueString converts a cty.Value to a string.
|
||||
// It supports only primitive types - bool, number, and string.
|
||||
// As a special case, it also supports map[string]interface{} with key "value".
|
||||
func CtyValueString(val cty.Value) (string, error) {
|
||||
switch val.Type() {
|
||||
case cty.Bool:
|
||||
if val.True() {
|
||||
@@ -500,7 +578,7 @@ func ctyValueString(val cty.Value) (string, error) {
|
||||
if !ok {
|
||||
return "", xerrors.Errorf("map does not have key 'value'")
|
||||
}
|
||||
return ctyValueString(valval)
|
||||
return CtyValueString(valval)
|
||||
default:
|
||||
return "", xerrors.Errorf("only primitive types are supported - bool, number, and string")
|
||||
}
|
||||
@@ -520,7 +598,11 @@ func interfaceToString(i interface{}) (string, error) {
|
||||
return strconv.FormatFloat(v, 'f', -1, 64), nil
|
||||
case bool:
|
||||
return strconv.FormatBool(v), nil
|
||||
default:
|
||||
return "", xerrors.Errorf("unsupported type %T", v)
|
||||
default: // just try to JSON-encode it.
|
||||
var sb strings.Builder
|
||||
if err := json.NewEncoder(&sb).Encode(i); err != nil {
|
||||
return "", xerrors.Errorf("convert %T: %w", v, err)
|
||||
}
|
||||
return strings.TrimSpace(sb.String()), nil
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,7 +34,7 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) {
|
||||
name: "single text file",
|
||||
files: map[string]string{
|
||||
"file.txt": `
|
||||
hello world`,
|
||||
hello world`,
|
||||
},
|
||||
expectTags: map[string]string{},
|
||||
expectError: "",
|
||||
@@ -49,8 +49,10 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
data "base" "ours" {
|
||||
all = true
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
@@ -71,8 +73,10 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
data "base" "ours" {
|
||||
all = true
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
@@ -94,8 +98,13 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
data "base" "ours" {
|
||||
all = true
|
||||
variable "unrelated" {
|
||||
type = bool
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
@@ -114,6 +123,77 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) {
|
||||
expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"},
|
||||
expectError: "",
|
||||
},
|
||||
{
|
||||
name: "main.tf with parameter that has default value from dynamic value",
|
||||
files: map[string]string{
|
||||
"main.tf": `
|
||||
provider "foo" {}
|
||||
resource "foo_bar" "baz" {}
|
||||
variable "region" {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
variable "az" {
|
||||
type = string
|
||||
default = "${""}${"a"}"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
type = "string"
|
||||
default = var.az
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"platform" = "kubernetes",
|
||||
"cluster" = "${"devel"}${"opers"}"
|
||||
"region" = var.region
|
||||
"az" = data.coder_parameter.az.value
|
||||
}
|
||||
}`,
|
||||
},
|
||||
expectTags: map[string]string{"platform": "kubernetes", "cluster": "developers", "region": "us", "az": "a"},
|
||||
expectError: "",
|
||||
},
|
||||
{
|
||||
name: "main.tf with parameter that has default value from another parameter",
|
||||
files: map[string]string{
|
||||
"main.tf": `
|
||||
provider "foo" {}
|
||||
resource "foo_bar" "baz" {}
|
||||
variable "region" {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
type = string
|
||||
default = "${""}${"a"}"
|
||||
}
|
||||
data "coder_parameter" "az2" {
|
||||
name = "az"
|
||||
type = "string"
|
||||
default = data.coder_parameter.az.value
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"platform" = "kubernetes",
|
||||
"cluster" = "${"devel"}${"opers"}"
|
||||
"region" = var.region
|
||||
"az" = data.coder_parameter.az2.value
|
||||
}
|
||||
}`,
|
||||
},
|
||||
expectError: "Unknown variable; There is no variable named \"data\".",
|
||||
},
|
||||
{
|
||||
name: "main.tf with multiple valid workspace tags",
|
||||
files: map[string]string{
|
||||
@@ -128,8 +208,10 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) {
|
||||
type = string
|
||||
default = "eu"
|
||||
}
|
||||
data "base" "ours" {
|
||||
all = true
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
@@ -168,8 +250,10 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
data "base" "ours" {
|
||||
all = true
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
@@ -196,8 +280,10 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
data "base" "ours" {
|
||||
all = true
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
@@ -233,8 +319,10 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) {
|
||||
variable "notregion" {
|
||||
type = string
|
||||
}
|
||||
data "base" "ours" {
|
||||
all = true
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
@@ -265,8 +353,10 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
data "base" "ours" {
|
||||
all = true
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
@@ -301,8 +391,10 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) {
|
||||
type = string
|
||||
default = "us"
|
||||
}
|
||||
data "base" "ours" {
|
||||
all = true
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
@@ -335,8 +427,10 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) {
|
||||
type = string
|
||||
default = "region.us"
|
||||
}
|
||||
data "base" "ours" {
|
||||
all = true
|
||||
data "coder_parameter" "unrelated" {
|
||||
name = "unrelated"
|
||||
type = "list(string)"
|
||||
default = jsonencode(["a", "b"])
|
||||
}
|
||||
data "coder_parameter" "az" {
|
||||
name = "az"
|
||||
@@ -355,6 +449,103 @@ func Test_WorkspaceTagDefaultsFromFile(t *testing.T) {
|
||||
expectTags: nil,
|
||||
expectError: `Function calls not allowed; Functions may not be called here.`,
|
||||
},
|
||||
{
|
||||
name: "supported types",
|
||||
files: map[string]string{
|
||||
"main.tf": `
|
||||
variable "stringvar" {
|
||||
type = string
|
||||
default = "a"
|
||||
}
|
||||
variable "numvar" {
|
||||
type = number
|
||||
default = 1
|
||||
}
|
||||
variable "boolvar" {
|
||||
type = bool
|
||||
default = true
|
||||
}
|
||||
variable "listvar" {
|
||||
type = list(string)
|
||||
default = ["a"]
|
||||
}
|
||||
variable "mapvar" {
|
||||
type = map(string)
|
||||
default = {"a": "b"}
|
||||
}
|
||||
data "coder_parameter" "stringparam" {
|
||||
name = "stringparam"
|
||||
type = "string"
|
||||
default = "a"
|
||||
}
|
||||
data "coder_parameter" "numparam" {
|
||||
name = "numparam"
|
||||
type = "number"
|
||||
default = 1
|
||||
}
|
||||
data "coder_parameter" "boolparam" {
|
||||
name = "boolparam"
|
||||
type = "bool"
|
||||
default = true
|
||||
}
|
||||
data "coder_parameter" "listparam" {
|
||||
name = "listparam"
|
||||
type = "list(string)"
|
||||
default = "[\"a\", \"b\"]"
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"stringvar" = var.stringvar
|
||||
"numvar" = var.numvar
|
||||
"boolvar" = var.boolvar
|
||||
"listvar" = var.listvar
|
||||
"mapvar" = var.mapvar
|
||||
"stringparam" = data.coder_parameter.stringparam.value
|
||||
"numparam" = data.coder_parameter.numparam.value
|
||||
"boolparam" = data.coder_parameter.boolparam.value
|
||||
"listparam" = data.coder_parameter.listparam.value
|
||||
}
|
||||
}`,
|
||||
},
|
||||
expectTags: map[string]string{
|
||||
"stringvar": "a",
|
||||
"numvar": "1",
|
||||
"boolvar": "true",
|
||||
"listvar": `["a"]`,
|
||||
"mapvar": `{"a":"b"}`,
|
||||
"stringparam": "a",
|
||||
"numparam": "1",
|
||||
"boolparam": "true",
|
||||
"listparam": `["a", "b"]`,
|
||||
},
|
||||
expectError: ``,
|
||||
},
|
||||
{
|
||||
name: "overlapping var name",
|
||||
files: map[string]string{
|
||||
`main.tf`: `
|
||||
variable "a" {
|
||||
type = string
|
||||
default = "1"
|
||||
}
|
||||
variable "unused" {
|
||||
type = map(string)
|
||||
default = {"a" : "b"}
|
||||
}
|
||||
variable "ab" {
|
||||
description = "This is a variable of type string"
|
||||
type = string
|
||||
default = "ab"
|
||||
}
|
||||
data "coder_workspace_tags" "tags" {
|
||||
tags = {
|
||||
"foo": "bar",
|
||||
"a": var.a,
|
||||
}
|
||||
}`,
|
||||
},
|
||||
expectTags: map[string]string{"foo": "bar", "a": "1"},
|
||||
},
|
||||
} {
|
||||
tc := tc
|
||||
t.Run(tc.name+"/tar", func(t *testing.T) {
|
||||
@@ -438,7 +629,7 @@ func BenchmarkWorkspaceTagDefaultsFromFile(b *testing.B) {
|
||||
tfparse.WriteArchive(tarFile, "application/x-tar", tmpDir)
|
||||
parser, diags := tfparse.New(tmpDir, tfparse.WithLogger(logger))
|
||||
require.NoError(b, diags.Err())
|
||||
_, err := parser.WorkspaceTags(ctx)
|
||||
_, _, err := parser.WorkspaceTags(ctx)
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
@@ -452,7 +643,7 @@ func BenchmarkWorkspaceTagDefaultsFromFile(b *testing.B) {
|
||||
tfparse.WriteArchive(zipFile, "application/zip", tmpDir)
|
||||
parser, diags := tfparse.New(tmpDir, tfparse.WithLogger(logger))
|
||||
require.NoError(b, diags.Err())
|
||||
_, err := parser.WorkspaceTags(ctx)
|
||||
_, _, err := parser.WorkspaceTags(ctx)
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
|
||||
+5
-3
@@ -232,13 +232,15 @@ export const createTemplate = async (
|
||||
* random name.
|
||||
*/
|
||||
export const createGroup = async (page: Page): Promise<string> => {
|
||||
await page.goto("/groups/create", { waitUntil: "domcontentloaded" });
|
||||
await expectUrl(page).toHavePathName("/groups/create");
|
||||
await page.goto("/deployment/groups/create", {
|
||||
waitUntil: "domcontentloaded",
|
||||
});
|
||||
await expectUrl(page).toHavePathName("/deployment/groups/create");
|
||||
|
||||
const name = randomName();
|
||||
await page.getByLabel("Name", { exact: true }).fill(name);
|
||||
await page.getByTestId("form-submit").click();
|
||||
await expectUrl(page).toHavePathName(`/groups/${name}`);
|
||||
await expectUrl(page).toHavePathName(`/deployment/groups/${name}`);
|
||||
return name;
|
||||
};
|
||||
|
||||
|
||||
+9
-1
@@ -682,12 +682,20 @@ class ApiMethods {
|
||||
|
||||
/**
|
||||
* @param organization Can be the organization's ID or name
|
||||
* @param tags to filter provisioner daemons by.
|
||||
*/
|
||||
getProvisionerDaemonsByOrganization = async (
|
||||
organization: string,
|
||||
tags?: Record<string, string>,
|
||||
): Promise<TypesGen.ProvisionerDaemon[]> => {
|
||||
const params = new URLSearchParams();
|
||||
|
||||
if (tags) {
|
||||
params.append("tags", JSON.stringify(tags));
|
||||
}
|
||||
|
||||
const response = await this.axios.get<TypesGen.ProvisionerDaemon[]>(
|
||||
`/api/v2/organizations/${organization}/provisionerdaemons`,
|
||||
`/api/v2/organizations/${organization}/provisionerdaemons?${params.toString()}`,
|
||||
);
|
||||
return response.data;
|
||||
};
|
||||
|
||||
@@ -115,16 +115,18 @@ export const organizations = () => {
|
||||
};
|
||||
};
|
||||
|
||||
export const getProvisionerDaemonsKey = (organization: string) => [
|
||||
"organization",
|
||||
organization,
|
||||
"provisionerDaemons",
|
||||
];
|
||||
export const getProvisionerDaemonsKey = (
|
||||
organization: string,
|
||||
tags?: Record<string, string>,
|
||||
) => ["organization", organization, tags, "provisionerDaemons"];
|
||||
|
||||
export const provisionerDaemons = (organization: string) => {
|
||||
export const provisionerDaemons = (
|
||||
organization: string,
|
||||
tags?: Record<string, string>,
|
||||
) => {
|
||||
return {
|
||||
queryKey: getProvisionerDaemonsKey(organization),
|
||||
queryFn: () => API.getProvisionerDaemonsByOrganization(organization),
|
||||
queryKey: getProvisionerDaemonsKey(organization, tags),
|
||||
queryFn: () => API.getProvisionerDaemonsByOrganization(organization, tags),
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
Generated
+1
@@ -2012,6 +2012,7 @@ export interface WorkspaceBuild {
|
||||
readonly max_deadline?: string;
|
||||
readonly status: WorkspaceStatus;
|
||||
readonly daily_cost: number;
|
||||
readonly matched_provisioners?: MatchedProvisioners;
|
||||
}
|
||||
|
||||
// From codersdk/workspacebuilds.go
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import MuiAlert, {
|
||||
type AlertColor as MuiAlertColor,
|
||||
type AlertProps as MuiAlertProps,
|
||||
// biome-ignore lint/nursery/noRestrictedImports: Used as base component
|
||||
} from "@mui/material/Alert";
|
||||
@@ -11,6 +12,8 @@ import {
|
||||
useState,
|
||||
} from "react";
|
||||
|
||||
export type AlertColor = MuiAlertColor;
|
||||
|
||||
export type AlertProps = MuiAlertProps & {
|
||||
actions?: ReactNode;
|
||||
dismissible?: boolean;
|
||||
|
||||
@@ -147,6 +147,9 @@ const DeploymentSettingsNavigation: FC<DeploymentSettingsNavigationProps> = ({
|
||||
{permissions.viewAllUsers && (
|
||||
<SidebarNavSubItem href="users">Users</SidebarNavSubItem>
|
||||
)}
|
||||
{permissions.viewAnyGroup && (
|
||||
<SidebarNavSubItem href="groups">Groups</SidebarNavSubItem>
|
||||
)}
|
||||
{permissions.viewNotificationTemplate && (
|
||||
<SidebarNavSubItem href="notifications">
|
||||
<Stack direction="row" alignItems="center" spacing={1}>
|
||||
|
||||
@@ -0,0 +1,48 @@
|
||||
import type { Meta, StoryObj } from "@storybook/react";
|
||||
import { chromatic } from "testHelpers/chromatic";
|
||||
import { AlertVariant, ProvisionerAlert } from "./ProvisionerAlert";
|
||||
|
||||
const meta: Meta<typeof ProvisionerAlert> = {
|
||||
title: "modules/provisioners/ProvisionerAlert",
|
||||
parameters: {
|
||||
chromatic,
|
||||
layout: "centered",
|
||||
},
|
||||
component: ProvisionerAlert,
|
||||
args: {
|
||||
title: "Title",
|
||||
detail: "Detail",
|
||||
severity: "info",
|
||||
tags: { tag: "tagValue" },
|
||||
},
|
||||
};
|
||||
|
||||
export default meta;
|
||||
type Story = StoryObj<typeof ProvisionerAlert>;
|
||||
|
||||
export const Info: Story = {};
|
||||
|
||||
export const InfoInline: Story = {
|
||||
args: {
|
||||
variant: AlertVariant.Inline,
|
||||
},
|
||||
};
|
||||
|
||||
export const Warning: Story = {
|
||||
args: {
|
||||
severity: "warning",
|
||||
},
|
||||
};
|
||||
|
||||
export const WarningInline: Story = {
|
||||
args: {
|
||||
severity: "warning",
|
||||
variant: AlertVariant.Inline,
|
||||
},
|
||||
};
|
||||
|
||||
export const NullTags: Story = {
|
||||
args: {
|
||||
tags: undefined,
|
||||
},
|
||||
};
|
||||
@@ -0,0 +1,65 @@
|
||||
import type { Theme } from "@emotion/react";
|
||||
import AlertTitle from "@mui/material/AlertTitle";
|
||||
import { Alert, type AlertColor } from "components/Alert/Alert";
|
||||
import { AlertDetail } from "components/Alert/Alert";
|
||||
import { Stack } from "components/Stack/Stack";
|
||||
import { ProvisionerTag } from "modules/provisioners/ProvisionerTag";
|
||||
import type { FC } from "react";
|
||||
|
||||
export enum AlertVariant {
|
||||
// Alerts are usually styled with a full rounded border and meant to use as a visually distinct element of the page.
|
||||
// The Standalone variant conforms to this styling.
|
||||
Standalone = "Standalone",
|
||||
// We show these same alerts in environments such as log drawers where we stream the logs from builds.
|
||||
// In this case the full border is incongruent with the surroundings of the component.
|
||||
// The Inline variant replaces the full rounded border with a left border and a divider so that it complements the surroundings.
|
||||
Inline = "Inline",
|
||||
}
|
||||
|
||||
interface ProvisionerAlertProps {
|
||||
title: string;
|
||||
detail: string;
|
||||
severity: AlertColor;
|
||||
tags: Record<string, string>;
|
||||
variant?: AlertVariant;
|
||||
}
|
||||
|
||||
const getAlertStyles = (variant: AlertVariant, severity: AlertColor) => {
|
||||
switch (variant) {
|
||||
case AlertVariant.Inline:
|
||||
return {
|
||||
css: (theme: Theme) => ({
|
||||
borderRadius: 0,
|
||||
border: 0,
|
||||
borderBottom: `1px solid ${theme.palette.divider}`,
|
||||
borderLeft: `2px solid ${theme.palette[severity].main}`,
|
||||
}),
|
||||
};
|
||||
default:
|
||||
return {};
|
||||
}
|
||||
};
|
||||
|
||||
export const ProvisionerAlert: FC<ProvisionerAlertProps> = ({
|
||||
title,
|
||||
detail,
|
||||
severity,
|
||||
tags,
|
||||
variant = AlertVariant.Standalone,
|
||||
}) => {
|
||||
return (
|
||||
<Alert severity={severity} {...getAlertStyles(variant, severity)}>
|
||||
<AlertTitle>{title}</AlertTitle>
|
||||
<AlertDetail>
|
||||
<div>{detail}</div>
|
||||
<Stack direction="row" spacing={1} wrap="wrap">
|
||||
{Object.entries(tags ?? {})
|
||||
.filter(([key]) => key !== "owner")
|
||||
.map(([key, value]) => (
|
||||
<ProvisionerTag key={key} tagName={key} tagValue={value} />
|
||||
))}
|
||||
</Stack>
|
||||
</AlertDetail>
|
||||
</Alert>
|
||||
);
|
||||
};
|
||||
@@ -0,0 +1,71 @@
|
||||
import type { Meta, StoryObj } from "@storybook/react";
|
||||
import { chromatic } from "testHelpers/chromatic";
|
||||
import { MockTemplateVersion } from "testHelpers/entities";
|
||||
import { AlertVariant } from "./ProvisionerAlert";
|
||||
import { ProvisionerStatusAlert } from "./ProvisionerStatusAlert";
|
||||
|
||||
// Storybook metadata for ProvisionerStatusAlert.
const meta: Meta<typeof ProvisionerStatusAlert> = {
	title: "modules/provisioners/ProvisionerStatusAlert",
	parameters: {
		chromatic,
		layout: "centered",
	},
	component: ProvisionerStatusAlert,
	// Default args: no matching/available provisioners; stories override below.
	args: {
		matchingProvisioners: 0,
		availableProvisioners: 0,
		tags: MockTemplateVersion.job.tags,
	},
};

export default meta;
type Story = StoryObj<typeof ProvisionerStatusAlert>;
|
||||
|
||||
// One matching, one available provisioner: the informational "enqueued" state.
export const HealthyProvisioners: Story = {
	args: {
		matchingProvisioners: 1,
		availableProvisioners: 1,
	},
};

// Both counts undefined — e.g. provisioner data not loaded yet.
export const UndefinedMatchingProvisioners: Story = {
	args: {
		matchingProvisioners: undefined,
		availableProvisioners: undefined,
	},
};

// Matching count known but availability unknown.
export const UndefinedAvailableProvisioners: Story = {
	args: {
		matchingProvisioners: 1,
		availableProvisioners: undefined,
	},
};

// No provisioner accepts the required tags.
export const NoMatchingProvisioners: Story = {
	args: {
		matchingProvisioners: 0,
	},
};

// Same as NoMatchingProvisioners, with the Inline (log drawer) styling.
export const NoMatchingProvisionersInLogs: Story = {
	args: {
		matchingProvisioners: 0,
		variant: AlertVariant.Inline,
	},
};

// A provisioner matches, but none are currently available.
export const NoAvailableProvisioners: Story = {
	args: {
		matchingProvisioners: 1,
		availableProvisioners: 0,
	},
};

// Same as NoAvailableProvisioners, with the Inline (log drawer) styling.
export const NoAvailableProvisionersInLogs: Story = {
	args: {
		matchingProvisioners: 1,
		availableProvisioners: 0,
		variant: AlertVariant.Inline,
	},
};
|
||||
@@ -0,0 +1,50 @@
|
||||
import type { AlertColor } from "components/Alert/Alert";
|
||||
import type { FC } from "react";
|
||||
import { AlertVariant, ProvisionerAlert } from "./ProvisionerAlert";
|
||||
|
||||
// Props for ProvisionerStatusAlert.
interface ProvisionerStatusAlertProps {
	// Number of provisioners that accept the job's tags; undefined while unknown.
	matchingProvisioners: number | undefined;
	// Number of matching provisioners currently available; undefined while unknown.
	availableProvisioners: number | undefined;
	// Provisioner job tags to display in the alert.
	tags: Record<string, string>;
	// Visual treatment forwarded to ProvisionerAlert; defaults to Standalone.
	variant?: AlertVariant;
}
|
||||
|
||||
export const ProvisionerStatusAlert: FC<ProvisionerStatusAlertProps> = ({
|
||||
matchingProvisioners,
|
||||
availableProvisioners,
|
||||
tags,
|
||||
variant = AlertVariant.Standalone,
|
||||
}) => {
|
||||
let title: string;
|
||||
let detail: string;
|
||||
let severity: AlertColor;
|
||||
switch (true) {
|
||||
case matchingProvisioners === 0:
|
||||
title = "Build pending provisioner deployment";
|
||||
detail =
|
||||
"Your build has been enqueued, but there are no provisioners that accept the required tags. Once a compatible provisioner becomes available, your build will continue. Please contact your administrator.";
|
||||
severity = "warning";
|
||||
break;
|
||||
case availableProvisioners === 0:
|
||||
title = "Build delayed";
|
||||
detail =
|
||||
"Provisioners that accept the required tags have not responded for longer than expected. This may delay your build. Please contact your administrator if your build does not complete.";
|
||||
severity = "warning";
|
||||
break;
|
||||
default:
|
||||
title = "Build enqueued";
|
||||
detail =
|
||||
"Your build has been enqueued and will begin once a provisioner becomes available to process it.";
|
||||
severity = "info";
|
||||
}
|
||||
|
||||
return (
|
||||
<ProvisionerAlert
|
||||
title={title}
|
||||
detail={detail}
|
||||
severity={severity}
|
||||
tags={tags}
|
||||
variant={variant}
|
||||
/>
|
||||
);
|
||||
};
|
||||
@@ -34,6 +34,42 @@ export const MissingVariables: Story = {
|
||||
},
|
||||
};
|
||||
|
||||
// No provisioner matches the version's tags.
export const NoProvisioners: Story = {
	args: {
		templateVersion: {
			...MockTemplateVersion,
			matched_provisioners: {
				count: 0,
				available: 0,
			},
		},
	},
};

// A provisioner matches, but none are currently available.
export const ProvisionersUnhealthy: Story = {
	args: {
		templateVersion: {
			...MockTemplateVersion,
			matched_provisioners: {
				count: 1,
				available: 0,
			},
		},
	},
};

// A matching provisioner is available.
export const ProvisionersHealthy: Story = {
	args: {
		templateVersion: {
			...MockTemplateVersion,
			matched_provisioners: {
				count: 1,
				available: 1,
			},
		},
	},
};
|
||||
|
||||
export const Logs: Story = {
|
||||
args: {
|
||||
templateVersion: {
|
||||
|
||||
@@ -8,6 +8,8 @@ import { visuallyHidden } from "@mui/utils";
|
||||
import { JobError } from "api/queries/templates";
|
||||
import type { TemplateVersion } from "api/typesGenerated";
|
||||
import { Loader } from "components/Loader/Loader";
|
||||
import { AlertVariant } from "modules/provisioners/ProvisionerAlert";
|
||||
import { ProvisionerStatusAlert } from "modules/provisioners/ProvisionerStatusAlert";
|
||||
import { useWatchVersionLogs } from "modules/templates/useWatchVersionLogs";
|
||||
import { WorkspaceBuildLogs } from "modules/workspaces/WorkspaceBuildLogs/WorkspaceBuildLogs";
|
||||
import { type FC, useLayoutEffect, useRef } from "react";
|
||||
@@ -27,6 +29,10 @@ export const BuildLogsDrawer: FC<BuildLogsDrawerProps> = ({
|
||||
variablesSectionRef,
|
||||
...drawerProps
|
||||
}) => {
|
||||
const matchingProvisioners = templateVersion?.matched_provisioners?.count;
|
||||
const availableProvisioners =
|
||||
templateVersion?.matched_provisioners?.available;
|
||||
|
||||
const logs = useWatchVersionLogs(templateVersion);
|
||||
const logsContainer = useRef<HTMLDivElement>(null);
|
||||
|
||||
@@ -65,6 +71,8 @@ export const BuildLogsDrawer: FC<BuildLogsDrawerProps> = ({
|
||||
</IconButton>
|
||||
</header>
|
||||
|
||||
{}
|
||||
|
||||
{isMissingVariables ? (
|
||||
<MissingVariablesBanner
|
||||
onFillVariables={() => {
|
||||
@@ -82,7 +90,15 @@ export const BuildLogsDrawer: FC<BuildLogsDrawerProps> = ({
|
||||
<WorkspaceBuildLogs logs={logs} css={{ border: 0 }} />
|
||||
</section>
|
||||
) : (
|
||||
<Loader />
|
||||
<>
|
||||
<ProvisionerStatusAlert
|
||||
matchingProvisioners={matchingProvisioners}
|
||||
availableProvisioners={availableProvisioners}
|
||||
tags={templateVersion?.job.tags ?? {}}
|
||||
variant={AlertVariant.Inline}
|
||||
/>
|
||||
<Loader />
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</Drawer>
|
||||
|
||||
@@ -51,7 +51,7 @@ export const authMethodLanguage = {
|
||||
<Link
|
||||
target="_blank"
|
||||
rel="noopener"
|
||||
href="https://coder.com/docs/admin/auth#disable-built-in-authentication"
|
||||
href="https://coder.com/docs/admin/users/headless-auth"
|
||||
>
|
||||
documentation
|
||||
</Link>{" "}
|
||||
|
||||
+72
@@ -42,6 +42,7 @@ const meta: Meta<typeof GeneralSettingsPageView> = {
|
||||
deploymentDAUs: MockDeploymentDAUResponse,
|
||||
invalidExperiments: [],
|
||||
safeExperiments: [],
|
||||
entitlements: undefined,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -136,3 +137,74 @@ export const invalidExperimentsEnabled: Story = {
|
||||
invalidExperiments: ["invalid"],
|
||||
},
|
||||
};
|
||||
|
||||
// 75/100 seats used: utilization bar in its normal (primary) range.
export const WithLicenseUtilization: Story = {
	args: {
		entitlements: {
			...MockEntitlementsWithUserLimit,
			features: {
				...MockEntitlementsWithUserLimit.features,
				user_limit: {
					...MockEntitlementsWithUserLimit.features.user_limit,
					enabled: true,
					actual: 75,
					limit: 100,
					entitlement: "entitled",
				},
			},
		},
	},
};

// 95/100 seats used: approaching the limit (warning range).
export const HighLicenseUtilization: Story = {
	args: {
		entitlements: {
			...MockEntitlementsWithUserLimit,
			features: {
				...MockEntitlementsWithUserLimit.features,
				user_limit: {
					...MockEntitlementsWithUserLimit.features.user_limit,
					enabled: true,
					actual: 95,
					limit: 100,
					entitlement: "entitled",
				},
			},
		},
	},
};

// actual (100) intentionally exceeds limit (95): over-limit (error range).
export const ExceedsLicenseUtilization: Story = {
	args: {
		entitlements: {
			...MockEntitlementsWithUserLimit,
			features: {
				...MockEntitlementsWithUserLimit.features,
				user_limit: {
					...MockEntitlementsWithUserLimit.features.user_limit,
					enabled: true,
					actual: 100,
					limit: 95,
					entitlement: "entitled",
				},
			},
		},
	},
};
// user_limit disabled with zero counts: no utilization chart expected.
export const NoLicenseLimit: Story = {
	args: {
		entitlements: {
			...MockEntitlementsWithUserLimit,
			features: {
				...MockEntitlementsWithUserLimit.features,
				user_limit: {
					...MockEntitlementsWithUserLimit.features.user_limit,
					enabled: false,
					actual: 0,
					limit: 0,
					entitlement: "entitled",
				},
			},
		},
	},
};
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import AlertTitle from "@mui/material/AlertTitle";
|
||||
import LinearProgress from "@mui/material/LinearProgress";
|
||||
import type {
|
||||
DAUsResponse,
|
||||
Entitlements,
|
||||
@@ -36,6 +37,12 @@ export const GeneralSettingsPageView: FC<GeneralSettingsPageViewProps> = ({
|
||||
safeExperiments,
|
||||
invalidExperiments,
|
||||
}) => {
|
||||
const licenseUtilizationPercentage =
|
||||
entitlements?.features?.user_limit?.actual &&
|
||||
entitlements?.features?.user_limit?.limit
|
||||
? entitlements.features.user_limit.actual /
|
||||
entitlements.features.user_limit.limit
|
||||
: undefined;
|
||||
return (
|
||||
<>
|
||||
<SettingsHeader
|
||||
@@ -54,6 +61,37 @@ export const GeneralSettingsPageView: FC<GeneralSettingsPageViewProps> = ({
|
||||
</ChartSection>
|
||||
</div>
|
||||
)}
|
||||
{licenseUtilizationPercentage && (
|
||||
<ChartSection title="License Utilization">
|
||||
<LinearProgress
|
||||
variant="determinate"
|
||||
value={Math.min(licenseUtilizationPercentage * 100, 100)}
|
||||
color={
|
||||
licenseUtilizationPercentage < 0.9
|
||||
? "primary"
|
||||
: licenseUtilizationPercentage < 1
|
||||
? "warning"
|
||||
: "error"
|
||||
}
|
||||
css={{
|
||||
height: 24,
|
||||
borderRadius: 4,
|
||||
marginBottom: 8,
|
||||
}}
|
||||
/>
|
||||
<span
|
||||
css={{
|
||||
fontSize: "0.75rem",
|
||||
display: "block",
|
||||
textAlign: "right",
|
||||
}}
|
||||
>
|
||||
{Math.round(licenseUtilizationPercentage * 100)}% used (
|
||||
{entitlements!.features.user_limit.actual}/
|
||||
{entitlements!.features.user_limit.limit} users)
|
||||
</span>
|
||||
</ChartSection>
|
||||
)}
|
||||
{invalidExperiments.length > 0 && (
|
||||
<Alert severity="warning">
|
||||
<AlertTitle>Invalid experiments in use:</AlertTitle>
|
||||
|
||||
@@ -19,7 +19,7 @@ export const CreateGroupPage: FC = () => {
|
||||
<CreateGroupPageView
|
||||
onSubmit={async (data) => {
|
||||
const newGroup = await createGroupMutation.mutateAsync(data);
|
||||
navigate(`/groups/${newGroup.name}`);
|
||||
navigate(`/deployment/groups/${newGroup.name}`);
|
||||
}}
|
||||
error={createGroupMutation.error}
|
||||
isLoading={createGroupMutation.isLoading}
|
||||
|
||||
@@ -44,7 +44,7 @@ export const CreateGroupPageView: FC<CreateGroupPageViewProps> = ({
|
||||
initialTouched,
|
||||
});
|
||||
const getFieldHelpers = getFormHelpers<CreateGroupRequest>(form, error);
|
||||
const onCancel = () => navigate("/groups");
|
||||
const onCancel = () => navigate("/deployment/groups");
|
||||
|
||||
return (
|
||||
<Margins>
|
||||
|
||||
@@ -211,7 +211,7 @@ export const GroupPage: FC = () => {
|
||||
try {
|
||||
await deleteGroupMutation.mutateAsync(groupId);
|
||||
displaySuccess("Group deleted successfully.");
|
||||
navigate("/groups");
|
||||
navigate("/deployment/groups");
|
||||
} catch (error) {
|
||||
displayError(getErrorMessage(error, "Failed to delete group."));
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user