diff --git a/.coderabbit.yaml b/.coderabbit.yaml deleted file mode 100644 index 03acfa4335995..0000000000000 --- a/.coderabbit.yaml +++ /dev/null @@ -1,28 +0,0 @@ -# yaml-language-server: $schema=https://coderabbit.ai/integrations/schema.v2.json - -# CodeRabbit Configuration -# This configuration disables automatic reviews entirely - -language: "en-US" -early_access: false - -reviews: - # Disable automatic reviews for new PRs, but allow incremental reviews - auto_review: - enabled: false # Disable automatic review of new/updated PRs - drafts: false # Don't review draft PRs automatically - - # Other review settings (only apply if manually requested) - profile: "chill" - request_changes_workflow: false - high_level_summary: false - poem: false - review_status: false - collapse_walkthrough: true - high_level_summary_in_walkthrough: true - -chat: - auto_reply: true # Allow automatic chat replies - -# Note: With auto_review.enabled: false, CodeRabbit will only perform initial -# reviews when manually requested, but incremental reviews and chat replies remain enabled diff --git a/.devcontainer/filebrowser/install.sh b/.devcontainer/filebrowser/install.sh old mode 100644 new mode 100755 index 48158a38cd782..6e8d58a14bf80 --- a/.devcontainer/filebrowser/install.sh +++ b/.devcontainer/filebrowser/install.sh @@ -8,7 +8,15 @@ printf "%sInstalling filebrowser\n\n" "${BOLD}" # Check if filebrowser is installed. if ! command -v filebrowser &>/dev/null; then - curl -fsSL https://raw.githubusercontent.com/filebrowser/get/master/get.sh | bash + VERSION="v2.42.1" + EXPECTED_HASH="7d83c0f077df10a8ec9bfd9bf6e745da5d172c3c768a322b0e50583a6bc1d3cc" + + curl -fsSL "https://github.com/filebrowser/filebrowser/releases/download/${VERSION}/linux-amd64-filebrowser.tar.gz" -o /tmp/filebrowser.tar.gz + echo "${EXPECTED_HASH} /tmp/filebrowser.tar.gz" | sha256sum -c + tar -xzf /tmp/filebrowser.tar.gz -C /tmp + sudo mv /tmp/filebrowser /usr/local/bin/ + sudo chmod +x /usr/local/bin/filebrowser + rm /tmp/filebrowser.tar.gz fi # Create entrypoint. diff --git a/.devcontainer/scripts/post_create.sh b/.devcontainer/scripts/post_create.sh index 8799908311431..50acf3b577b57 100755 --- a/.devcontainer/scripts/post_create.sh +++ b/.devcontainer/scripts/post_create.sh @@ -1,7 +1,7 @@ #!/bin/sh install_devcontainer_cli() { - npm install -g @devcontainers/cli + npm install -g @devcontainers/cli@0.80.0 --integrity=sha512-w2EaxgjyeVGyzfA/KUEZBhyXqu/5PyWNXcnrXsZOBrt3aN2zyGiHrXoG54TF6K0b5DSCF01Rt5fnIyrCeFzFKw== } install_ssh_config() { diff --git a/.editorconfig b/.editorconfig index 9415469de3c00..419ae5b6d16d2 100644 --- a/.editorconfig +++ b/.editorconfig @@ -18,3 +18,11 @@ indent_size = 2 [coderd/database/dump.sql] indent_style = space indent_size = 4 + +[coderd/database/queries/*.sql] +indent_style = tab +indent_size = 4 + +[coderd/database/migrations/*.sql] +indent_style = tab +indent_size = 4 diff --git a/.gitattributes b/.gitattributes index 1da452829a70a..ed396ce0044eb 100644 --- a/.gitattributes +++ b/.gitattributes @@ -15,6 +15,8 @@ provisionersdk/proto/*.go linguist-generated=true *.tfstate.json linguist-generated=true *.tfstate.dot linguist-generated=true *.tfplan.dot linguist-generated=true +site/e2e/google/protobuf/timestampGenerated.ts site/e2e/provisionerGenerated.ts linguist-generated=true +site/src/api/countriesGenerated.tsx linguist-generated=true +site/src/api/rbacresourcesGenerated.tsx linguist-generated=true site/src/api/typesGenerated.ts linguist-generated=true -site/src/pages/SetupPage/countries.tsx linguist-generated=true diff --git a/.github/actions/embedded-pg-cache/download/action.yml b/.github/actions/embedded-pg-cache/download/action.yml index c2c3c0c0b299c..854e5045c2dda 100644 --- a/.github/actions/embedded-pg-cache/download/action.yml +++ b/.github/actions/embedded-pg-cache/download/action.yml @@ -25,9 +25,11 @@ runs: export YEAR_MONTH=$(date +'%Y-%m') export PREV_YEAR_MONTH=$(date -d 'last month' +'%Y-%m') export DAY=$(date +'%d') - echo "year-month=$YEAR_MONTH" >> $GITHUB_OUTPUT - echo "prev-year-month=$PREV_YEAR_MONTH" >> $GITHUB_OUTPUT - echo "cache-key=${{ inputs.key-prefix }}-${YEAR_MONTH}-${DAY}" >> $GITHUB_OUTPUT + echo "year-month=$YEAR_MONTH" >> "$GITHUB_OUTPUT" + echo "prev-year-month=$PREV_YEAR_MONTH" >> "$GITHUB_OUTPUT" + echo "cache-key=${INPUTS_KEY_PREFIX}-${YEAR_MONTH}-${DAY}" >> "$GITHUB_OUTPUT" + env: + INPUTS_KEY_PREFIX: ${{ inputs.key-prefix }} # By default, depot keeps caches for 14 days. This is plenty for embedded # postgres, which changes infrequently. diff --git a/.github/actions/setup-go/action.yaml b/.github/actions/setup-go/action.yaml index a8a88621dda18..097a1b6cfd119 100644 --- a/.github/actions/setup-go/action.yaml +++ b/.github/actions/setup-go/action.yaml @@ -4,7 +4,7 @@ description: | inputs: version: description: "The Go version to use." - default: "1.24.4" + default: "1.24.6" use-preinstalled-go: description: "Whether to use preinstalled Go." default: "false" diff --git a/.github/actions/setup-node/action.yaml b/.github/actions/setup-node/action.yaml index 02ffa14312ffe..6ed9985185746 100644 --- a/.github/actions/setup-node/action.yaml +++ b/.github/actions/setup-node/action.yaml @@ -16,7 +16,7 @@ runs: - name: Setup Node uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: - node-version: 20.16.0 + node-version: 20.19.4 # See https://github.com/actions/setup-node#caching-global-packages-data cache: "pnpm" cache-dependency-path: ${{ inputs.directory }}/pnpm-lock.yaml diff --git a/.github/actions/test-cache/download/action.yml b/.github/actions/test-cache/download/action.yml index 06a87fee06d4b..623bb61e11c52 100644 --- a/.github/actions/test-cache/download/action.yml +++ b/.github/actions/test-cache/download/action.yml @@ -27,9 +27,11 @@ runs: export YEAR_MONTH=$(date +'%Y-%m') export PREV_YEAR_MONTH=$(date -d 'last month' +'%Y-%m') export DAY=$(date +'%d') - echo "year-month=$YEAR_MONTH" >> $GITHUB_OUTPUT - echo "prev-year-month=$PREV_YEAR_MONTH" >> $GITHUB_OUTPUT - echo "cache-key=${{ inputs.key-prefix }}-${YEAR_MONTH}-${DAY}" >> $GITHUB_OUTPUT + echo "year-month=$YEAR_MONTH" >> "$GITHUB_OUTPUT" + echo "prev-year-month=$PREV_YEAR_MONTH" >> "$GITHUB_OUTPUT" + echo "cache-key=${INPUTS_KEY_PREFIX}-${YEAR_MONTH}-${DAY}" >> "$GITHUB_OUTPUT" + env: + INPUTS_KEY_PREFIX: ${{ inputs.key-prefix }} # TODO: As a cost optimization, we could remove caches that are older than # a day or two. By default, depot keeps caches for 14 days, which isn't diff --git a/.github/actions/upload-datadog/action.yaml b/.github/actions/upload-datadog/action.yaml index a2df93ab14b28..274ff3df6493a 100644 --- a/.github/actions/upload-datadog/action.yaml +++ b/.github/actions/upload-datadog/action.yaml @@ -12,13 +12,12 @@ runs: run: | set -e - owner=${{ github.repository_owner }} - echo "owner: $owner" - if [[ $owner != "coder" ]]; then + echo "owner: $REPO_OWNER" + if [[ "$REPO_OWNER" != "coder" ]]; then echo "Not a pull request from the main repo, skipping..." exit 0 fi - if [[ -z "${{ inputs.api-key }}" ]]; then + if [[ -z "${DATADOG_API_KEY}" ]]; then # This can happen for dependabot. echo "No API key provided, skipping..." exit 0 @@ -31,37 +30,38 @@ runs: TMP_DIR=$(mktemp -d) - if [[ "${{ runner.os }}" == "Windows" ]]; then + if [[ "${RUNNER_OS}" == "Windows" ]]; then BINARY_PATH="${TMP_DIR}/datadog-ci.exe" BINARY_URL="https://github.com/DataDog/datadog-ci/releases/download/${BINARY_VERSION}/datadog-ci_win-x64" - elif [[ "${{ runner.os }}" == "macOS" ]]; then + elif [[ "${RUNNER_OS}" == "macOS" ]]; then BINARY_PATH="${TMP_DIR}/datadog-ci" BINARY_URL="https://github.com/DataDog/datadog-ci/releases/download/${BINARY_VERSION}/datadog-ci_darwin-arm64" - elif [[ "${{ runner.os }}" == "Linux" ]]; then + elif [[ "${RUNNER_OS}" == "Linux" ]]; then BINARY_PATH="${TMP_DIR}/datadog-ci" BINARY_URL="https://github.com/DataDog/datadog-ci/releases/download/${BINARY_VERSION}/datadog-ci_linux-x64" else - echo "Unsupported OS: ${{ runner.os }}" + echo "Unsupported OS: $RUNNER_OS" exit 1 fi - echo "Downloading DataDog CI binary version ${BINARY_VERSION} for ${{ runner.os }}..." + echo "Downloading DataDog CI binary version ${BINARY_VERSION} for $RUNNER_OS..." curl -sSL "$BINARY_URL" -o "$BINARY_PATH" - if [[ "${{ runner.os }}" == "Windows" ]]; then + if [[ "${RUNNER_OS}" == "Windows" ]]; then echo "$BINARY_HASH_WINDOWS $BINARY_PATH" | sha256sum --check - elif [[ "${{ runner.os }}" == "macOS" ]]; then + elif [[ "${RUNNER_OS}" == "macOS" ]]; then echo "$BINARY_HASH_MACOS $BINARY_PATH" | shasum -a 256 --check - elif [[ "${{ runner.os }}" == "Linux" ]]; then + elif [[ "${RUNNER_OS}" == "Linux" ]]; then echo "$BINARY_HASH_LINUX $BINARY_PATH" | sha256sum --check fi # Make binary executable (not needed for Windows) - if [[ "${{ runner.os }}" != "Windows" ]]; then + if [[ "${RUNNER_OS}" != "Windows" ]]; then chmod +x "$BINARY_PATH" fi "$BINARY_PATH" junit upload --service coder ./gotests.xml \ - --tags os:${{runner.os}} --tags runner_name:${{runner.name}} + --tags "os:${RUNNER_OS}" --tags "runner_name:${RUNNER_NAME}" env: + REPO_OWNER: ${{ github.repository_owner }} DATADOG_API_KEY: ${{ inputs.api-key }} diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000000000..66deeefbc1d47 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1 @@ +If you have used AI to produce some or all of this PR, please ensure you have read our [AI Contribution guidelines](https://coder.com/docs/about/contributing/AI_CONTRIBUTING) before submitting. diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 4ed72569402da..747f158e28a9e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -39,10 +39,10 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 1 - # For pull requests it's not necessary to checkout the code + persist-credentials: false - name: check changed files uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 id: filter @@ -111,7 +111,9 @@ jobs: - id: debug run: | - echo "${{ toJSON(steps.filter )}}" + echo "$FILTER_JSON" + env: + FILTER_JSON: ${{ toJSON(steps.filter.outputs) }} # Disabled due to instability. See: https://github.com/coder/coder/issues/14553 # Re-enable once the flake hash calculation is stable. @@ -121,7 +123,7 @@ jobs: # runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} # steps: # - name: Checkout - # uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + # uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 # with: # fetch-depth: 1 # # See: https://github.com/stefanzweifel/git-auto-commit-action?tab=readme-ov-file#commits-made-by-this-action-do-not-trigger-new-workflow-runs @@ -159,9 +161,10 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 1 + persist-credentials: false - name: Setup Node uses: ./.github/actions/setup-node @@ -171,13 +174,13 @@ jobs: - name: Get golangci-lint cache dir run: | - linter_ver=$(egrep -o 'GOLANGCI_LINT_VERSION=\S+' dogfood/coder/Dockerfile | cut -d '=' -f 2) - go install github.com/golangci/golangci-lint/cmd/golangci-lint@v$linter_ver + linter_ver=$(grep -Eo 'GOLANGCI_LINT_VERSION=\S+' dogfood/coder/Dockerfile | cut -d '=' -f 2) + go install "github.com/golangci/golangci-lint/cmd/golangci-lint@v$linter_ver" dir=$(golangci-lint cache status | awk '/Dir/ { print $2 }') - echo "LINT_CACHE_DIR=$dir" >> $GITHUB_ENV + echo "LINT_CACHE_DIR=$dir" >> "$GITHUB_ENV" - name: golangci-lint cache - uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 + uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 with: path: | ${{ env.LINT_CACHE_DIR }} @@ -187,7 +190,7 @@ jobs: # Check for any typos - name: Check for typos - uses: crate-ci/typos@392b78fe18a52790c53f42456e46124f77346842 # v1.34.0 + uses: crate-ci/typos@52bd719c2c91f9d676e2aa359fc8e0db8925e6d8 # v1.35.3 with: config: .github/workflows/typos.toml @@ -206,7 +209,12 @@ jobs: - name: make lint run: | - make --output-sync=line -j lint + # zizmor isn't included in the lint target because it takes a while, + # but we explicitly want to run it in CI. + make --output-sync=line -j lint lint/actions/zizmor + env: + # Used by zizmor to lint third-party GitHub actions. + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Check workflow files run: | @@ -231,9 +239,10 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 1 + persist-credentials: false - name: Setup Node uses: ./.github/actions/setup-node @@ -256,8 +265,8 @@ jobs: pushd /tmp/proto curl -L -o protoc.zip https://github.com/protocolbuffers/protobuf/releases/download/v23.4/protoc-23.4-linux-x86_64.zip unzip protoc.zip - cp -r ./bin/* /usr/local/bin - cp -r ./include /usr/local/bin/include + sudo cp -r ./bin/* /usr/local/bin + sudo cp -r ./include /usr/local/bin/include popd - name: make gen @@ -286,9 +295,10 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 1 + persist-credentials: false - name: Setup Node uses: ./.github/actions/setup-node @@ -305,8 +315,8 @@ jobs: - name: make fmt run: | - export PATH=${PATH}:$(go env GOPATH)/bin - make --output-sync -j -B fmt + PATH="${PATH}:$(go env GOPATH)/bin" \ + make --output-sync -j -B fmt - name: Check for unstaged files run: ./scripts/check_unstaged.sh @@ -340,6 +350,11 @@ jobs: - name: Disable Spotlight Indexing if: runner.os == 'macOS' run: | + enabled=$(sudo mdutil -a -s | { grep -Fc "Indexing enabled" || true; }) + if [ "$enabled" -eq 0 ]; then + echo "Spotlight indexing is already disabled" + exit 0 + fi sudo mdutil -a -i off sudo mdutil -X / sudo launchctl bootout system /System/Library/LaunchDaemons/com.apple.metadata.mds.plist @@ -348,12 +363,13 @@ jobs: # a separate repository to allow its use before actions/checkout. - name: Setup RAM Disks if: runner.os == 'Windows' - uses: coder/setup-ramdisk-action@e1100847ab2d7bcd9d14bcda8f2d1b0f07b36f1b + uses: coder/setup-ramdisk-action@e1100847ab2d7bcd9d14bcda8f2d1b0f07b36f1b # v0.1.0 - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 1 + persist-credentials: false - name: Setup Go Paths id: go-paths @@ -416,63 +432,61 @@ jobs: set -o errexit set -o pipefail - if [ "${{ runner.os }}" == "Windows" ]; then + if [ "$RUNNER_OS" == "Windows" ]; then # Create a temp dir on the R: ramdisk drive for Windows. The default # C: drive is extremely slow: https://github.com/actions/runner-images/issues/8755 mkdir -p "R:/temp/embedded-pg" go run scripts/embedded-pg/main.go -path "R:/temp/embedded-pg" -cache "${EMBEDDED_PG_CACHE_DIR}" - elif [ "${{ runner.os }}" == "macOS" ]; then + elif [ "$RUNNER_OS" == "macOS" ]; then # Postgres runs faster on a ramdisk on macOS too mkdir -p /tmp/tmpfs sudo mount_tmpfs -o noowners -s 8g /tmp/tmpfs go run scripts/embedded-pg/main.go -path /tmp/tmpfs/embedded-pg -cache "${EMBEDDED_PG_CACHE_DIR}" - elif [ "${{ runner.os }}" == "Linux" ]; then + elif [ "$RUNNER_OS" == "Linux" ]; then make test-postgres-docker fi # if macOS, install google-chrome for scaletests # As another concern, should we really have this kind of external dependency # requirement on standard CI? - if [ "${{ matrix.os }}" == "macos-latest" ]; then + if [ "${RUNNER_OS}" == "macOS" ]; then brew install google-chrome fi # macOS will output "The default interactive shell is now zsh" # intermittently in CI... - if [ "${{ matrix.os }}" == "macos-latest" ]; then + if [ "${RUNNER_OS}" == "macOS" ]; then touch ~/.bash_profile && echo "export BASH_SILENCE_DEPRECATION_WARNING=1" >> ~/.bash_profile fi - if [ "${{ runner.os }}" == "Windows" ]; then + if [ "${RUNNER_OS}" == "Windows" ]; then # Our Windows runners have 16 cores. # On Windows Postgres chokes up when we have 16x16=256 tests # running in parallel, and dbtestutil.NewDB starts to take more than # 10s to complete sometimes causing test timeouts. With 16x8=128 tests # Postgres tends not to choke. - NUM_PARALLEL_PACKAGES=8 - NUM_PARALLEL_TESTS=16 + export TEST_NUM_PARALLEL_PACKAGES=8 + export TEST_NUM_PARALLEL_TESTS=16 # Only the CLI and Agent are officially supported on Windows and the rest are too flaky - PACKAGES="./cli/... ./enterprise/cli/... ./agent/..." - elif [ "${{ runner.os }}" == "macOS" ]; then + export TEST_PACKAGES="./cli/... ./enterprise/cli/... ./agent/..." + elif [ "${RUNNER_OS}" == "macOS" ]; then # Our macOS runners have 8 cores. We set NUM_PARALLEL_TESTS to 16 # because the tests complete faster and Postgres doesn't choke. It seems # that macOS's tmpfs is faster than the one on Windows. - NUM_PARALLEL_PACKAGES=8 - NUM_PARALLEL_TESTS=16 + export TEST_NUM_PARALLEL_PACKAGES=8 + export TEST_NUM_PARALLEL_TESTS=16 # Only the CLI and Agent are officially supported on macOS and the rest are too flaky - PACKAGES="./cli/... ./enterprise/cli/... ./agent/..." - elif [ "${{ runner.os }}" == "Linux" ]; then + export TEST_PACKAGES="./cli/... ./enterprise/cli/... ./agent/..." + elif [ "${RUNNER_OS}" == "Linux" ]; then # Our Linux runners have 8 cores. - NUM_PARALLEL_PACKAGES=8 - NUM_PARALLEL_TESTS=8 - PACKAGES="./..." + export TEST_NUM_PARALLEL_PACKAGES=8 + export TEST_NUM_PARALLEL_TESTS=8 fi # by default, run tests with cache - TESTCOUNT="" - if [ "${{ github.ref }}" == "refs/heads/main" ]; then + if [ "${GITHUB_REF}" == "refs/heads/main" ]; then # on main, run tests without cache - TESTCOUNT="-count=1" + export TEST_COUNT="1" fi mkdir -p "$RUNNER_TEMP/sym" @@ -480,10 +494,15 @@ jobs: # terraform gets installed in a random directory, so we need to normalize # the path to the terraform binary or a bunch of cached tests will be # invalidated. See scripts/normalize_path.sh for more details. - normalize_path_with_symlinks "$RUNNER_TEMP/sym" "$(dirname $(which terraform))" + normalize_path_with_symlinks "$RUNNER_TEMP/sym" "$(dirname "$(which terraform)")" - gotestsum --format standard-quiet --packages "$PACKAGES" \ - -- -timeout=20m -v -p $NUM_PARALLEL_PACKAGES -parallel=$NUM_PARALLEL_TESTS $TESTCOUNT + make test + + - name: Upload failed test db dumps + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + with: + name: failed-test-db-dump-${{matrix.os}} + path: "**/*.test.sql" - name: Upload Go Build Cache uses: ./.github/actions/test-cache/upload @@ -532,9 +551,10 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 1 + persist-credentials: false - name: Setup Go uses: ./.github/actions/setup-go @@ -580,9 +600,10 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 1 + persist-credentials: false - name: Setup Go uses: ./.github/actions/setup-go @@ -639,9 +660,10 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 1 + persist-credentials: false - name: Setup Go uses: ./.github/actions/setup-go @@ -665,14 +687,15 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 1 + persist-credentials: false - name: Setup Node uses: ./.github/actions/setup-node - - run: pnpm test:ci --max-workers $(nproc) + - run: pnpm test:ci --max-workers "$(nproc)" working-directory: site test-e2e: @@ -697,9 +720,10 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 1 + persist-credentials: false - name: Setup Node uses: ./.github/actions/setup-node @@ -768,12 +792,13 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: # 👇 Ensures Chromatic can read your full git history fetch-depth: 0 # 👇 Tells the checkout which commit hash to reference ref: ${{ github.event.pull_request.head.ref }} + persist-credentials: false - name: Setup Node uses: ./.github/actions/setup-node @@ -783,7 +808,7 @@ jobs: # the check to pass. This is desired in PRs, but not in mainline. - name: Publish to Chromatic (non-mainline) if: github.ref != 'refs/heads/main' && github.repository_owner == 'coder' - uses: chromaui/action@4d8ebd13658d795114f8051e25c28d66f14886c6 # v13.1.2 + uses: chromaui/action@58d9ffb36c90c97a02d061544ecc849cc4a242a9 # v13.1.3 env: NODE_OPTIONS: "--max_old_space_size=4096" STORYBOOK: true @@ -815,7 +840,7 @@ jobs: # infinitely "in progress" in mainline unless we re-review each build. - name: Publish to Chromatic (mainline) if: github.ref == 'refs/heads/main' && github.repository_owner == 'coder' - uses: chromaui/action@4d8ebd13658d795114f8051e25c28d66f14886c6 # v13.1.2 + uses: chromaui/action@58d9ffb36c90c97a02d061544ecc849cc4a242a9 # v13.1.3 env: NODE_OPTIONS: "--max_old_space_size=4096" STORYBOOK: true @@ -848,10 +873,11 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: # 0 is required here for version.sh to work. fetch-depth: 0 + persist-credentials: false - name: Setup Node uses: ./.github/actions/setup-node @@ -864,8 +890,8 @@ jobs: pushd /tmp/proto curl -L -o protoc.zip https://github.com/protocolbuffers/protobuf/releases/download/v23.4/protoc-23.4-linux-x86_64.zip unzip protoc.zip - cp -r ./bin/* /usr/local/bin - cp -r ./include /usr/local/bin/include + sudo cp -r ./bin/* /usr/local/bin + sudo cp -r ./include /usr/local/bin/include popd - name: Setup Go @@ -905,6 +931,7 @@ jobs: - test-e2e - offlinedocs - sqlc-vet + - check-build # Allow this job to run even if the needed jobs fail, are skipped or # cancelled. if: always() @@ -915,7 +942,7 @@ jobs: egress-policy: audit - name: Ensure required checks - run: | + run: | # zizmor: ignore[template-injection] We're just reading needs.x.result here, no risk of injection echo "Checking required checks" echo "- fmt: ${{ needs.fmt.result }}" echo "- lint: ${{ needs.lint.result }}" @@ -925,6 +952,7 @@ jobs: echo "- test-js: ${{ needs.test-js.result }}" echo "- test-e2e: ${{ needs.test-e2e.result }}" echo "- offlinedocs: ${{ needs.offlinedocs.result }}" + echo "- check-build: ${{ needs.check-build.result }}" echo # We allow skipped jobs to pass, but not failed or cancelled jobs. @@ -945,21 +973,24 @@ jobs: steps: # Harden Runner doesn't work on macOS - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 0 + persist-credentials: false - name: Setup build tools run: | brew install bash gnu-getopt make - echo "$(brew --prefix bash)/bin" >> $GITHUB_PATH - echo "$(brew --prefix gnu-getopt)/bin" >> $GITHUB_PATH - echo "$(brew --prefix make)/libexec/gnubin" >> $GITHUB_PATH + { + echo "$(brew --prefix bash)/bin" + echo "$(brew --prefix gnu-getopt)/bin" + echo "$(brew --prefix make)/libexec/gnubin" + } >> "$GITHUB_PATH" - name: Switch XCode Version uses: maxim-lobanov/setup-xcode@60606e260d2fc5762a71e64e74b2174e8ea3c8bd # v1.6.0 with: - xcode-version: "16.0.0" + xcode-version: "16.1.0" - name: Setup Go uses: ./.github/actions/setup-go @@ -1015,6 +1046,47 @@ jobs: if: ${{ github.repository_owner == 'coder' && github.ref == 'refs/heads/main' }} run: rm -f /tmp/{apple_cert.p12,apple_cert_password.txt,apple_apikey.p8} + check-build: + # This job runs make build to verify compilation on PRs. + # The build doesn't get signed, and is not suitable for usage, unlike the + # `build` job that runs on main. + needs: changes + if: needs.changes.outputs.go == 'true' && github.ref != 'refs/heads/main' + runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} + steps: + - name: Harden Runner + uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0 + with: + egress-policy: audit + + - name: Checkout + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 + with: + fetch-depth: 0 + persist-credentials: false + + - name: Setup Node + uses: ./.github/actions/setup-node + + - name: Setup Go + uses: ./.github/actions/setup-go + + - name: Install go-winres + run: go install github.com/tc-hib/go-winres@d743268d7ea168077ddd443c4240562d4f5e8c3e # v0.3.3 + + - name: Install nfpm + run: go install github.com/goreleaser/nfpm/v2/cmd/nfpm@v2.35.1 + + - name: Install zstd + run: sudo apt-get install -y zstd + + - name: Build + run: | + set -euxo pipefail + go mod download + make gen/mark-fresh + make build + build: # This builds and publishes ghcr.io/coder/coder-preview:main for each commit # to main branch. @@ -1043,12 +1115,13 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 0 + persist-credentials: false - name: GHCR Login - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 with: registry: ghcr.io username: ${{ github.actor }} @@ -1060,6 +1133,27 @@ jobs: - name: Setup Go uses: ./.github/actions/setup-go + - name: Install rcodesign + run: | + set -euo pipefail + wget -O /tmp/rcodesign.tar.gz https://github.com/indygreg/apple-platform-rs/releases/download/apple-codesign%2F0.22.0/apple-codesign-0.22.0-x86_64-unknown-linux-musl.tar.gz + sudo tar -xzf /tmp/rcodesign.tar.gz \ + -C /usr/bin \ + --strip-components=1 \ + apple-codesign-0.22.0-x86_64-unknown-linux-musl/rcodesign + rm /tmp/rcodesign.tar.gz + + - name: Setup Apple Developer certificate + run: | + set -euo pipefail + touch /tmp/{apple_cert.p12,apple_cert_password.txt} + chmod 600 /tmp/{apple_cert.p12,apple_cert_password.txt} + echo "$AC_CERTIFICATE_P12_BASE64" | base64 -d > /tmp/apple_cert.p12 + echo "$AC_CERTIFICATE_PASSWORD" > /tmp/apple_cert_password.txt + env: + AC_CERTIFICATE_P12_BASE64: ${{ secrets.AC_CERTIFICATE_P12_BASE64 }} + AC_CERTIFICATE_PASSWORD: ${{ secrets.AC_CERTIFICATE_PASSWORD }} + # Necessary for signing Windows binaries. - name: Setup Java uses: actions/setup-java@c5195efecf7bdfc987ee8bae7a71cb8b11521c00 # v4.7.1 @@ -1095,17 +1189,17 @@ jobs: # Setup GCloud for signing Windows binaries. - name: Authenticate to Google Cloud id: gcloud_auth - uses: google-github-actions/auth@140bb5113ffb6b65a7e9b937a81fa96cf5064462 # v2.1.11 + uses: google-github-actions/auth@b7593ed2efd1c1617e1b0254da33b86225adb2a5 # v2.1.12 with: - workload_identity_provider: ${{ secrets.GCP_CODE_SIGNING_WORKLOAD_ID_PROVIDER }} - service_account: ${{ secrets.GCP_CODE_SIGNING_SERVICE_ACCOUNT }} + workload_identity_provider: ${{ vars.GCP_CODE_SIGNING_WORKLOAD_ID_PROVIDER }} + service_account: ${{ vars.GCP_CODE_SIGNING_SERVICE_ACCOUNT }} token_format: "access_token" - name: Setup GCloud SDK - uses: google-github-actions/setup-gcloud@6a7c903a70c8625ed6700fa299f5ddb4ca6022e9 # v2.1.5 + uses: google-github-actions/setup-gcloud@cb1e50a9932213ecece00a606661ae9ca44f3397 # v2.2.0 - name: Download dylibs - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 with: name: dylibs path: ./build @@ -1122,8 +1216,8 @@ jobs: go mod download version="$(./scripts/version.sh)" - tag="main-$(echo "$version" | sed 's/+/-/g')" - echo "tag=$tag" >> $GITHUB_OUTPUT + tag="main-${version//+/-}" + echo "tag=$tag" >> "$GITHUB_OUTPUT" make gen/mark-fresh make -j \ @@ -1138,6 +1232,9 @@ jobs: CODER_WINDOWS_RESOURCES: "1" CODER_SIGN_GPG: "1" CODER_GPG_RELEASE_KEY_BASE64: ${{ secrets.GPG_RELEASE_KEY_BASE64 }} + CODER_SIGN_DARWIN: "1" + AC_CERTIFICATE_FILE: /tmp/apple_cert.p12 + AC_CERTIFICATE_PASSWORD_FILE: /tmp/apple_cert_password.txt EV_KEY: ${{ secrets.EV_KEY }} EV_KEYSTORE: ${{ secrets.EV_KEYSTORE }} EV_TSA_URL: ${{ secrets.EV_TSA_URL }} @@ -1156,15 +1253,15 @@ jobs: # build Docker images for each architecture version="$(./scripts/version.sh)" - tag="main-$(echo "$version" | sed 's/+/-/g')" - echo "tag=$tag" >> $GITHUB_OUTPUT + tag="main-${version//+/-}" + echo "tag=$tag" >> "$GITHUB_OUTPUT" # build images for each architecture # note: omitting the -j argument to avoid race conditions when pushing make build/coder_"$version"_linux_{amd64,arm64,armv7}.tag # only push if we are on main branch - if [ "${{ github.ref }}" == "refs/heads/main" ]; then + if [ "${GITHUB_REF}" == "refs/heads/main" ]; then # build and push multi-arch manifest, this depends on the other images # being pushed so will automatically push them # note: omitting the -j argument to avoid race conditions when pushing @@ -1177,10 +1274,11 @@ jobs: # we are adding `latest` tag and keeping `main` for backward # compatibality for t in "${tags[@]}"; do + # shellcheck disable=SC2046 ./scripts/build_docker_multiarch.sh \ --push \ --target "ghcr.io/coder/coder-preview:$t" \ - --version $version \ + --version "$version" \ $(cat build/coder_"$version"_linux_{amd64,arm64,armv7}.tag) done fi @@ -1190,12 +1288,13 @@ jobs: continue-on-error: true env: COSIGN_EXPERIMENTAL: 1 + BUILD_TAG: ${{ steps.build-docker.outputs.tag }} run: | set -euxo pipefail # Define image base and tags IMAGE_BASE="ghcr.io/coder/coder-preview" - TAGS=("${{ steps.build-docker.outputs.tag }}" "main" "latest") + TAGS=("${BUILD_TAG}" "main" "latest") # Generate and attest SBOM for each tag for tag in "${TAGS[@]}"; do @@ -1334,7 +1433,7 @@ jobs: # Report attestation failures but don't fail the workflow - name: Check attestation status if: github.ref == 'refs/heads/main' - run: | + run: | # zizmor: ignore[template-injection] We're just reading steps.attest_x.outcome here, no risk of injection if [[ "${{ steps.attest_main.outcome }}" == "failure" ]]; then echo "::warning::GitHub attestation for main tag failed" fi @@ -1391,18 +1490,19 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 0 + persist-credentials: false - name: Authenticate to Google Cloud - uses: google-github-actions/auth@140bb5113ffb6b65a7e9b937a81fa96cf5064462 # v2.1.11 + uses: google-github-actions/auth@b7593ed2efd1c1617e1b0254da33b86225adb2a5 # v2.1.12 with: - workload_identity_provider: projects/573722524737/locations/global/workloadIdentityPools/github/providers/github - service_account: coder-ci@coder-dogfood.iam.gserviceaccount.com + workload_identity_provider: ${{ vars.GCP_WORKLOAD_ID_PROVIDER }} + service_account: ${{ vars.GCP_SERVICE_ACCOUNT }} - name: Set up Google Cloud SDK - uses: google-github-actions/setup-gcloud@6a7c903a70c8625ed6700fa299f5ddb4ca6022e9 # v2.1.5 + uses: google-github-actions/setup-gcloud@cb1e50a9932213ecece00a606661ae9ca44f3397 # v2.2.0 - name: Set up Flux CLI uses: fluxcd/flux2/action@6bf37f6a560fd84982d67f853162e4b3c2235edb # v2.6.4 @@ -1455,9 +1555,10 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 0 + persist-credentials: false - name: Setup flyctl uses: superfly/flyctl-actions/setup-flyctl@fc53c09e1bc3be6f54706524e3b82c4f462f77be # v1.5 @@ -1490,10 +1591,10 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 1 - # We need golang to run the migration main.go + persist-credentials: false - name: Setup Go uses: ./.github/actions/setup-go @@ -1529,15 +1630,15 @@ jobs: "fields": [ { "type": "mrkdwn", - "text": "*Workflow:*\n${{ github.workflow }}" + "text": "*Workflow:*\n'"${GITHUB_WORKFLOW}"'" }, { "type": "mrkdwn", - "text": "*Committer:*\n${{ github.actor }}" + "text": "*Committer:*\n'"${GITHUB_ACTOR}"'" }, { "type": "mrkdwn", - "text": "*Commit:*\n${{ github.sha }}" + "text": "*Commit:*\n'"${GITHUB_SHA}"'" } ] }, @@ -1545,8 +1646,18 @@ jobs: "type": "section", "text": { "type": "mrkdwn", - "text": "*View failure:* <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Click here>" + "text": "*View failure:* <'"${RUN_URL}"'|Click here>" + } + }, + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": "<@U08TJ4YNCA3> investigate this CI failure. Check logs, search for existing issues, use git blame to find who last modified failing tests, create issue in coder/internal (not public repo), use title format \"flake: TestName\" for flaky tests, and assign to the person from git blame." } } ] - }' ${{ secrets.CI_FAILURE_SLACK_WEBHOOK }} + }' "${SLACK_WEBHOOK}" + env: + SLACK_WEBHOOK: ${{ secrets.CI_FAILURE_SLACK_WEBHOOK }} + RUN_URL: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" diff --git a/.github/workflows/contrib.yaml b/.github/workflows/contrib.yaml index 27dffe94f4000..e9c5c9ec2afd8 100644 --- a/.github/workflows/contrib.yaml +++ b/.github/workflows/contrib.yaml @@ -3,6 +3,7 @@ name: contrib on: issue_comment: types: [created, edited] + # zizmor: ignore[dangerous-triggers] We explicitly want to run on pull_request_target. pull_request_target: types: - opened diff --git a/.github/workflows/dependabot.yaml b/.github/workflows/dependabot.yaml index f86601096ae96..f95ae3fa810e6 100644 --- a/.github/workflows/dependabot.yaml +++ b/.github/workflows/dependabot.yaml @@ -15,7 +15,7 @@ jobs: github.event_name == 'pull_request' && github.event.action == 'opened' && github.event.pull_request.user.login == 'dependabot[bot]' && - github.actor_id == 49699333 && + github.event.pull_request.user.id == 49699333 && github.repository == 'coder/coder' permissions: pull-requests: write @@ -44,10 +44,6 @@ jobs: GH_TOKEN: ${{secrets.GITHUB_TOKEN}} - name: Send Slack notification - env: - PR_URL: ${{github.event.pull_request.html_url}} - PR_TITLE: ${{github.event.pull_request.title}} - PR_NUMBER: ${{github.event.pull_request.number}} run: | curl -X POST -H 'Content-type: application/json' \ --data '{ @@ -58,7 +54,7 @@ jobs: "type": "header", "text": { "type": "plain_text", - "text": ":pr-merged: Auto merge enabled for Dependabot PR #${{ env.PR_NUMBER }}", + "text": ":pr-merged: Auto merge enabled for Dependabot PR #'"${PR_NUMBER}"'", "emoji": true } }, @@ -67,7 +63,7 @@ jobs: "fields": [ { "type": "mrkdwn", - "text": "${{ env.PR_TITLE }}" + "text": "'"${PR_TITLE}"'" } ] }, @@ -80,9 +76,14 @@ jobs: "type": "plain_text", "text": "View PR" }, - "url": "${{ env.PR_URL }}" + "url": "'"${PR_URL}"'" } ] } ] - }' ${{ secrets.DEPENDABOT_PRS_SLACK_WEBHOOK }} + }' "${{ secrets.DEPENDABOT_PRS_SLACK_WEBHOOK }}" + env: + SLACK_WEBHOOK: ${{ secrets.DEPENDABOT_PRS_SLACK_WEBHOOK }} + PR_NUMBER: ${{ github.event.pull_request.number }} + PR_TITLE: ${{ github.event.pull_request.title }} + PR_URL: ${{ github.event.pull_request.html_url }} diff --git a/.github/workflows/docker-base.yaml b/.github/workflows/docker-base.yaml index bb45d4c0a0601..5c8fa142450bb 100644 --- a/.github/workflows/docker-base.yaml +++ b/.github/workflows/docker-base.yaml @@ -43,10 +43,12 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 + with: + persist-credentials: false - name: Docker login - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 with: registry: ghcr.io username: ${{ github.actor }} diff --git a/.github/workflows/docs-ci.yaml b/.github/workflows/docs-ci.yaml index 39954783f1ba8..887db40660caf 100644 --- a/.github/workflows/docs-ci.yaml +++ b/.github/workflows/docs-ci.yaml @@ -23,12 +23,14 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 + with: + persist-credentials: false - name: Setup Node uses: ./.github/actions/setup-node - - uses: tj-actions/changed-files@055970845dd036d7345da7399b7e89f2e10f2b04 # v45.0.7 + - uses: tj-actions/changed-files@f963b3f3562b00b6d2dd25efc390eb04e51ef6c6 # v45.0.7 id: changed-files with: files: | @@ -39,10 +41,16 @@ jobs: - name: lint if: steps.changed-files.outputs.any_changed == 'true' run: | - pnpm exec markdownlint-cli2 ${{ steps.changed-files.outputs.all_changed_files }} + # shellcheck disable=SC2086 + pnpm exec markdownlint-cli2 $ALL_CHANGED_FILES + env: + ALL_CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }} - name: fmt if: steps.changed-files.outputs.any_changed == 'true' run: | # markdown-table-formatter requires a space separated list of files - echo ${{ steps.changed-files.outputs.all_changed_files }} | tr ',' '\n' | pnpm exec markdown-table-formatter --check + # shellcheck disable=SC2086 + echo $ALL_CHANGED_FILES | tr ',' '\n' | pnpm exec markdown-table-formatter --check + env: + ALL_CHANGED_FILES: ${{ steps.changed-files.outputs.all_changed_files }} diff --git a/.github/workflows/dogfood.yaml b/.github/workflows/dogfood.yaml index bafdb5fb19767..119cd4fe85244 100644 --- a/.github/workflows/dogfood.yaml +++ b/.github/workflows/dogfood.yaml @@ -18,8 +18,7 @@ on: workflow_dispatch: permissions: - # Necessary for GCP authentication (https://github.com/google-github-actions/setup-gcloud#usage) - id-token: write + contents: read jobs: build_image: @@ -32,7 +31,9 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 + with: + persist-credentials: false - name: Setup Nix uses: nixbuild/nix-quick-install-action@63ca48f939ee3b8d835f4126562537df0fee5b91 # v32 @@ -62,15 +63,16 @@ jobs: - name: Get branch name id: branch-name - uses: tj-actions/branch-names@dde14ac574a8b9b1cedc59a1cf312788af43d8d8 # v8.2.1 + uses: tj-actions/branch-names@5250492686b253f06fa55861556d1027b067aeb5 # v9.0.2 - name: "Branch name to Docker tag name" id: docker-tag-name run: | - tag=${{ steps.branch-name.outputs.current_branch }} # Replace / with --, e.g. user/feature => user--feature. - tag=${tag//\//--} - echo "tag=${tag}" >> $GITHUB_OUTPUT + tag=${BRANCH_NAME//\//--} + echo "tag=${tag}" >> "$GITHUB_OUTPUT" + env: + BRANCH_NAME: ${{ steps.branch-name.outputs.current_branch }} - name: Set up Depot CLI uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # v1.6.0 @@ -80,7 +82,7 @@ jobs: - name: Login to DockerHub if: github.ref == 'refs/heads/main' - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_PASSWORD }} @@ -107,15 +109,20 @@ jobs: CURRENT_SYSTEM=$(nix eval --impure --raw --expr 'builtins.currentSystem') - docker image tag codercom/oss-dogfood-nix:latest-$CURRENT_SYSTEM codercom/oss-dogfood-nix:${{ steps.docker-tag-name.outputs.tag }} - docker image push codercom/oss-dogfood-nix:${{ steps.docker-tag-name.outputs.tag }} + docker image tag "codercom/oss-dogfood-nix:latest-$CURRENT_SYSTEM" "codercom/oss-dogfood-nix:${DOCKER_TAG}" + docker image push "codercom/oss-dogfood-nix:${DOCKER_TAG}" - docker image tag codercom/oss-dogfood-nix:latest-$CURRENT_SYSTEM codercom/oss-dogfood-nix:latest - docker image push codercom/oss-dogfood-nix:latest + docker image tag "codercom/oss-dogfood-nix:latest-$CURRENT_SYSTEM" "codercom/oss-dogfood-nix:latest" + docker image push "codercom/oss-dogfood-nix:latest" + env: + DOCKER_TAG: ${{ steps.docker-tag-name.outputs.tag }} deploy_template: needs: build_image runs-on: ubuntu-latest + permissions: + # Necessary for GCP authentication (https://github.com/google-github-actions/setup-gcloud#usage) + id-token: write steps: - name: Harden Runner uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0 @@ -123,16 +130,18 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 + with: + persist-credentials: false - name: Setup Terraform uses: ./.github/actions/setup-tf - name: Authenticate to Google Cloud - uses: google-github-actions/auth@140bb5113ffb6b65a7e9b937a81fa96cf5064462 # v2.1.11 + uses: google-github-actions/auth@b7593ed2efd1c1617e1b0254da33b86225adb2a5 # v2.1.12 with: - workload_identity_provider: projects/573722524737/locations/global/workloadIdentityPools/github/providers/github - service_account: coder-ci@coder-dogfood.iam.gserviceaccount.com + workload_identity_provider: ${{ vars.GCP_WORKLOAD_ID_PROVIDER }} + service_account: ${{ vars.GCP_SERVICE_ACCOUNT }} - name: Terraform init and validate run: | @@ -152,12 +161,12 @@ jobs: - name: Get short commit SHA if: github.ref == 'refs/heads/main' id: vars - run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT + run: echo "sha_short=$(git rev-parse --short HEAD)" >> "$GITHUB_OUTPUT" - name: Get latest commit title if: github.ref == 'refs/heads/main' id: message - run: echo "pr_title=$(git log --format=%s -n 1 ${{ github.sha }})" >> $GITHUB_OUTPUT + run: echo "pr_title=$(git log --format=%s -n 1 ${{ github.sha }})" >> "$GITHUB_OUTPUT" - name: "Push template" if: github.ref == 'refs/heads/main' @@ -169,6 +178,7 @@ jobs: CODER_URL: https://dev.coder.com CODER_SESSION_TOKEN: ${{ secrets.CODER_SESSION_TOKEN }} # Template source & details + TF_VAR_CODER_DOGFOOD_ANTHROPIC_API_KEY: ${{ secrets.CODER_DOGFOOD_ANTHROPIC_API_KEY }} TF_VAR_CODER_TEMPLATE_NAME: ${{ secrets.CODER_TEMPLATE_NAME }} TF_VAR_CODER_TEMPLATE_VERSION: ${{ steps.vars.outputs.sha_short }} TF_VAR_CODER_TEMPLATE_DIR: ./coder diff --git a/.github/workflows/nightly-gauntlet.yaml b/.github/workflows/nightly-gauntlet.yaml index a8e8fc957ee37..5769b3b652c44 100644 --- a/.github/workflows/nightly-gauntlet.yaml +++ b/.github/workflows/nightly-gauntlet.yaml @@ -27,7 +27,7 @@ jobs: - windows-2022 steps: - name: Harden Runner - uses: step-security/harden-runner@6c439dc8bdf85cadbbce9ed30d1c7b959517bc49 # v2.12.2 + uses: step-security/harden-runner@ec9f2d5744a09debf3a187a3f4f675c53b671911 # v2.13.0 with: egress-policy: audit @@ -37,6 +37,11 @@ jobs: - name: Disable Spotlight Indexing if: runner.os == 'macOS' run: | + enabled=$(sudo mdutil -a -s | { grep -Fc "Indexing enabled" || true; }) + if [ "$enabled" -eq 0 ]; then + echo "Spotlight indexing is already disabled" + exit 0 + fi sudo mdutil -a -i off sudo mdutil -X / sudo launchctl bootout system /System/Library/LaunchDaemons/com.apple.metadata.mds.plist @@ -45,12 +50,13 @@ jobs: # a separate repository to allow its use before actions/checkout. - name: Setup RAM Disks if: runner.os == 'Windows' - uses: coder/setup-ramdisk-action@e1100847ab2d7bcd9d14bcda8f2d1b0f07b36f1b + uses: coder/setup-ramdisk-action@e1100847ab2d7bcd9d14bcda8f2d1b0f07b36f1b # v0.1.0 - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 1 + persist-credentials: false - name: Setup Go uses: ./.github/actions/setup-go @@ -180,15 +186,15 @@ jobs: "fields": [ { "type": "mrkdwn", - "text": "*Workflow:*\n${{ github.workflow }}" + "text": "*Workflow:*\n'"${GITHUB_WORKFLOW}"'" }, { "type": "mrkdwn", - "text": "*Committer:*\n${{ github.actor }}" + "text": "*Committer:*\n'"${GITHUB_ACTOR}"'" }, { "type": "mrkdwn", - "text": "*Commit:*\n${{ github.sha }}" + "text": "*Commit:*\n'"${GITHUB_SHA}"'" } ] }, @@ -196,8 +202,18 @@ jobs: "type": "section", "text": { "type": "mrkdwn", - "text": "*View failure:* <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Click here>" + "text": "*View failure:* <'"${RUN_URL}"'|Click here>" + } + }, + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": "<@U08TJ4YNCA3> investigate this CI failure. Check logs, search for existing issues, use git blame to find who last modified failing tests, create issue in coder/internal (not public repo), use title format \"flake: TestName\" for flaky tests, and assign to the person from git blame." } } ] - }' ${{ secrets.CI_FAILURE_SLACK_WEBHOOK }} + }' "${SLACK_WEBHOOK}" + env: + SLACK_WEBHOOK: ${{ secrets.CI_FAILURE_SLACK_WEBHOOK }} + RUN_URL: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" diff --git a/.github/workflows/pr-auto-assign.yaml b/.github/workflows/pr-auto-assign.yaml index 746b471f57b39..7e2f6441de383 100644 --- a/.github/workflows/pr-auto-assign.yaml +++ b/.github/workflows/pr-auto-assign.yaml @@ -3,6 +3,7 @@ name: PR Auto Assign on: + # zizmor: ignore[dangerous-triggers] We explicitly want to run on pull_request_target. pull_request_target: types: [opened] diff --git a/.github/workflows/pr-cleanup.yaml b/.github/workflows/pr-cleanup.yaml index 4c3023990efe5..32e260b112dea 100644 --- a/.github/workflows/pr-cleanup.yaml +++ b/.github/workflows/pr-cleanup.yaml @@ -27,10 +27,12 @@ jobs: id: pr_number run: | if [ -n "${{ github.event.pull_request.number }}" ]; then - echo "PR_NUMBER=${{ github.event.pull_request.number }}" >> $GITHUB_OUTPUT + echo "PR_NUMBER=${{ github.event.pull_request.number }}" >> "$GITHUB_OUTPUT" else - echo "PR_NUMBER=${{ github.event.inputs.pr_number }}" >> $GITHUB_OUTPUT + echo "PR_NUMBER=${PR_NUMBER}" >> "$GITHUB_OUTPUT" fi + env: + PR_NUMBER: ${{ github.event.inputs.pr_number }} - name: Delete image continue-on-error: true @@ -51,17 +53,21 @@ jobs: - name: Delete helm release run: | set -euo pipefail - helm delete --namespace "pr${{ steps.pr_number.outputs.PR_NUMBER }}" "pr${{ steps.pr_number.outputs.PR_NUMBER }}" || echo "helm release not found" + helm delete --namespace "pr${PR_NUMBER}" "pr${PR_NUMBER}" || echo "helm release not found" + env: + PR_NUMBER: ${{ steps.pr_number.outputs.PR_NUMBER }} - name: "Remove PR namespace" run: | - kubectl delete namespace "pr${{ steps.pr_number.outputs.PR_NUMBER }}" || echo "namespace not found" + kubectl delete namespace "pr${PR_NUMBER}" || echo "namespace not found" + env: + PR_NUMBER: ${{ steps.pr_number.outputs.PR_NUMBER }} - name: "Remove DNS records" run: | set -euo pipefail # Get identifier for the record - record_id=$(curl -X GET "https://api.cloudflare.com/client/v4/zones/${{ secrets.PR_DEPLOYMENTS_ZONE_ID }}/dns_records?name=%2A.pr${{ steps.pr_number.outputs.PR_NUMBER }}.${{ secrets.PR_DEPLOYMENTS_DOMAIN }}" \ + record_id=$(curl -X GET "https://api.cloudflare.com/client/v4/zones/${{ secrets.PR_DEPLOYMENTS_ZONE_ID }}/dns_records?name=%2A.pr${PR_NUMBER}.${{ secrets.PR_DEPLOYMENTS_DOMAIN }}" \ -H "Authorization: Bearer ${{ secrets.PR_DEPLOYMENTS_CLOUDFLARE_API_TOKEN }}" \ -H "Content-Type:application/json" | jq -r '.result[0].id') || echo "DNS record not found" @@ -73,9 +79,13 @@ jobs: -H "Authorization: Bearer ${{ secrets.PR_DEPLOYMENTS_CLOUDFLARE_API_TOKEN }}" \ -H "Content-Type:application/json" | jq -r '.success' ) || echo "DNS record not found" + env: + PR_NUMBER: ${{ steps.pr_number.outputs.PR_NUMBER }} - name: "Delete certificate" if: ${{ github.event.pull_request.merged == true }} run: | set -euxo pipefail - kubectl delete certificate "pr${{ steps.pr_number.outputs.PR_NUMBER }}-tls" -n pr-deployment-certs || echo "certificate not found" + kubectl delete certificate "pr${PR_NUMBER}-tls" -n pr-deployment-certs || echo "certificate not found" + env: + PR_NUMBER: ${{ steps.pr_number.outputs.PR_NUMBER }} diff --git a/.github/workflows/pr-deploy.yaml b/.github/workflows/pr-deploy.yaml index c82861db22094..ccf7511eafc78 100644 --- a/.github/workflows/pr-deploy.yaml +++ b/.github/workflows/pr-deploy.yaml @@ -44,7 +44,9 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 + with: + persist-credentials: false - name: Check if PR is open id: check_pr @@ -55,7 +57,7 @@ jobs: echo "PR doesn't exist or is closed." pr_open=false fi - echo "pr_open=$pr_open" >> $GITHUB_OUTPUT + echo "pr_open=$pr_open" >> "$GITHUB_OUTPUT" env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -79,9 +81,10 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 0 + persist-credentials: false - name: Get PR number, title, and branch name id: pr_info @@ -90,9 +93,11 @@ jobs: PR_NUMBER=$(gh pr view --json number | jq -r '.number') PR_TITLE=$(gh pr view --json title | jq -r '.title') PR_URL=$(gh pr view --json url | jq -r '.url') - echo "PR_URL=$PR_URL" >> $GITHUB_OUTPUT - echo "PR_NUMBER=$PR_NUMBER" >> $GITHUB_OUTPUT - echo "PR_TITLE=$PR_TITLE" >> $GITHUB_OUTPUT + { + echo "PR_URL=$PR_URL" + echo "PR_NUMBER=$PR_NUMBER" + echo "PR_TITLE=$PR_TITLE" + } >> "$GITHUB_OUTPUT" env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -100,8 +105,8 @@ jobs: id: set_tags run: | set -euo pipefail - echo "CODER_BASE_IMAGE_TAG=$CODER_BASE_IMAGE_TAG" >> $GITHUB_OUTPUT - echo "CODER_IMAGE_TAG=$CODER_IMAGE_TAG" >> $GITHUB_OUTPUT + echo "CODER_BASE_IMAGE_TAG=$CODER_BASE_IMAGE_TAG" >> "$GITHUB_OUTPUT" + echo "CODER_IMAGE_TAG=$CODER_IMAGE_TAG" >> "$GITHUB_OUTPUT" env: CODER_BASE_IMAGE_TAG: ghcr.io/coder/coder-preview-base:pr${{ steps.pr_info.outputs.PR_NUMBER }} CODER_IMAGE_TAG: ghcr.io/coder/coder-preview:pr${{ steps.pr_info.outputs.PR_NUMBER }} @@ -118,14 +123,16 @@ jobs: id: check_deployment run: | set -euo pipefail - if helm status "pr${{ steps.pr_info.outputs.PR_NUMBER }}" --namespace "pr${{ steps.pr_info.outputs.PR_NUMBER }}" > /dev/null 2>&1; then + if helm status "pr${PR_NUMBER}" --namespace "pr${PR_NUMBER}" > /dev/null 2>&1; then echo "Deployment already exists. Skipping deployment." NEW=false else echo "Deployment doesn't exist." NEW=true fi - echo "NEW=$NEW" >> $GITHUB_OUTPUT + echo "NEW=$NEW" >> "$GITHUB_OUTPUT" + env: + PR_NUMBER: ${{ steps.pr_info.outputs.PR_NUMBER }} - name: Check changed files uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 @@ -154,17 +161,20 @@ jobs: - name: Print number of changed files run: | set -euo pipefail - echo "Total number of changed files: ${{ steps.filter.outputs.all_count }}" - echo "Number of ignored files: ${{ steps.filter.outputs.ignored_count }}" + echo "Total number of changed files: ${ALL_COUNT}" + echo "Number of ignored files: ${IGNORED_COUNT}" + env: + ALL_COUNT: ${{ steps.filter.outputs.all_count }} + IGNORED_COUNT: ${{ steps.filter.outputs.ignored_count }} - name: Build conditionals id: build_conditionals run: | set -euo pipefail # build if the workflow is manually triggered and the deployment doesn't exist (first build or force rebuild) - echo "first_or_force_build=${{ (github.event_name == 'workflow_dispatch' && steps.check_deployment.outputs.NEW == 'true') || github.event.inputs.build == 'true' }}" >> $GITHUB_OUTPUT + echo "first_or_force_build=${{ (github.event_name == 'workflow_dispatch' && steps.check_deployment.outputs.NEW == 'true') || github.event.inputs.build == 'true' }}" >> "$GITHUB_OUTPUT" # build if the deployment already exist and there are changes in the files that we care about (automatic updates) - echo "automatic_rebuild=${{ steps.check_deployment.outputs.NEW == 'false' && steps.filter.outputs.all_count > steps.filter.outputs.ignored_count }}" >> $GITHUB_OUTPUT + echo "automatic_rebuild=${{ steps.check_deployment.outputs.NEW == 'false' && steps.filter.outputs.all_count > steps.filter.outputs.ignored_count }}" >> "$GITHUB_OUTPUT" comment-pr: needs: get_info @@ -223,9 +233,10 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 0 + persist-credentials: false - name: Setup Node uses: ./.github/actions/setup-node @@ -237,7 +248,7 @@ jobs: uses: ./.github/actions/setup-sqlc - name: GHCR Login - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 with: registry: ghcr.io username: ${{ github.actor }} @@ -250,12 +261,13 @@ jobs: make gen/mark-fresh export DOCKER_IMAGE_NO_PREREQUISITES=true version="$(./scripts/version.sh)" - export CODER_IMAGE_BUILD_BASE_TAG="$(CODER_IMAGE_BASE=coder-base ./scripts/image_tag.sh --version "$version")" + CODER_IMAGE_BUILD_BASE_TAG="$(CODER_IMAGE_BASE=coder-base ./scripts/image_tag.sh --version "$version")" + export CODER_IMAGE_BUILD_BASE_TAG make -j build/coder_linux_amd64 ./scripts/build_docker.sh \ --arch amd64 \ - --target ${{ env.CODER_IMAGE_TAG }} \ - --version $version \ + --target "${CODER_IMAGE_TAG}" \ + --version "$version" \ --push \ build/coder_linux_amd64 @@ -293,13 +305,13 @@ jobs: set -euo pipefail foundTag=$( gh api /orgs/coder/packages/container/coder-preview/versions | - jq -r --arg tag "pr${{ env.PR_NUMBER }}" '.[] | + jq -r --arg tag "pr${PR_NUMBER}" '.[] | select(.metadata.container.tags == [$tag]) | .metadata.container.tags[0]' ) if [ -z "$foundTag" ]; then echo "Image not found" - echo "${{ env.CODER_IMAGE_TAG }} not found in ghcr.io/coder/coder-preview" + echo "${CODER_IMAGE_TAG} not found in ghcr.io/coder/coder-preview" exit 1 else echo "Image found" @@ -314,40 +326,42 @@ jobs: curl -X POST "https://api.cloudflare.com/client/v4/zones/${{ secrets.PR_DEPLOYMENTS_ZONE_ID }}/dns_records" \ -H "Authorization: Bearer ${{ secrets.PR_DEPLOYMENTS_CLOUDFLARE_API_TOKEN }}" \ -H "Content-Type:application/json" \ - --data '{"type":"CNAME","name":"*.${{ env.PR_HOSTNAME }}","content":"${{ env.PR_HOSTNAME }}","ttl":1,"proxied":false}' + --data '{"type":"CNAME","name":"*.'"${PR_HOSTNAME}"'","content":"'"${PR_HOSTNAME}"'","ttl":1,"proxied":false}' - name: Create PR namespace if: needs.get_info.outputs.NEW == 'true' || github.event.inputs.deploy == 'true' run: | set -euo pipefail # try to delete the namespace, but don't fail if it doesn't exist - kubectl delete namespace "pr${{ env.PR_NUMBER }}" || true - kubectl create namespace "pr${{ env.PR_NUMBER }}" + kubectl delete namespace "pr${PR_NUMBER}" || true + kubectl create namespace "pr${PR_NUMBER}" - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 + with: + persist-credentials: false - name: Check and Create Certificate if: needs.get_info.outputs.NEW == 'true' || github.event.inputs.deploy == 'true' run: | # Using kubectl to check if a Certificate resource already exists # we are doing this to avoid letsenrypt rate limits - if ! kubectl get certificate pr${{ env.PR_NUMBER }}-tls -n pr-deployment-certs > /dev/null 2>&1; then + if ! kubectl get certificate "pr${PR_NUMBER}-tls" -n pr-deployment-certs > /dev/null 2>&1; then echo "Certificate doesn't exist. Creating a new one." envsubst < ./.github/pr-deployments/certificate.yaml | kubectl apply -f - else echo "Certificate exists. Skipping certificate creation." fi - echo "Copy certificate from pr-deployment-certs to pr${{ env.PR_NUMBER }} namespace" - until kubectl get secret pr${{ env.PR_NUMBER }}-tls -n pr-deployment-certs &> /dev/null + echo "Copy certificate from pr-deployment-certs to pr${PR_NUMBER} namespace" + until kubectl get secret "pr${PR_NUMBER}-tls" -n pr-deployment-certs &> /dev/null do - echo "Waiting for secret pr${{ env.PR_NUMBER }}-tls to be created..." + echo "Waiting for secret pr${PR_NUMBER}-tls to be created..." sleep 5 done ( - kubectl get secret pr${{ env.PR_NUMBER }}-tls -n pr-deployment-certs -o json | + kubectl get secret "pr${PR_NUMBER}-tls" -n pr-deployment-certs -o json | jq 'del(.metadata.namespace,.metadata.creationTimestamp,.metadata.resourceVersion,.metadata.selfLink,.metadata.uid,.metadata.managedFields)' | - kubectl -n pr${{ env.PR_NUMBER }} apply -f - + kubectl -n "pr${PR_NUMBER}" apply -f - ) - name: Set up PostgreSQL database @@ -355,13 +369,13 @@ jobs: run: | helm repo add bitnami https://charts.bitnami.com/bitnami helm install coder-db bitnami/postgresql \ - --namespace pr${{ env.PR_NUMBER }} \ + --namespace "pr${PR_NUMBER}" \ --set auth.username=coder \ --set auth.password=coder \ --set auth.database=coder \ --set persistence.size=10Gi - kubectl create secret generic coder-db-url -n pr${{ env.PR_NUMBER }} \ - --from-literal=url="postgres://coder:coder@coder-db-postgresql.pr${{ env.PR_NUMBER }}.svc.cluster.local:5432/coder?sslmode=disable" + kubectl create secret generic coder-db-url -n "pr${PR_NUMBER}" \ + --from-literal=url="postgres://coder:coder@coder-db-postgresql.pr${PR_NUMBER}.svc.cluster.local:5432/coder?sslmode=disable" - name: Create a service account, role, and rolebinding for the PR namespace if: needs.get_info.outputs.NEW == 'true' || github.event.inputs.deploy == 'true' @@ -383,8 +397,8 @@ jobs: run: | set -euo pipefail helm dependency update --skip-refresh ./helm/coder - helm upgrade --install "pr${{ env.PR_NUMBER }}" ./helm/coder \ - --namespace "pr${{ env.PR_NUMBER }}" \ + helm upgrade --install "pr${PR_NUMBER}" ./helm/coder \ + --namespace "pr${PR_NUMBER}" \ --values ./pr-deploy-values.yaml \ --force @@ -393,8 +407,8 @@ jobs: run: | helm repo add coder-logstream-kube https://helm.coder.com/logstream-kube helm upgrade --install coder-logstream-kube coder-logstream-kube/coder-logstream-kube \ - --namespace "pr${{ env.PR_NUMBER }}" \ - --set url="https://${{ env.PR_HOSTNAME }}" + --namespace "pr${PR_NUMBER}" \ + --set url="https://${PR_HOSTNAME}" - name: Get Coder binary if: needs.get_info.outputs.NEW == 'true' || github.event.inputs.deploy == 'true' @@ -402,16 +416,16 @@ jobs: set -euo pipefail DEST="${HOME}/coder" - URL="https://${{ env.PR_HOSTNAME }}/bin/coder-linux-amd64" + URL="https://${PR_HOSTNAME}/bin/coder-linux-amd64" - mkdir -p "$(dirname ${DEST})" + mkdir -p "$(dirname "$DEST")" COUNT=0 - until $(curl --output /dev/null --silent --head --fail "$URL"); do + until curl --output /dev/null --silent --head --fail "$URL"; do printf '.' sleep 5 COUNT=$((COUNT+1)) - if [ $COUNT -ge 60 ]; then + if [ "$COUNT" -ge 60 ]; then echo "Timed out waiting for URL to be available" exit 1 fi @@ -420,7 +434,7 @@ jobs: curl -fsSL "$URL" -o "${DEST}" chmod +x "${DEST}" "${DEST}" version - mv "${DEST}" /usr/local/bin/coder + sudo mv "${DEST}" /usr/local/bin/coder - name: Create first user if: needs.get_info.outputs.NEW == 'true' || github.event.inputs.deploy == 'true' @@ -435,24 +449,24 @@ jobs: # add mask so that the password is not printed to the logs echo "::add-mask::$password" - echo "password=$password" >> $GITHUB_OUTPUT + echo "password=$password" >> "$GITHUB_OUTPUT" coder login \ - --first-user-username pr${{ env.PR_NUMBER }}-admin \ - --first-user-email pr${{ env.PR_NUMBER }}@coder.com \ - --first-user-password $password \ + --first-user-username "pr${PR_NUMBER}-admin" \ + --first-user-email "pr${PR_NUMBER}@coder.com" \ + --first-user-password "$password" \ --first-user-trial=false \ --use-token-as-session \ - https://${{ env.PR_HOSTNAME }} + "https://${PR_HOSTNAME}" # Create a user for the github.actor # TODO: update once https://github.com/coder/coder/issues/15466 is resolved # coder users create \ - # --username ${{ github.actor }} \ + # --username ${GITHUB_ACTOR} \ # --login-type github # promote the user to admin role - # coder org members edit-role ${{ github.actor }} organization-admin + # coder org members edit-role ${GITHUB_ACTOR} organization-admin # TODO: update once https://github.com/coder/internal/issues/207 is resolved - name: Send Slack notification @@ -461,17 +475,19 @@ jobs: curl -s -o /dev/null -X POST -H 'Content-type: application/json' \ -d \ '{ - "pr_number": "'"${{ env.PR_NUMBER }}"'", - "pr_url": "'"${{ env.PR_URL }}"'", - "pr_title": "'"${{ env.PR_TITLE }}"'", - "pr_access_url": "'"https://${{ env.PR_HOSTNAME }}"'", - "pr_username": "'"pr${{ env.PR_NUMBER }}-admin"'", - "pr_email": "'"pr${{ env.PR_NUMBER }}@coder.com"'", - "pr_password": "'"${{ steps.setup_deployment.outputs.password }}"'", - "pr_actor": "'"${{ github.actor }}"'" + "pr_number": "'"${PR_NUMBER}"'", + "pr_url": "'"${PR_URL}"'", + "pr_title": "'"${PR_TITLE}"'", + "pr_access_url": "'"https://${PR_HOSTNAME}"'", + "pr_username": "'"pr${PR_NUMBER}-admin"'", + "pr_email": "'"pr${PR_NUMBER}@coder.com"'", + "pr_password": "'"${PASSWORD}"'", + "pr_actor": "'"${GITHUB_ACTOR}"'" }' \ ${{ secrets.PR_DEPLOYMENTS_SLACK_WEBHOOK }} echo "Slack notification sent" + env: + PASSWORD: ${{ steps.setup_deployment.outputs.password }} - name: Find Comment uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0 @@ -504,7 +520,7 @@ jobs: run: | set -euo pipefail cd .github/pr-deployments/template - coder templates push -y --variable namespace=pr${{ env.PR_NUMBER }} kubernetes + coder templates push -y --variable "namespace=pr${PR_NUMBER}" kubernetes # Create workspace coder create --template="kubernetes" kube --parameter cpu=2 --parameter memory=4 --parameter home_disk_size=2 -y diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 9feaf72b938ff..f4f9c8f317664 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -32,15 +32,43 @@ env: CODER_RELEASE_NOTES: ${{ inputs.release_notes }} jobs: + # Only allow maintainers/admins to release. + check-perms: + runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} + steps: + - name: Allow only maintainers/admins + uses: actions/github-script@v7.0.1 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const {data} = await github.rest.repos.getCollaboratorPermissionLevel({ + owner: context.repo.owner, + repo: context.repo.repo, + username: context.actor + }); + const role = data.role_name || data.user?.role_name || data.permission; + const perms = data.user?.permissions || {}; + core.info(`Actor ${context.actor} permission=${data.permission}, role_name=${role}`); + + const allowed = + role === 'admin' || + role === 'maintain' || + perms.admin === true || + perms.maintain === true; + + if (!allowed) core.setFailed('Denied: requires maintain or admin'); + # build-dylib is a separate job to build the dylib on macOS. build-dylib: runs-on: ${{ github.repository_owner == 'coder' && 'depot-macos-latest' || 'macos-latest' }} + needs: check-perms steps: # Harden Runner doesn't work on macOS. - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 0 + persist-credentials: false # If the event that triggered the build was an annotated tag (which our # tags are supposed to be), actions/checkout has a bug where the tag in @@ -53,14 +81,16 @@ jobs: - name: Setup build tools run: | brew install bash gnu-getopt make - echo "$(brew --prefix bash)/bin" >> $GITHUB_PATH - echo "$(brew --prefix gnu-getopt)/bin" >> $GITHUB_PATH - echo "$(brew --prefix make)/libexec/gnubin" >> $GITHUB_PATH + { + echo "$(brew --prefix bash)/bin" + echo "$(brew --prefix gnu-getopt)/bin" + echo "$(brew --prefix make)/libexec/gnubin" + } >> "$GITHUB_PATH" - name: Switch XCode Version uses: maxim-lobanov/setup-xcode@60606e260d2fc5762a71e64e74b2174e8ea3c8bd # v1.6.0 with: - xcode-version: "16.0.0" + xcode-version: "16.1.0" - name: Setup Go uses: ./.github/actions/setup-go @@ -114,7 +144,7 @@ jobs: release: name: Build and publish - needs: build-dylib + needs: [build-dylib, check-perms] runs-on: ${{ github.repository_owner == 'coder' && 'depot-ubuntu-22.04-8' || 'ubuntu-latest' }} permissions: # Required to publish a release @@ -139,9 +169,10 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 0 + persist-credentials: false # If the event that triggered the build was an annotated tag (which our # tags are supposed to be), actions/checkout has a bug where the tag in @@ -156,9 +187,9 @@ jobs: run: | set -euo pipefail version="$(./scripts/version.sh)" - echo "version=$version" >> $GITHUB_OUTPUT + echo "version=$version" >> "$GITHUB_OUTPUT" # Speed up future version.sh calls. - echo "CODER_FORCE_VERSION=$version" >> $GITHUB_ENV + echo "CODER_FORCE_VERSION=$version" >> "$GITHUB_ENV" echo "$version" # Verify that all expectations for a release are met. @@ -200,7 +231,7 @@ jobs: release_notes_file="$(mktemp -t release_notes.XXXXXX)" echo "$CODER_RELEASE_NOTES" > "$release_notes_file" - echo CODER_RELEASE_NOTES_FILE="$release_notes_file" >> $GITHUB_ENV + echo CODER_RELEASE_NOTES_FILE="$release_notes_file" >> "$GITHUB_ENV" - name: Show release notes run: | @@ -208,7 +239,7 @@ jobs: cat "$CODER_RELEASE_NOTES_FILE" - name: Docker Login - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 + uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 with: registry: ghcr.io username: ${{ github.actor }} @@ -286,17 +317,17 @@ jobs: # Setup GCloud for signing Windows binaries. - name: Authenticate to Google Cloud id: gcloud_auth - uses: google-github-actions/auth@140bb5113ffb6b65a7e9b937a81fa96cf5064462 # v2.1.11 + uses: google-github-actions/auth@b7593ed2efd1c1617e1b0254da33b86225adb2a5 # v2.1.12 with: - workload_identity_provider: ${{ secrets.GCP_CODE_SIGNING_WORKLOAD_ID_PROVIDER }} - service_account: ${{ secrets.GCP_CODE_SIGNING_SERVICE_ACCOUNT }} + workload_identity_provider: ${{ vars.GCP_CODE_SIGNING_WORKLOAD_ID_PROVIDER }} + service_account: ${{ vars.GCP_CODE_SIGNING_SERVICE_ACCOUNT }} token_format: "access_token" - name: Setup GCloud SDK - uses: google-github-actions/setup-gcloud@6a7c903a70c8625ed6700fa299f5ddb4ca6022e9 # v2.1.5 + uses: google-github-actions/setup-gcloud@cb1e50a9932213ecece00a606661ae9ca44f3397 # v2.2.0 - name: Download dylibs - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 + uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 with: name: dylibs path: ./build @@ -350,9 +381,9 @@ jobs: set -euo pipefail if [[ "${CODER_RELEASE:-}" != *t* ]] || [[ "${CODER_DRY_RUN:-}" == *t* ]]; then # Empty value means use the default and avoid building a fresh one. - echo "tag=" >> $GITHUB_OUTPUT + echo "tag=" >> "$GITHUB_OUTPUT" else - echo "tag=$(CODER_IMAGE_BASE=ghcr.io/coder/coder-base ./scripts/image_tag.sh)" >> $GITHUB_OUTPUT + echo "tag=$(CODER_IMAGE_BASE=ghcr.io/coder/coder-base ./scripts/image_tag.sh)" >> "$GITHUB_OUTPUT" fi - name: Create empty base-build-context directory @@ -387,7 +418,7 @@ jobs: # available immediately for i in {1..10}; do rc=0 - raw_manifests=$(docker buildx imagetools inspect --raw "${{ steps.image-base-tag.outputs.tag }}") || rc=$? + raw_manifests=$(docker buildx imagetools inspect --raw "${IMAGE_TAG}") || rc=$? if [[ "$rc" -eq 0 ]]; then break fi @@ -409,6 +440,8 @@ jobs: echo "$manifests" | grep -q linux/amd64 echo "$manifests" | grep -q linux/arm64 echo "$manifests" | grep -q linux/arm/v7 + env: + IMAGE_TAG: ${{ steps.image-base-tag.outputs.tag }} # GitHub attestation provides SLSA provenance for Docker images, establishing a verifiable # record that these images were built in GitHub Actions with specific inputs and environment. @@ -476,7 +509,7 @@ jobs: # Save multiarch image tag for attestation multiarch_image="$(./scripts/image_tag.sh)" - echo "multiarch_image=${multiarch_image}" >> $GITHUB_OUTPUT + echo "multiarch_image=${multiarch_image}" >> "$GITHUB_OUTPUT" # For debugging, print all docker image tags docker images @@ -484,16 +517,15 @@ jobs: # if the current version is equal to the highest (according to semver) # version in the repo, also create a multi-arch image as ":latest" and # push it - created_latest_tag=false if [[ "$(git tag | grep '^v' | grep -vE '(rc|dev|-|\+|\/)' | sort -r --version-sort | head -n1)" == "v$(./scripts/version.sh)" ]]; then + # shellcheck disable=SC2046 ./scripts/build_docker_multiarch.sh \ --push \ --target "$(./scripts/image_tag.sh --version latest)" \ $(cat build/coder_"$version"_linux_{amd64,arm64,armv7}.tag) - created_latest_tag=true - echo "created_latest_tag=true" >> $GITHUB_OUTPUT + echo "created_latest_tag=true" >> "$GITHUB_OUTPUT" else - echo "created_latest_tag=false" >> $GITHUB_OUTPUT + echo "created_latest_tag=false" >> "$GITHUB_OUTPUT" fi env: CODER_BASE_IMAGE_TAG: ${{ steps.image-base-tag.outputs.tag }} @@ -501,24 +533,27 @@ jobs: - name: SBOM Generation and Attestation if: ${{ !inputs.dry_run }} env: - COSIGN_EXPERIMENTAL: "1" + COSIGN_EXPERIMENTAL: '1' + MULTIARCH_IMAGE: ${{ steps.build_docker.outputs.multiarch_image }} + VERSION: ${{ steps.version.outputs.version }} + CREATED_LATEST_TAG: ${{ steps.build_docker.outputs.created_latest_tag }} run: | set -euxo pipefail # Generate SBOM for multi-arch image with version in filename - echo "Generating SBOM for multi-arch image: ${{ steps.build_docker.outputs.multiarch_image }}" - syft "${{ steps.build_docker.outputs.multiarch_image }}" -o spdx-json > coder_${{ steps.version.outputs.version }}_sbom.spdx.json + echo "Generating SBOM for multi-arch image: ${MULTIARCH_IMAGE}" + syft "${MULTIARCH_IMAGE}" -o spdx-json > "coder_${VERSION}_sbom.spdx.json" # Attest SBOM to multi-arch image - echo "Attesting SBOM to multi-arch image: ${{ steps.build_docker.outputs.multiarch_image }}" - cosign clean --force=true "${{ steps.build_docker.outputs.multiarch_image }}" + echo "Attesting SBOM to multi-arch image: ${MULTIARCH_IMAGE}" + cosign clean --force=true "${MULTIARCH_IMAGE}" cosign attest --type spdxjson \ - --predicate coder_${{ steps.version.outputs.version }}_sbom.spdx.json \ + --predicate "coder_${VERSION}_sbom.spdx.json" \ --yes \ - "${{ steps.build_docker.outputs.multiarch_image }}" + "${MULTIARCH_IMAGE}" # If latest tag was created, also attest it - if [[ "${{ steps.build_docker.outputs.created_latest_tag }}" == "true" ]]; then + if [[ "${CREATED_LATEST_TAG}" == "true" ]]; then latest_tag="$(./scripts/image_tag.sh --version latest)" echo "Generating SBOM for latest image: ${latest_tag}" syft "${latest_tag}" -o spdx-json > coder_latest_sbom.spdx.json @@ -572,7 +607,7 @@ jobs: - name: Get latest tag name id: latest_tag if: ${{ !inputs.dry_run && steps.build_docker.outputs.created_latest_tag == 'true' }} - run: echo "tag=$(./scripts/image_tag.sh --version latest)" >> $GITHUB_OUTPUT + run: echo "tag=$(./scripts/image_tag.sh --version latest)" >> "$GITHUB_OUTPUT" # If this is the highest version according to semver, also attest the "latest" tag - name: GitHub Attestation for "latest" Docker image @@ -615,7 +650,7 @@ jobs: # Report attestation failures but don't fail the workflow - name: Check attestation status if: ${{ !inputs.dry_run }} - run: | + run: | # zizmor: ignore[template-injection] We're just reading steps.attest_x.outcome here, no risk of injection if [[ "${{ steps.attest_base.outcome }}" == "failure" && "${{ steps.attest_base.conclusion }}" != "skipped" ]]; then echo "::warning::GitHub attestation for base image failed" fi @@ -635,27 +670,28 @@ jobs: run: ls -lh build - name: Publish Coder CLI binaries and detached signatures to GCS - if: ${{ !inputs.dry_run && github.ref == 'refs/heads/main' && github.repository_owner == 'coder'}} + if: ${{ !inputs.dry_run }} run: | set -euxo pipefail version="$(./scripts/version.sh)" - binaries=( - "coder-darwin-amd64" - "coder-darwin-arm64" - "coder-linux-amd64" - "coder-linux-arm64" - "coder-linux-armv7" - "coder-windows-amd64.exe" - "coder-windows-arm64.exe" - ) - - for binary in "${binaries[@]}"; do - detached_signature="${binary}.asc" - gcloud storage cp "./site/out/bin/${binary}" "gs://releases.coder.com/coder-cli/${version}/${binary}" - gcloud storage cp "./site/out/bin/${detached_signature}" "gs://releases.coder.com/coder-cli/${version}/${detached_signature}" - done + # Source array of slim binaries + declare -A binaries + binaries["coder-darwin-amd64"]="coder-slim_${version}_darwin_amd64" + binaries["coder-darwin-arm64"]="coder-slim_${version}_darwin_arm64" + binaries["coder-linux-amd64"]="coder-slim_${version}_linux_amd64" + binaries["coder-linux-arm64"]="coder-slim_${version}_linux_arm64" + binaries["coder-linux-armv7"]="coder-slim_${version}_linux_armv7" + binaries["coder-windows-amd64.exe"]="coder-slim_${version}_windows_amd64.exe" + binaries["coder-windows-arm64.exe"]="coder-slim_${version}_windows_arm64.exe" + + for cli_name in "${!binaries[@]}"; do + slim_binary="${binaries[$cli_name]}" + detached_signature="${slim_binary}.asc" + gcloud storage cp "./build/${slim_binary}" "gs://releases.coder.com/coder-cli/${version}/${cli_name}" + gcloud storage cp "./build/${detached_signature}" "gs://releases.coder.com/coder-cli/${version}/${cli_name}.asc" + done - name: Publish release run: | @@ -679,11 +715,11 @@ jobs: ./build/*.apk ./build/*.deb ./build/*.rpm - ./coder_${{ steps.version.outputs.version }}_sbom.spdx.json + "./coder_${VERSION}_sbom.spdx.json" ) # Only include the latest SBOM file if it was created - if [[ "${{ steps.build_docker.outputs.created_latest_tag }}" == "true" ]]; then + if [[ "${CREATED_LATEST_TAG}" == "true" ]]; then files+=(./coder_latest_sbom.spdx.json) fi @@ -694,15 +730,17 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} CODER_GPG_RELEASE_KEY_BASE64: ${{ secrets.GPG_RELEASE_KEY_BASE64 }} + VERSION: ${{ steps.version.outputs.version }} + CREATED_LATEST_TAG: ${{ steps.build_docker.outputs.created_latest_tag }} - name: Authenticate to Google Cloud - uses: google-github-actions/auth@140bb5113ffb6b65a7e9b937a81fa96cf5064462 # v2.1.11 + uses: google-github-actions/auth@b7593ed2efd1c1617e1b0254da33b86225adb2a5 # v2.1.12 with: - workload_identity_provider: ${{ secrets.GCP_WORKLOAD_ID_PROVIDER }} - service_account: ${{ secrets.GCP_SERVICE_ACCOUNT }} + workload_identity_provider: ${{ vars.GCP_WORKLOAD_ID_PROVIDER }} + service_account: ${{ vars.GCP_SERVICE_ACCOUNT }} - name: Setup GCloud SDK - uses: google-github-actions/setup-gcloud@6a7c903a70c8625ed6700fa299f5ddb4ca6022e9 # 2.1.5 + uses: google-github-actions/setup-gcloud@cb1e50a9932213ecece00a606661ae9ca44f3397 # 2.2.0 - name: Publish Helm Chart if: ${{ !inputs.dry_run }} @@ -714,12 +752,12 @@ jobs: cp "build/provisioner_helm_${version}.tgz" build/helm gsutil cp gs://helm.coder.com/v2/index.yaml build/helm/index.yaml helm repo index build/helm --url https://helm.coder.com/v2 --merge build/helm/index.yaml - gsutil -h "Cache-Control:no-cache,max-age=0" cp build/helm/coder_helm_${version}.tgz gs://helm.coder.com/v2 - gsutil -h "Cache-Control:no-cache,max-age=0" cp build/helm/provisioner_helm_${version}.tgz gs://helm.coder.com/v2 - gsutil -h "Cache-Control:no-cache,max-age=0" cp build/helm/index.yaml gs://helm.coder.com/v2 - gsutil -h "Cache-Control:no-cache,max-age=0" cp helm/artifacthub-repo.yml gs://helm.coder.com/v2 - helm push build/coder_helm_${version}.tgz oci://ghcr.io/coder/chart - helm push build/provisioner_helm_${version}.tgz oci://ghcr.io/coder/chart + gsutil -h "Cache-Control:no-cache,max-age=0" cp "build/helm/coder_helm_${version}.tgz" gs://helm.coder.com/v2 + gsutil -h "Cache-Control:no-cache,max-age=0" cp "build/helm/provisioner_helm_${version}.tgz" gs://helm.coder.com/v2 + gsutil -h "Cache-Control:no-cache,max-age=0" cp "build/helm/index.yaml" gs://helm.coder.com/v2 + gsutil -h "Cache-Control:no-cache,max-age=0" cp "helm/artifacthub-repo.yml" gs://helm.coder.com/v2 + helm push "build/coder_helm_${version}.tgz" oci://ghcr.io/coder/chart + helm push "build/provisioner_helm_${version}.tgz" oci://ghcr.io/coder/chart - name: Upload artifacts to actions (if dry-run) if: ${{ inputs.dry_run }} @@ -770,12 +808,12 @@ jobs: - name: Update homebrew env: - # Variables used by the `gh` command GH_REPO: coder/homebrew-coder GH_TOKEN: ${{ secrets.CDRCI_GITHUB_TOKEN }} + VERSION: ${{ needs.release.outputs.version }} run: | # Keep version number around for reference, removing any potential leading v - coder_version="$(echo "${{ needs.release.outputs.version }}" | tr -d v)" + coder_version="$(echo "${VERSION}" | tr -d v)" set -euxo pipefail @@ -794,9 +832,9 @@ jobs: wget "$checksums_url" -O checksums.txt # Get the SHAs - darwin_arm_sha="$(cat checksums.txt | grep "darwin_arm64.zip" | awk '{ print $1 }')" - darwin_intel_sha="$(cat checksums.txt | grep "darwin_amd64.zip" | awk '{ print $1 }')" - linux_sha="$(cat checksums.txt | grep "linux_amd64.tar.gz" | awk '{ print $1 }')" + darwin_arm_sha="$(grep "darwin_arm64.zip" checksums.txt | awk '{ print $1 }')" + darwin_intel_sha="$(grep "darwin_amd64.zip" checksums.txt | awk '{ print $1 }')" + linux_sha="$(grep "linux_amd64.tar.gz" checksums.txt | awk '{ print $1 }')" echo "macOS arm64: $darwin_arm_sha" echo "macOS amd64: $darwin_intel_sha" @@ -809,7 +847,7 @@ jobs: # Check if a PR already exists. pr_count="$(gh pr list --search "head:$brew_branch" --json id,closed | jq -r ".[] | select(.closed == false) | .id" | wc -l)" - if [[ "$pr_count" > 0 ]]; then + if [ "$pr_count" -gt 0 ]; then echo "Bailing out as PR already exists" 2>&1 exit 0 fi @@ -828,8 +866,8 @@ jobs: -B master -H "$brew_branch" \ -t "coder $coder_version" \ -b "" \ - -r "${{ github.actor }}" \ - -a "${{ github.actor }}" \ + -r "${GITHUB_ACTOR}" \ + -a "${GITHUB_ACTOR}" \ -b "This automatic PR was triggered by the release of Coder v$coder_version" publish-winget: @@ -850,9 +888,10 @@ jobs: GH_TOKEN: ${{ secrets.CDRCI_GITHUB_TOKEN }} - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 0 + persist-credentials: false # If the event that triggered the build was an annotated tag (which our # tags are supposed to be), actions/checkout has a bug where the tag in @@ -871,7 +910,7 @@ jobs: # The package version is the same as the tag minus the leading "v". # The version in this output already has the leading "v" removed but # we do it again to be safe. - $version = "${{ needs.release.outputs.version }}".Trim('v') + $version = $env:VERSION.Trim('v') $release_assets = gh release view --repo coder/coder "v${version}" --json assets | ` ConvertFrom-Json @@ -903,13 +942,14 @@ jobs: # For wingetcreate. We need a real token since we're pushing a commit # to GitHub and then making a PR in a different repo. WINGET_GH_TOKEN: ${{ secrets.CDRCI_GITHUB_TOKEN }} + VERSION: ${{ needs.release.outputs.version }} - name: Comment on PR run: | # wait 30 seconds Start-Sleep -Seconds 30.0 # Find the PR that wingetcreate just made. - $version = "${{ needs.release.outputs.version }}".Trim('v') + $version = $env:VERSION.Trim('v') $pr_list = gh pr list --repo microsoft/winget-pkgs --search "author:cdrci Coder.Coder version ${version}" --limit 1 --json number | ` ConvertFrom-Json $pr_number = $pr_list[0].number @@ -920,6 +960,7 @@ jobs: # For gh CLI. We need a real token since we're commenting on a PR in a # different repo. GH_TOKEN: ${{ secrets.CDRCI_GITHUB_TOKEN }} + VERSION: ${{ needs.release.outputs.version }} # publish-sqlc pushes the latest schema to sqlc cloud. # At present these pushes cannot be tagged, so the last push is always the latest. @@ -935,9 +976,10 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 1 + persist-credentials: false # We need golang to run the migration main.go - name: Setup Go diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 1e5104310e085..87e9e6271c6ac 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -25,7 +25,7 @@ jobs: egress-policy: audit - name: "Checkout code" - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: persist-credentials: false @@ -47,6 +47,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. - name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@d6bbdef45e766d081b84a2def353b0055f728d3e # v3.29.3 + uses: github/codeql-action/upload-sarif@76621b61decf072c1cee8dd1ce2d2a82d33c17ed # v3.29.5 with: sarif_file: results.sarif diff --git a/.github/workflows/security.yaml b/.github/workflows/security.yaml index d31595c3a8465..e7fde82bf1dce 100644 --- a/.github/workflows/security.yaml +++ b/.github/workflows/security.yaml @@ -32,13 +32,15 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 + with: + persist-credentials: false - name: Setup Go uses: ./.github/actions/setup-go - name: Initialize CodeQL - uses: github/codeql-action/init@d6bbdef45e766d081b84a2def353b0055f728d3e # v3.29.3 + uses: github/codeql-action/init@76621b61decf072c1cee8dd1ce2d2a82d33c17ed # v3.29.5 with: languages: go, javascript @@ -48,7 +50,7 @@ jobs: rm Makefile - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@d6bbdef45e766d081b84a2def353b0055f728d3e # v3.29.3 + uses: github/codeql-action/analyze@76621b61decf072c1cee8dd1ce2d2a82d33c17ed # v3.29.5 - name: Send Slack notification on failure if: ${{ failure() }} @@ -72,9 +74,10 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 with: fetch-depth: 0 + persist-credentials: false - name: Setup Go uses: ./.github/actions/setup-go @@ -134,12 +137,13 @@ jobs: # This environment variables forces scripts/build_docker.sh to build # the base image tag locally instead of using the cached version from # the registry. - export CODER_IMAGE_BUILD_BASE_TAG="$(CODER_IMAGE_BASE=coder-base ./scripts/image_tag.sh --version "$version")" + CODER_IMAGE_BUILD_BASE_TAG="$(CODER_IMAGE_BASE=coder-base ./scripts/image_tag.sh --version "$version")" + export CODER_IMAGE_BUILD_BASE_TAG # We would like to use make -j here, but it doesn't work with the some recent additions # to our code generation. make "$image_job" - echo "image=$(cat "$image_job")" >> $GITHUB_OUTPUT + echo "image=$(cat "$image_job")" >> "$GITHUB_OUTPUT" - name: Run Trivy vulnerability scanner uses: aquasecurity/trivy-action@dc5a429b52fcf669ce959baa2c2dd26090d2a6c4 @@ -150,7 +154,7 @@ jobs: severity: "CRITICAL,HIGH" - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@d6bbdef45e766d081b84a2def353b0055f728d3e # v3.29.3 + uses: github/codeql-action/upload-sarif@76621b61decf072c1cee8dd1ce2d2a82d33c17ed # v3.29.5 with: sarif_file: trivy-results.sarif category: "Trivy" diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml index 00d7eef888833..27ec157fa0f3f 100644 --- a/.github/workflows/stale.yaml +++ b/.github/workflows/stale.yaml @@ -101,7 +101,9 @@ jobs: egress-policy: audit - name: Checkout repository - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 + with: + persist-credentials: false - name: Run delete-old-branches-action uses: beatlabs/delete-old-branches-action@4eeeb8740ff8b3cb310296ddd6b43c3387734588 # v0.0.11 with: diff --git a/.github/workflows/typos.toml b/.github/workflows/typos.toml index 6a9b07b475111..6f475668118c9 100644 --- a/.github/workflows/typos.toml +++ b/.github/workflows/typos.toml @@ -28,6 +28,7 @@ HELO = "HELO" LKE = "LKE" byt = "byt" typ = "typ" +Inferrable = "Inferrable" [files] extend-exclude = [ @@ -47,5 +48,5 @@ extend-exclude = [ "provisioner/terraform/testdata/**", # notifications' golden files confuse the detector because of quoted-printable encoding "coderd/notifications/testdata/**", - "agent/agentcontainers/testdata/devcontainercli/**" + "agent/agentcontainers/testdata/devcontainercli/**", ] diff --git a/.github/workflows/weekly-docs.yaml b/.github/workflows/weekly-docs.yaml index dd83a5629ca83..56f5e799305e8 100644 --- a/.github/workflows/weekly-docs.yaml +++ b/.github/workflows/weekly-docs.yaml @@ -26,7 +26,9 @@ jobs: egress-policy: audit - name: Checkout - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0 + with: + persist-credentials: false - name: Check Markdown links uses: umbrelladocs/action-linkspector@874d01cae9fd488e3077b08952093235bd626977 # v1.3.7 @@ -41,7 +43,10 @@ jobs: - name: Send Slack notification if: failure() && github.event_name == 'schedule' run: | - curl -X POST -H 'Content-type: application/json' -d '{"msg":"Broken links found in the documentation. Please check the logs at ${{ env.LOGS_URL }}"}' ${{ secrets.DOCS_LINK_SLACK_WEBHOOK }} + curl \ + -X POST \ + -H 'Content-type: application/json' \ + -d '{"msg":"Broken links found in the documentation. Please check the logs at '"${LOGS_URL}"'"}' "${{ secrets.DOCS_LINK_SLACK_WEBHOOK }}" echo "Sent Slack notification" env: LOGS_URL: https://github.com/coder/coder/actions/runs/${{ github.run_id }} diff --git a/.vscode/settings.json b/.vscode/settings.json index f2cf72b7d8ae0..7fef4af975bc2 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -49,7 +49,7 @@ "[javascript][javascriptreact][json][jsonc][typescript][typescriptreact]": { "editor.defaultFormatter": "biomejs.biome", "editor.codeActionsOnSave": { - "quickfix.biome": "explicit" + "source.fixAll.biome": "explicit" // "source.organizeImports.biome": "explicit" } }, diff --git a/AGENTS.md b/AGENTS.md new file mode 120000 index 0000000000000..681311eb9cf45 --- /dev/null +++ b/AGENTS.md @@ -0,0 +1 @@ +CLAUDE.md \ No newline at end of file diff --git a/CODEOWNERS b/CODEOWNERS index 4152e5351a4fb..fde24a9d874ed 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -7,7 +7,6 @@ tailnet/proto/ @spikecurtis @johnstcn vpn/vpn.proto @spikecurtis @johnstcn vpn/version.go @spikecurtis @johnstcn - # This caching code is particularly tricky, and one must be very careful when # altering it. coderd/files/ @aslilac @@ -17,3 +16,26 @@ coderd/rbac/ @Emyrk # Mainly dependent on coder/guts, which is maintained by @Emyrk scripts/apitypings/ @Emyrk +scripts/gensite/ @aslilac + +site/ @aslilac @Parkreiner +site/src/hooks/ @Parkreiner +# These rules intentionally do not specify any owners. More specific rules +# override less specific rules, so these files are "ignored" by the site/ rule. +site/e2e/google/protobuf/timestampGenerated.ts +site/e2e/provisionerGenerated.ts +site/src/api/countriesGenerated.ts +site/src/api/rbacresourcesGenerated.ts +site/src/api/typesGenerated.ts +site/src/testHelpers/entities.ts +site/CLAUDE.md + +# The blood and guts of the autostop algorithm, which is quite complex and +# requires elite ball knowledge of most of the scheduling code to make changes +# without inadvertently affecting other parts of the codebase. +coderd/schedule/autostop.go @deansheather @DanielleMaywood + +# Usage tracking code requires intimate knowledge of Tallyman and Metronome, as +# well as guidance from revenue. +coderd/usage/ @deansheather @spikecurtis +enterprise/coderd/usage/ @deansheather @spikecurtis diff --git a/Makefile b/Makefile index bd3f04a4874cd..3974966836881 100644 --- a/Makefile +++ b/Makefile @@ -559,7 +559,9 @@ else endif .PHONY: fmt/markdown -lint: lint/shellcheck lint/go lint/ts lint/examples lint/helm lint/site-icons lint/markdown +# Note: we don't run zizmor in the lint target because it takes a while. CI +# runs it explicitly. +lint: lint/shellcheck lint/go lint/ts lint/examples lint/helm lint/site-icons lint/markdown lint/actions/actionlint .PHONY: lint lint/site-icons: @@ -576,6 +578,7 @@ lint/go: ./scripts/check_codersdk_imports.sh linter_ver=$(shell egrep -o 'GOLANGCI_LINT_VERSION=\S+' dogfood/coder/Dockerfile | cut -d '=' -f 2) go run github.com/golangci/golangci-lint/cmd/golangci-lint@v$$linter_ver run + go run github.com/coder/paralleltestctx/cmd/paralleltestctx@v0.0.1 -custom-funcs="testutil.Context" ./... .PHONY: lint/go lint/examples: @@ -597,6 +600,20 @@ lint/markdown: node_modules/.installed pnpm lint-docs .PHONY: lint/markdown +lint/actions: lint/actions/actionlint lint/actions/zizmor +.PHONY: lint/actions + +lint/actions/actionlint: + go run github.com/rhysd/actionlint/cmd/actionlint@v1.7.7 +.PHONY: lint/actions/actionlint + +lint/actions/zizmor: + ./scripts/zizmor.sh \ + --strict-collection \ + --persona=regular \ + . +.PHONY: lint/actions/zizmor + # All files generated by the database should be added here, and this can be used # as a target for jobs that need to run after the database is generated. DB_GEN_FILES := \ @@ -635,7 +652,8 @@ GEN_FILES := \ coderd/database/pubsub/psmock/psmock.go \ agent/agentcontainers/acmock/acmock.go \ agent/agentcontainers/dcspec/dcspec_gen.go \ - coderd/httpmw/loggermw/loggermock/loggermock.go + coderd/httpmw/loggermw/loggermock/loggermock.go \ + codersdk/workspacesdk/agentconnmock/agentconnmock.go # all gen targets should be added here and to gen/mark-fresh gen: gen/db gen/golden-files $(GEN_FILES) @@ -685,6 +703,7 @@ gen/mark-fresh: agent/agentcontainers/acmock/acmock.go \ agent/agentcontainers/dcspec/dcspec_gen.go \ coderd/httpmw/loggermw/loggermock/loggermock.go \ + codersdk/workspacesdk/agentconnmock/agentconnmock.go \ " for file in $$files; do @@ -728,6 +747,10 @@ coderd/httpmw/loggermw/loggermock/loggermock.go: coderd/httpmw/loggermw/logger.g go generate ./coderd/httpmw/loggermw/loggermock/ touch "$@" +codersdk/workspacesdk/agentconnmock/agentconnmock.go: codersdk/workspacesdk/agentconn.go + go generate ./codersdk/workspacesdk/agentconnmock/ + touch "$@" + agent/agentcontainers/dcspec/dcspec_gen.go: \ node_modules/.installed \ agent/agentcontainers/dcspec/devContainer.base.schema.json \ @@ -935,12 +958,31 @@ else GOTESTSUM_RETRY_FLAGS := endif +# default to 8x8 parallelism to avoid overwhelming our workspaces. Hopefully we can remove these defaults +# when we get our test suite's resource utilization under control. +GOTEST_FLAGS := -v -p $(or $(TEST_NUM_PARALLEL_PACKAGES),"8") -parallel=$(or $(TEST_NUM_PARALLEL_TESTS),"8") + +# The most common use is to set TEST_COUNT=1 to avoid Go's test cache. +ifdef TEST_COUNT +GOTEST_FLAGS += -count=$(TEST_COUNT) +endif + +ifdef TEST_SHORT +GOTEST_FLAGS += -short +endif + +ifdef RUN +GOTEST_FLAGS += -run $(RUN) +endif + +TEST_PACKAGES ?= ./... + test: - $(GIT_FLAGS) gotestsum --format standard-quiet $(GOTESTSUM_RETRY_FLAGS) --packages="./..." -- -v -short -count=1 $(if $(RUN),-run $(RUN)) + $(GIT_FLAGS) gotestsum --format standard-quiet $(GOTESTSUM_RETRY_FLAGS) --packages="$(TEST_PACKAGES)" -- $(GOTEST_FLAGS) .PHONY: test test-cli: - $(GIT_FLAGS) gotestsum --format standard-quiet $(GOTESTSUM_RETRY_FLAGS) --packages="./cli/..." -- -v -short -count=1 + $(MAKE) test TEST_PACKAGES="./cli..." .PHONY: test-cli # sqlc-cloud-is-setup will fail if no SQLc auth token is set. Use this as a diff --git a/agent/agent_test.go b/agent/agent_test.go index d87148be9ad15..d80f5d1982b74 100644 --- a/agent/agent_test.go +++ b/agent/agent_test.go @@ -456,8 +456,6 @@ func TestAgent_GitSSH(t *testing.T) { func TestAgent_SessionTTYShell(t *testing.T) { t.Parallel() - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - t.Cleanup(cancel) if runtime.GOOS == "windows" { // This might be our implementation, or ConPTY itself. // It's difficult to find extensive tests for it, so @@ -468,6 +466,7 @@ func TestAgent_SessionTTYShell(t *testing.T) { for _, port := range sshPorts { t.Run(fmt.Sprintf("(%d)", port), func(t *testing.T) { t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) session := setupSSHSessionOnPort(t, agentsdk.Manifest{}, codersdk.ServiceBannerConfig{}, nil, port) command := "sh" @@ -2458,16 +2457,222 @@ func TestAgent_DevcontainersDisabledForSubAgent(t *testing.T) { require.Contains(t, err.Error(), "Dev Container integration inside other Dev Containers is explicitly not supported.") } +// TestAgent_DevcontainerPrebuildClaim tests that we correctly handle +// the claiming process for running devcontainers. +// +// You can run it manually as follows: +// +// CODER_TEST_USE_DOCKER=1 go test -count=1 ./agent -run TestAgent_DevcontainerPrebuildClaim +// +//nolint:paralleltest // This test sets an environment variable. +func TestAgent_DevcontainerPrebuildClaim(t *testing.T) { + if os.Getenv("CODER_TEST_USE_DOCKER") != "1" { + t.Skip("Set CODER_TEST_USE_DOCKER=1 to run this test") + } + if _, err := exec.LookPath("devcontainer"); err != nil { + t.Skip("This test requires the devcontainer CLI: npm install -g @devcontainers/cli") + } + + pool, err := dockertest.NewPool("") + require.NoError(t, err, "Could not connect to docker") + + var ( + ctx = testutil.Context(t, testutil.WaitShort) + + devcontainerID = uuid.New() + devcontainerLogSourceID = uuid.New() + + workspaceFolder = filepath.Join(t.TempDir(), "project") + devcontainerPath = filepath.Join(workspaceFolder, ".devcontainer") + devcontainerConfig = filepath.Join(devcontainerPath, "devcontainer.json") + ) + + // Given: A devcontainer project. + t.Logf("Workspace folder: %s", workspaceFolder) + + err = os.MkdirAll(devcontainerPath, 0o755) + require.NoError(t, err, "create dev container directory") + + // Given: This devcontainer project specifies an app that uses the owner name and workspace name. + err = os.WriteFile(devcontainerConfig, []byte(`{ + "name": "project", + "image": "busybox:latest", + "cmd": ["sleep", "infinity"], + "runArgs": ["--label=`+agentcontainers.DevcontainerIsTestRunLabel+`=true"], + "customizations": { + "coder": { + "apps": [{ + "slug": "zed", + "url": "zed://ssh/${localEnv:CODER_WORKSPACE_AGENT_NAME}.${localEnv:CODER_WORKSPACE_NAME}.${localEnv:CODER_WORKSPACE_OWNER_NAME}.coder${containerWorkspaceFolder}" + }] + } + } + }`), 0o600) + require.NoError(t, err, "write devcontainer config") + + // Given: A manifest with a prebuild username and workspace name. + manifest := agentsdk.Manifest{ + OwnerName: "prebuilds", + WorkspaceName: "prebuilds-xyz-123", + + Devcontainers: []codersdk.WorkspaceAgentDevcontainer{ + {ID: devcontainerID, Name: "test", WorkspaceFolder: workspaceFolder}, + }, + Scripts: []codersdk.WorkspaceAgentScript{ + {ID: devcontainerID, LogSourceID: devcontainerLogSourceID}, + }, + } + + // When: We create an agent with devcontainers enabled. + //nolint:dogsled + conn, client, _, _, _ := setupAgent(t, manifest, 0, func(_ *agenttest.Client, o *agent.Options) { + o.Devcontainers = true + o.DevcontainerAPIOptions = append(o.DevcontainerAPIOptions, + agentcontainers.WithContainerLabelIncludeFilter(agentcontainers.DevcontainerLocalFolderLabel, workspaceFolder), + agentcontainers.WithContainerLabelIncludeFilter(agentcontainers.DevcontainerIsTestRunLabel, "true"), + ) + }) + + testutil.Eventually(ctx, t, func(ctx context.Context) bool { + return slices.Contains(client.GetLifecycleStates(), codersdk.WorkspaceAgentLifecycleReady) + }, testutil.IntervalMedium, "agent not ready") + + var dcPrebuild codersdk.WorkspaceAgentDevcontainer + testutil.Eventually(ctx, t, func(ctx context.Context) bool { + resp, err := conn.ListContainers(ctx) + require.NoError(t, err) + + for _, dc := range resp.Devcontainers { + if dc.Container == nil { + continue + } + + v, ok := dc.Container.Labels[agentcontainers.DevcontainerLocalFolderLabel] + if ok && v == workspaceFolder { + dcPrebuild = dc + return true + } + } + + return false + }, testutil.IntervalMedium, "devcontainer not found") + defer func() { + pool.Client.RemoveContainer(docker.RemoveContainerOptions{ + ID: dcPrebuild.Container.ID, + RemoveVolumes: true, + Force: true, + }) + }() + + // Then: We expect a sub agent to have been created. + subAgents := client.GetSubAgents() + require.Len(t, subAgents, 1) + + subAgent := subAgents[0] + subAgentID, err := uuid.FromBytes(subAgent.GetId()) + require.NoError(t, err) + + // And: We expect there to be 1 app. + subAgentApps, err := client.GetSubAgentApps(subAgentID) + require.NoError(t, err) + require.Len(t, subAgentApps, 1) + + // And: This app should contain the prebuild workspace name and owner name. + subAgentApp := subAgentApps[0] + require.Equal(t, "zed://ssh/project.prebuilds-xyz-123.prebuilds.coder/workspaces/project", subAgentApp.GetUrl()) + + // Given: We close the client and connection + client.Close() + conn.Close() + + // Given: A new manifest with a regular user owner name and workspace name. + manifest = agentsdk.Manifest{ + OwnerName: "user", + WorkspaceName: "user-workspace", + + Devcontainers: []codersdk.WorkspaceAgentDevcontainer{ + {ID: devcontainerID, Name: "test", WorkspaceFolder: workspaceFolder}, + }, + Scripts: []codersdk.WorkspaceAgentScript{ + {ID: devcontainerID, LogSourceID: devcontainerLogSourceID}, + }, + } + + // When: We create an agent with devcontainers enabled. + //nolint:dogsled + conn, client, _, _, _ = setupAgent(t, manifest, 0, func(_ *agenttest.Client, o *agent.Options) { + o.Devcontainers = true + o.DevcontainerAPIOptions = append(o.DevcontainerAPIOptions, + agentcontainers.WithContainerLabelIncludeFilter(agentcontainers.DevcontainerLocalFolderLabel, workspaceFolder), + agentcontainers.WithContainerLabelIncludeFilter(agentcontainers.DevcontainerIsTestRunLabel, "true"), + ) + }) + + testutil.Eventually(ctx, t, func(ctx context.Context) bool { + return slices.Contains(client.GetLifecycleStates(), codersdk.WorkspaceAgentLifecycleReady) + }, testutil.IntervalMedium, "agent not ready") + + var dcClaimed codersdk.WorkspaceAgentDevcontainer + testutil.Eventually(ctx, t, func(ctx context.Context) bool { + resp, err := conn.ListContainers(ctx) + require.NoError(t, err) + + for _, dc := range resp.Devcontainers { + if dc.Container == nil { + continue + } + + v, ok := dc.Container.Labels[agentcontainers.DevcontainerLocalFolderLabel] + if ok && v == workspaceFolder { + dcClaimed = dc + return true + } + } + + return false + }, testutil.IntervalMedium, "devcontainer not found") + defer func() { + if dcClaimed.Container.ID != dcPrebuild.Container.ID { + pool.Client.RemoveContainer(docker.RemoveContainerOptions{ + ID: dcClaimed.Container.ID, + RemoveVolumes: true, + Force: true, + }) + } + }() + + // Then: We expect the claimed devcontainer and prebuild devcontainer + // to be using the same underlying container. + require.Equal(t, dcPrebuild.Container.ID, dcClaimed.Container.ID) + + // And: We expect there to be a sub agent created. + subAgents = client.GetSubAgents() + require.Len(t, subAgents, 1) + + subAgent = subAgents[0] + subAgentID, err = uuid.FromBytes(subAgent.GetId()) + require.NoError(t, err) + + // And: We expect there to be an app. + subAgentApps, err = client.GetSubAgentApps(subAgentID) + require.NoError(t, err) + require.Len(t, subAgentApps, 1) + + // And: We expect this app to have the user's owner name and workspace name. + subAgentApp = subAgentApps[0] + require.Equal(t, "zed://ssh/project.user-workspace.user.coder/workspaces/project", subAgentApp.GetUrl()) +} + func TestAgent_Dial(t *testing.T) { t.Parallel() cases := []struct { name string - setup func(t *testing.T) net.Listener + setup func(t testing.TB) net.Listener }{ { name: "TCP", - setup: func(t *testing.T) net.Listener { + setup: func(t testing.TB) net.Listener { l, err := net.Listen("tcp", "127.0.0.1:0") require.NoError(t, err, "create TCP listener") return l @@ -2475,7 +2680,7 @@ func TestAgent_Dial(t *testing.T) { }, { name: "UDP", - setup: func(t *testing.T) net.Listener { + setup: func(t testing.TB) net.Listener { addr := net.UDPAddr{ IP: net.ParseIP("127.0.0.1"), Port: 0, @@ -2493,57 +2698,69 @@ func TestAgent_Dial(t *testing.T) { // The purpose of this test is to ensure that a client can dial a // listener in the workspace over tailnet. - l := c.setup(t) - done := make(chan struct{}) - defer func() { - l.Close() - <-done - }() - - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() - - go func() { - defer close(done) - for range 2 { - c, err := l.Accept() - if assert.NoError(t, err, "accept connection") { - testAccept(ctx, t, c) - _ = c.Close() + // + // The OS sometimes drops packets if the system can't keep up with + // them. For TCP packets, it's typically fine due to + // retransmissions, but for UDP packets, it can fail this test. + // + // The OS gets involved for the Wireguard traffic (either via DERP + // or direct UDP), and also for the traffic between the agent and + // the listener in the "workspace". + // + // To avoid this, we'll retry this test up to 3 times. + //nolint:gocritic // This test is flaky due to uncontrollable OS packet drops under heavy load. + testutil.RunRetry(t, 3, func(t testing.TB) { + ctx := testutil.Context(t, testutil.WaitLong) + + l := c.setup(t) + done := make(chan struct{}) + defer func() { + l.Close() + <-done + }() + + go func() { + defer close(done) + for range 2 { + c, err := l.Accept() + if assert.NoError(t, err, "accept connection") { + testAccept(ctx, t, c) + _ = c.Close() + } } - } - }() + }() - agentID := uuid.UUID{0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8} - //nolint:dogsled - agentConn, _, _, _, _ := setupAgent(t, agentsdk.Manifest{ - AgentID: agentID, - }, 0) - require.True(t, agentConn.AwaitReachable(ctx)) - conn, err := agentConn.DialContext(ctx, l.Addr().Network(), l.Addr().String()) - require.NoError(t, err) - testDial(ctx, t, conn) - err = conn.Close() - require.NoError(t, err) + agentID := uuid.UUID{0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8} + //nolint:dogsled + agentConn, _, _, _, _ := setupAgent(t, agentsdk.Manifest{ + AgentID: agentID, + }, 0) + require.True(t, agentConn.AwaitReachable(ctx)) + conn, err := agentConn.DialContext(ctx, l.Addr().Network(), l.Addr().String()) + require.NoError(t, err) + testDial(ctx, t, conn) + err = conn.Close() + require.NoError(t, err) - // also connect via the CoderServicePrefix, to test that we can reach the agent on this - // IP. This will be required for CoderVPN. - _, rawPort, _ := net.SplitHostPort(l.Addr().String()) - port, _ := strconv.ParseUint(rawPort, 10, 16) - ipp := netip.AddrPortFrom(tailnet.CoderServicePrefix.AddrFromUUID(agentID), uint16(port)) - - switch l.Addr().Network() { - case "tcp": - conn, err = agentConn.Conn.DialContextTCP(ctx, ipp) - case "udp": - conn, err = agentConn.Conn.DialContextUDP(ctx, ipp) - default: - t.Fatalf("unknown network: %s", l.Addr().Network()) - } - require.NoError(t, err) - testDial(ctx, t, conn) - err = conn.Close() - require.NoError(t, err) + // also connect via the CoderServicePrefix, to test that we can reach the agent on this + // IP. This will be required for CoderVPN. + _, rawPort, _ := net.SplitHostPort(l.Addr().String()) + port, _ := strconv.ParseUint(rawPort, 10, 16) + ipp := netip.AddrPortFrom(tailnet.CoderServicePrefix.AddrFromUUID(agentID), uint16(port)) + + switch l.Addr().Network() { + case "tcp": + conn, err = agentConn.TailnetConn().DialContextTCP(ctx, ipp) + case "udp": + conn, err = agentConn.TailnetConn().DialContextUDP(ctx, ipp) + default: + t.Fatalf("unknown network: %s", l.Addr().Network()) + } + require.NoError(t, err) + testDial(ctx, t, conn) + err = conn.Close() + require.NoError(t, err) + }) }) } } @@ -2594,7 +2811,7 @@ func TestAgent_UpdatedDERP(t *testing.T) { }) // Setup a client connection. - newClientConn := func(derpMap *tailcfg.DERPMap, name string) *workspacesdk.AgentConn { + newClientConn := func(derpMap *tailcfg.DERPMap, name string) workspacesdk.AgentConn { conn, err := tailnet.NewConn(&tailnet.Options{ Addresses: []netip.Prefix{tailnet.TailscaleServicePrefix.RandomPrefix()}, DERPMap: derpMap, @@ -2674,13 +2891,13 @@ func TestAgent_UpdatedDERP(t *testing.T) { // Connect from a second client and make sure it uses the new DERP map. conn2 := newClientConn(newDerpMap, "client2") - require.Equal(t, []int{2}, conn2.DERPMap().RegionIDs()) + require.Equal(t, []int{2}, conn2.TailnetConn().DERPMap().RegionIDs()) t.Log("conn2 got the new DERPMap") // If the first client gets a DERP map update, it should be able to // reconnect just fine. - conn1.SetDERPMap(newDerpMap) - require.Equal(t, []int{2}, conn1.DERPMap().RegionIDs()) + conn1.TailnetConn().SetDERPMap(newDerpMap) + require.Equal(t, []int{2}, conn1.TailnetConn().DERPMap().RegionIDs()) t.Log("set the new DERPMap on conn1") ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) defer cancel() @@ -3046,8 +3263,8 @@ func setupSSHSessionOnPort( return session } -func setupAgent(t *testing.T, metadata agentsdk.Manifest, ptyTimeout time.Duration, opts ...func(*agenttest.Client, *agent.Options)) ( - *workspacesdk.AgentConn, +func setupAgent(t testing.TB, metadata agentsdk.Manifest, ptyTimeout time.Duration, opts ...func(*agenttest.Client, *agent.Options)) ( + workspacesdk.AgentConn, *agenttest.Client, <-chan *proto.Stats, afero.Fs, @@ -3144,7 +3361,7 @@ func setupAgent(t *testing.T, metadata agentsdk.Manifest, ptyTimeout time.Durati var dialTestPayload = []byte("dean-was-here123") -func testDial(ctx context.Context, t *testing.T, c net.Conn) { +func testDial(ctx context.Context, t testing.TB, c net.Conn) { t.Helper() if deadline, ok := ctx.Deadline(); ok { @@ -3160,7 +3377,7 @@ func testDial(ctx context.Context, t *testing.T, c net.Conn) { assertReadPayload(t, c, dialTestPayload) } -func testAccept(ctx context.Context, t *testing.T, c net.Conn) { +func testAccept(ctx context.Context, t testing.TB, c net.Conn) { t.Helper() defer c.Close() @@ -3177,7 +3394,7 @@ func testAccept(ctx context.Context, t *testing.T, c net.Conn) { assertWritePayload(t, c, dialTestPayload) } -func assertReadPayload(t *testing.T, r io.Reader, payload []byte) { +func assertReadPayload(t testing.TB, r io.Reader, payload []byte) { t.Helper() b := make([]byte, len(payload)+16) n, err := r.Read(b) @@ -3186,11 +3403,11 @@ func assertReadPayload(t *testing.T, r io.Reader, payload []byte) { assert.Equal(t, payload, b[:n]) } -func assertWritePayload(t *testing.T, w io.Writer, payload []byte) { +func assertWritePayload(t testing.TB, w io.Writer, payload []byte) { t.Helper() n, err := w.Write(payload) assert.NoError(t, err, "write payload") - assert.Equal(t, len(payload), n, "payload length does not match") + assert.Equal(t, len(payload), n, "written payload length does not match") } func testSessionOutput(t *testing.T, session *ssh.Session, expected, unexpected []string, expectedRe *regexp.Regexp) { @@ -3253,7 +3470,11 @@ func TestAgent_Metrics_SSH(t *testing.T) { registry := prometheus.NewRegistry() //nolint:dogsled - conn, _, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0, func(_ *agenttest.Client, o *agent.Options) { + conn, _, _, _, _ := setupAgent(t, agentsdk.Manifest{ + // Make sure we always get a DERP connection for + // currently_reachable_peers. + DisableDirectConnections: true, + }, 0, func(_ *agenttest.Client, o *agent.Options) { o.PrometheusRegistry = registry }) @@ -3307,7 +3528,7 @@ func TestAgent_Metrics_SSH(t *testing.T) { { Name: "coderd_agentstats_currently_reachable_peers", Type: proto.Stats_Metric_GAUGE, - Value: 0, + Value: 1, Labels: []*proto.Stats_Metric_Label{ { Name: "connection_type", @@ -3318,7 +3539,7 @@ func TestAgent_Metrics_SSH(t *testing.T) { { Name: "coderd_agentstats_currently_reachable_peers", Type: proto.Stats_Metric_GAUGE, - Value: 1, + Value: 0, Labels: []*proto.Stats_Metric_Label{ { Name: "connection_type", diff --git a/agent/agentcontainers/api.go b/agent/agentcontainers/api.go index 4f9287713fcfc..d77d4209cb245 100644 --- a/agent/agentcontainers/api.go +++ b/agent/agentcontainers/api.go @@ -77,7 +77,8 @@ type API struct { subAgentURL string subAgentEnv []string - projectDiscovery bool // If we should perform project discovery or not. + projectDiscovery bool // If we should perform project discovery or not. + discoveryAutostart bool // If we should autostart discovered projects. ownerName string workspaceName string @@ -143,7 +144,9 @@ func WithCommandEnv(ce CommandEnv) Option { strings.HasPrefix(s, "CODER_WORKSPACE_AGENT_URL=") || strings.HasPrefix(s, "CODER_AGENT_TOKEN=") || strings.HasPrefix(s, "CODER_AGENT_AUTH=") || - strings.HasPrefix(s, "CODER_AGENT_DEVCONTAINERS_ENABLE=") + strings.HasPrefix(s, "CODER_AGENT_DEVCONTAINERS_ENABLE=") || + strings.HasPrefix(s, "CODER_AGENT_DEVCONTAINERS_PROJECT_DISCOVERY_ENABLE=") || + strings.HasPrefix(s, "CODER_AGENT_DEVCONTAINERS_DISCOVERY_AUTOSTART_ENABLE=") }) return shell, dir, env, nil } @@ -160,8 +163,8 @@ func WithContainerCLI(ccli ContainerCLI) Option { // WithContainerLabelIncludeFilter sets a label filter for containers. // This option can be given multiple times to filter by multiple labels. -// The behavior is such that only containers matching one or more of the -// provided labels will be included. +// The behavior is such that only containers matching all of the provided +// labels will be included. func WithContainerLabelIncludeFilter(label, value string) Option { return func(api *API) { api.containerLabelIncludeFilter[label] = value @@ -286,6 +289,14 @@ func WithProjectDiscovery(projectDiscovery bool) Option { } } +// WithDiscoveryAutostart sets if the API should attempt to autostart +// projects that have been discovered +func WithDiscoveryAutostart(discoveryAutostart bool) Option { + return func(api *API) { + api.discoveryAutostart = discoveryAutostart + } +} + // ScriptLogger is an interface for sending devcontainer logs to the // controlplane. type ScriptLogger interface { @@ -524,23 +535,43 @@ func (api *API) discoverDevcontainersInProject(projectPath string) error { workspaceFolder := strings.TrimSuffix(path, relativeConfigPath) - logger.Debug(api.ctx, "discovered dev container project", slog.F("workspace_folder", workspaceFolder)) + logger := logger.With(slog.F("workspace_folder", workspaceFolder)) + logger.Debug(api.ctx, "discovered dev container project") api.mu.Lock() if _, found := api.knownDevcontainers[workspaceFolder]; !found { - logger.Debug(api.ctx, "adding dev container project", slog.F("workspace_folder", workspaceFolder)) + logger.Debug(api.ctx, "adding dev container project") dc := codersdk.WorkspaceAgentDevcontainer{ ID: uuid.New(), Name: "", // Updated later based on container state. WorkspaceFolder: workspaceFolder, ConfigPath: path, - Status: "", // Updated later based on container state. + Status: codersdk.WorkspaceAgentDevcontainerStatusStopped, Dirty: false, // Updated later based on config file changes. Container: nil, } + if api.discoveryAutostart { + config, err := api.dccli.ReadConfig(api.ctx, workspaceFolder, path, []string{}) + if err != nil { + logger.Error(api.ctx, "read project configuration", slog.Error(err)) + } else if config.Configuration.Customizations.Coder.AutoStart { + dc.Status = codersdk.WorkspaceAgentDevcontainerStatusStarting + } + } + api.knownDevcontainers[workspaceFolder] = dc + api.broadcastUpdatesLocked() + + if dc.Status == codersdk.WorkspaceAgentDevcontainerStatusStarting { + api.asyncWg.Add(1) + go func() { + defer api.asyncWg.Done() + + _ = api.CreateDevcontainer(dc.WorkspaceFolder, dc.ConfigPath) + }() + } } api.mu.Unlock() } @@ -732,7 +763,11 @@ func (api *API) broadcastUpdatesLocked() { func (api *API) watchContainers(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() - conn, err := websocket.Accept(rw, r, nil) + conn, err := websocket.Accept(rw, r, &websocket.AcceptOptions{ + // We want `NoContextTakeover` compression to balance improving + // bandwidth cost/latency with minimal memory usage overhead. + CompressionMode: websocket.CompressionNoContextTakeover, + }) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ Message: "Failed to upgrade connection to websocket.", @@ -908,17 +943,22 @@ func (api *API) processUpdatedContainersLocked(ctx context.Context, updated code slog.F("config_file", configFile), ) + // If we haven't set any include filters, we should explicitly ignore test devcontainers. + if len(api.containerLabelIncludeFilter) == 0 && container.Labels[DevcontainerIsTestRunLabel] == "true" { + continue + } + // Filter out devcontainer tests, unless explicitly set in include filters. - if len(api.containerLabelIncludeFilter) > 0 || container.Labels[DevcontainerIsTestRunLabel] == "true" { - var ok bool + if len(api.containerLabelIncludeFilter) > 0 { + includeContainer := true for label, value := range api.containerLabelIncludeFilter { - if v, found := container.Labels[label]; found && v == value { - ok = true - } + v, found := container.Labels[label] + + includeContainer = includeContainer && (found && v == value) } // Verbose debug logging is fine here since typically filters // are only used in development or testing environments. - if !ok { + if !includeContainer { logger.Debug(ctx, "container does not match include filter, ignoring devcontainer", slog.F("container_labels", container.Labels), slog.F("include_filter", api.containerLabelIncludeFilter)) continue } diff --git a/agent/agentcontainers/api_test.go b/agent/agentcontainers/api_test.go index 5714027960a7b..263f1698a7117 100644 --- a/agent/agentcontainers/api_test.go +++ b/agent/agentcontainers/api_test.go @@ -71,6 +71,7 @@ func (f *fakeContainerCLI) ExecAs(ctx context.Context, name, user string, args . // fakeDevcontainerCLI implements the agentcontainers.DevcontainerCLI // interface for testing. type fakeDevcontainerCLI struct { + up func(workspaceFolder, configPath string) (string, error) upID string upErr error upErrC chan func() error // If set, send to return err, close to return upErr. @@ -79,9 +80,14 @@ type fakeDevcontainerCLI struct { readConfig agentcontainers.DevcontainerConfig readConfigErr error readConfigErrC chan func(envs []string) error + + configMap map[string]agentcontainers.DevcontainerConfig // By config path } -func (f *fakeDevcontainerCLI) Up(ctx context.Context, _, _ string, _ ...agentcontainers.DevcontainerCLIUpOptions) (string, error) { +func (f *fakeDevcontainerCLI) Up(ctx context.Context, workspaceFolder, configPath string, _ ...agentcontainers.DevcontainerCLIUpOptions) (string, error) { + if f.up != nil { + return f.up(workspaceFolder, configPath) + } if f.upErrC != nil { select { case <-ctx.Done(): @@ -109,7 +115,12 @@ func (f *fakeDevcontainerCLI) Exec(ctx context.Context, _, _ string, cmd string, return f.execErr } -func (f *fakeDevcontainerCLI) ReadConfig(ctx context.Context, _, _ string, envs []string, _ ...agentcontainers.DevcontainerCLIReadConfigOptions) (agentcontainers.DevcontainerConfig, error) { +func (f *fakeDevcontainerCLI) ReadConfig(ctx context.Context, _, configPath string, envs []string, _ ...agentcontainers.DevcontainerCLIReadConfigOptions) (agentcontainers.DevcontainerConfig, error) { + if f.configMap != nil { + if v, found := f.configMap[configPath]; found { + return v, f.readConfigErr + } + } if f.readConfigErrC != nil { select { case <-ctx.Done(): @@ -1664,6 +1675,8 @@ func TestAPI(t *testing.T) { coderBin, err := os.Executable() require.NoError(t, err) + coderBin, err = filepath.EvalSymlinks(coderBin) + require.NoError(t, err) mCCLI.EXPECT().List(gomock.Any()).Return(codersdk.WorkspaceAgentListContainersResponse{ Containers: []codersdk.WorkspaceAgentContainer{testContainer}, @@ -2085,9 +2098,6 @@ func TestAPI(t *testing.T) { } ) - coderBin, err := os.Executable() - require.NoError(t, err) - // Mock the `List` function to always return the test container. mCCLI.EXPECT().List(gomock.Any()).Return(codersdk.WorkspaceAgentListContainersResponse{ Containers: []codersdk.WorkspaceAgentContainer{testContainer}, @@ -2128,7 +2138,7 @@ func TestAPI(t *testing.T) { require.Equal(t, http.StatusOK, rec.Code) var response codersdk.WorkspaceAgentListContainersResponse - err = json.NewDecoder(rec.Body).Decode(&response) + err := json.NewDecoder(rec.Body).Decode(&response) require.NoError(t, err) // Then: We expect that there will be an error associated with the devcontainer. @@ -2138,7 +2148,7 @@ func TestAPI(t *testing.T) { gomock.InOrder( mCCLI.EXPECT().DetectArchitecture(gomock.Any(), testContainer.ID).Return(runtime.GOARCH, nil), mCCLI.EXPECT().ExecAs(gomock.Any(), testContainer.ID, "root", "mkdir", "-p", "/.coder-agent").Return(nil, nil), - mCCLI.EXPECT().Copy(gomock.Any(), testContainer.ID, coderBin, "/.coder-agent/coder").Return(nil), + mCCLI.EXPECT().Copy(gomock.Any(), testContainer.ID, gomock.Any(), "/.coder-agent/coder").Return(nil), mCCLI.EXPECT().ExecAs(gomock.Any(), testContainer.ID, "root", "chmod", "0755", "/.coder-agent", "/.coder-agent/coder").Return(nil, nil), mCCLI.EXPECT().ExecAs(gomock.Any(), testContainer.ID, "root", "/bin/sh", "-c", "chown $(id -u):$(id -g) /.coder-agent/coder").Return(nil, nil), ) @@ -2146,8 +2156,8 @@ func TestAPI(t *testing.T) { // Given: We allow creation to succeed. testutil.RequireSend(ctx, t, fSAC.createErrC, nil) - _, aw := mClock.AdvanceNext() - aw.MustWait(ctx) + err = api.RefreshContainers(ctx) + require.NoError(t, err) req = httptest.NewRequest(http.MethodGet, "/", nil) rec = httptest.NewRecorder() @@ -2447,6 +2457,8 @@ func TestAPI(t *testing.T) { coderBin, err := os.Executable() require.NoError(t, err) + coderBin, err = filepath.EvalSymlinks(coderBin) + require.NoError(t, err) // Mock the `List` function to always return out test container. mCCLI.EXPECT().List(gomock.Any()).Return(codersdk.WorkspaceAgentListContainersResponse{ @@ -2541,6 +2553,8 @@ func TestAPI(t *testing.T) { coderBin, err := os.Executable() require.NoError(t, err) + coderBin, err = filepath.EvalSymlinks(coderBin) + require.NoError(t, err) // Mock the `List` function to always return out test container. mCCLI.EXPECT().List(gomock.Any()).Return(codersdk.WorkspaceAgentListContainersResponse{ @@ -2646,6 +2660,8 @@ func TestAPI(t *testing.T) { coderBin, err := os.Executable() require.NoError(t, err) + coderBin, err = filepath.EvalSymlinks(coderBin) + require.NoError(t, err) // Mock the `List` function to always return our test container. mCCLI.EXPECT().List(gomock.Any()).Return(codersdk.WorkspaceAgentListContainersResponse{ @@ -3568,4 +3584,451 @@ func TestDevcontainerDiscovery(t *testing.T) { // This is implicitly handled by `testutil.Logger` failing when it // detects an error has been logged. }) + + t.Run("AutoStart", func(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + agentDir string + fs map[string]string + configMap map[string]agentcontainers.DevcontainerConfig + expectDevcontainerCount int + expectUpCalledCount int + }{ + { + name: "SingleEnabled", + agentDir: "/home/coder", + expectDevcontainerCount: 1, + expectUpCalledCount: 1, + fs: map[string]string{ + "/home/coder/.git/HEAD": "", + "/home/coder/.devcontainer/devcontainer.json": "", + }, + configMap: map[string]agentcontainers.DevcontainerConfig{ + "/home/coder/.devcontainer/devcontainer.json": { + Configuration: agentcontainers.DevcontainerConfiguration{ + Customizations: agentcontainers.DevcontainerCustomizations{ + Coder: agentcontainers.CoderCustomization{ + AutoStart: true, + }, + }, + }, + }, + }, + }, + { + name: "SingleDisabled", + agentDir: "/home/coder", + expectDevcontainerCount: 1, + expectUpCalledCount: 0, + fs: map[string]string{ + "/home/coder/.git/HEAD": "", + "/home/coder/.devcontainer/devcontainer.json": "", + }, + configMap: map[string]agentcontainers.DevcontainerConfig{ + "/home/coder/.devcontainer/devcontainer.json": { + Configuration: agentcontainers.DevcontainerConfiguration{ + Customizations: agentcontainers.DevcontainerCustomizations{ + Coder: agentcontainers.CoderCustomization{ + AutoStart: false, + }, + }, + }, + }, + }, + }, + { + name: "OneEnabledOneDisabled", + agentDir: "/home/coder", + expectDevcontainerCount: 2, + expectUpCalledCount: 1, + fs: map[string]string{ + "/home/coder/.git/HEAD": "", + "/home/coder/.devcontainer/devcontainer.json": "", + "/home/coder/project/.devcontainer.json": "", + }, + configMap: map[string]agentcontainers.DevcontainerConfig{ + "/home/coder/.devcontainer/devcontainer.json": { + Configuration: agentcontainers.DevcontainerConfiguration{ + Customizations: agentcontainers.DevcontainerCustomizations{ + Coder: agentcontainers.CoderCustomization{ + AutoStart: true, + }, + }, + }, + }, + "/home/coder/project/.devcontainer.json": { + Configuration: agentcontainers.DevcontainerConfiguration{ + Customizations: agentcontainers.DevcontainerCustomizations{ + Coder: agentcontainers.CoderCustomization{ + AutoStart: false, + }, + }, + }, + }, + }, + }, + { + name: "MultipleEnabled", + agentDir: "/home/coder", + expectDevcontainerCount: 2, + expectUpCalledCount: 2, + fs: map[string]string{ + "/home/coder/.git/HEAD": "", + "/home/coder/.devcontainer/devcontainer.json": "", + "/home/coder/project/.devcontainer.json": "", + }, + configMap: map[string]agentcontainers.DevcontainerConfig{ + "/home/coder/.devcontainer/devcontainer.json": { + Configuration: agentcontainers.DevcontainerConfiguration{ + Customizations: agentcontainers.DevcontainerCustomizations{ + Coder: agentcontainers.CoderCustomization{ + AutoStart: true, + }, + }, + }, + }, + "/home/coder/project/.devcontainer.json": { + Configuration: agentcontainers.DevcontainerConfiguration{ + Customizations: agentcontainers.DevcontainerCustomizations{ + Coder: agentcontainers.CoderCustomization{ + AutoStart: true, + }, + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + var ( + ctx = testutil.Context(t, testutil.WaitShort) + logger = testutil.Logger(t) + mClock = quartz.NewMock(t) + + upCalledMu sync.Mutex + upCalledFor = map[string]bool{} + + fCCLI = &fakeContainerCLI{} + fDCCLI = &fakeDevcontainerCLI{ + configMap: tt.configMap, + up: func(_, configPath string) (string, error) { + upCalledMu.Lock() + upCalledFor[configPath] = true + upCalledMu.Unlock() + return "", nil + }, + } + + r = chi.NewRouter() + ) + + api := agentcontainers.NewAPI(logger, + agentcontainers.WithClock(mClock), + agentcontainers.WithWatcher(watcher.NewNoop()), + agentcontainers.WithFileSystem(initFS(t, tt.fs)), + agentcontainers.WithManifestInfo("owner", "workspace", "parent-agent", "/home/coder"), + agentcontainers.WithContainerCLI(fCCLI), + agentcontainers.WithDevcontainerCLI(fDCCLI), + agentcontainers.WithProjectDiscovery(true), + agentcontainers.WithDiscoveryAutostart(true), + ) + api.Start() + r.Mount("/", api.Routes()) + + // Given: We allow the discover routing to progress + var got codersdk.WorkspaceAgentListContainersResponse + require.Eventuallyf(t, func() bool { + req := httptest.NewRequest(http.MethodGet, "/", nil).WithContext(ctx) + rec := httptest.NewRecorder() + r.ServeHTTP(rec, req) + + got = codersdk.WorkspaceAgentListContainersResponse{} + err := json.NewDecoder(rec.Body).Decode(&got) + require.NoError(t, err) + + upCalledMu.Lock() + upCalledCount := len(upCalledFor) + upCalledMu.Unlock() + + return len(got.Devcontainers) >= tt.expectDevcontainerCount && upCalledCount >= tt.expectUpCalledCount + }, testutil.WaitShort, testutil.IntervalFast, "dev containers never found") + + // Close the API. We expect this not to fail because we should have finished + // at this point. + err := api.Close() + require.NoError(t, err) + + // Then: We expect to find the expected devcontainers + assert.Len(t, got.Devcontainers, tt.expectDevcontainerCount) + + // And: We expect `up` to have been called the expected amount of times. + assert.Len(t, upCalledFor, tt.expectUpCalledCount) + + // And: `up` was called on the correct containers + for configPath, config := range tt.configMap { + autoStart := config.Configuration.Customizations.Coder.AutoStart + wasUpCalled := upCalledFor[configPath] + + require.Equal(t, autoStart, wasUpCalled) + } + }) + } + + t.Run("Disabled", func(t *testing.T) { + t.Parallel() + var ( + ctx = testutil.Context(t, testutil.WaitShort) + logger = testutil.Logger(t) + mClock = quartz.NewMock(t) + mDCCLI = acmock.NewMockDevcontainerCLI(gomock.NewController(t)) + + fs = map[string]string{ + "/home/coder/.git/HEAD": "", + "/home/coder/.devcontainer/devcontainer.json": "", + } + + r = chi.NewRouter() + ) + + // We expect that neither `ReadConfig`, nor `Up` are called as we + // have explicitly disabled the agentcontainers API from attempting + // to autostart devcontainers that it discovers. + mDCCLI.EXPECT().ReadConfig(gomock.Any(), + "/home/coder", + "/home/coder/.devcontainer/devcontainer.json", + []string{}, + ).Return(agentcontainers.DevcontainerConfig{ + Configuration: agentcontainers.DevcontainerConfiguration{ + Customizations: agentcontainers.DevcontainerCustomizations{ + Coder: agentcontainers.CoderCustomization{ + AutoStart: true, + }, + }, + }, + }, nil).Times(0) + + mDCCLI.EXPECT().Up(gomock.Any(), + "/home/coder", + "/home/coder/.devcontainer/devcontainer.json", + gomock.Any(), + ).Return("", nil).Times(0) + + api := agentcontainers.NewAPI(logger, + agentcontainers.WithClock(mClock), + agentcontainers.WithWatcher(watcher.NewNoop()), + agentcontainers.WithFileSystem(initFS(t, fs)), + agentcontainers.WithManifestInfo("owner", "workspace", "parent-agent", "/home/coder"), + agentcontainers.WithContainerCLI(&fakeContainerCLI{}), + agentcontainers.WithDevcontainerCLI(mDCCLI), + agentcontainers.WithProjectDiscovery(true), + agentcontainers.WithDiscoveryAutostart(false), + ) + api.Start() + defer api.Close() + r.Mount("/", api.Routes()) + + // When: All expected dev containers have been found. + require.Eventuallyf(t, func() bool { + req := httptest.NewRequest(http.MethodGet, "/", nil).WithContext(ctx) + rec := httptest.NewRecorder() + r.ServeHTTP(rec, req) + + got := codersdk.WorkspaceAgentListContainersResponse{} + err := json.NewDecoder(rec.Body).Decode(&got) + require.NoError(t, err) + + return len(got.Devcontainers) >= 1 + }, testutil.WaitShort, testutil.IntervalFast, "dev containers never found") + + // Then: We expect the mock infra to not fail. + }) + }) +} + +// TestDevcontainerPrebuildSupport validates that devcontainers survive the transition +// from prebuild to claimed workspace, ensuring the existing container is reused +// with updated configuration rather than being recreated. +func TestDevcontainerPrebuildSupport(t *testing.T) { + t.Parallel() + + if runtime.GOOS == "windows" { + t.Skip("Dev Container tests are not supported on Windows") + } + + var ( + ctx = testutil.Context(t, testutil.WaitShort) + logger = testutil.Logger(t) + + fDCCLI = &fakeDevcontainerCLI{readConfigErrC: make(chan func(envs []string) error, 1)} + fCCLI = &fakeContainerCLI{arch: runtime.GOARCH} + fSAC = &fakeSubAgentClient{} + + testDC = codersdk.WorkspaceAgentDevcontainer{ + ID: uuid.New(), + WorkspaceFolder: "/home/coder/coder", + ConfigPath: "/home/coder/coder/.devcontainer/devcontainer.json", + } + + testContainer = newFakeContainer("test-container-id", testDC.ConfigPath, testDC.WorkspaceFolder) + + prebuildOwner = "prebuilds" + prebuildWorkspace = "prebuilds-xyz-123" + prebuildAppURL = "prebuilds.zed" + + userOwner = "user" + userWorkspace = "user-workspace" + userAppURL = "user.zed" + ) + + // ================================================== + // PHASE 1: Prebuild workspace creates devcontainer + // ================================================== + + // Given: There are no containers initially. + fCCLI.containers = codersdk.WorkspaceAgentListContainersResponse{} + + api := agentcontainers.NewAPI(logger, + // We want this first `agentcontainers.API` to have a manifest info + // that is consistent with what a prebuild workspace would have. + agentcontainers.WithManifestInfo(prebuildOwner, prebuildWorkspace, "dev", "/home/coder"), + // Given: We start with a single dev container resource. + agentcontainers.WithDevcontainers( + []codersdk.WorkspaceAgentDevcontainer{testDC}, + []codersdk.WorkspaceAgentScript{{ID: testDC.ID, LogSourceID: uuid.New()}}, + ), + agentcontainers.WithSubAgentClient(fSAC), + agentcontainers.WithContainerCLI(fCCLI), + agentcontainers.WithDevcontainerCLI(fDCCLI), + agentcontainers.WithWatcher(watcher.NewNoop()), + ) + api.Start() + + fCCLI.containers = codersdk.WorkspaceAgentListContainersResponse{ + Containers: []codersdk.WorkspaceAgentContainer{testContainer}, + } + + // Given: We allow the dev container to be created. + fDCCLI.upID = testContainer.ID + fDCCLI.readConfig = agentcontainers.DevcontainerConfig{ + MergedConfiguration: agentcontainers.DevcontainerMergedConfiguration{ + Customizations: agentcontainers.DevcontainerMergedCustomizations{ + Coder: []agentcontainers.CoderCustomization{{ + Apps: []agentcontainers.SubAgentApp{ + {Slug: "zed", URL: prebuildAppURL}, + }, + }}, + }, + }, + } + + var readConfigEnvVars []string + testutil.RequireSend(ctx, t, fDCCLI.readConfigErrC, func(env []string) error { + readConfigEnvVars = env + return nil + }) + + // When: We create the dev container resource + err := api.CreateDevcontainer(testDC.WorkspaceFolder, testDC.ConfigPath) + require.NoError(t, err) + + require.Contains(t, readConfigEnvVars, "CODER_WORKSPACE_OWNER_NAME="+prebuildOwner) + require.Contains(t, readConfigEnvVars, "CODER_WORKSPACE_NAME="+prebuildWorkspace) + + // Then: We there to be only 1 agent. + require.Len(t, fSAC.agents, 1) + + // And: We expect only 1 agent to have been created. + require.Len(t, fSAC.created, 1) + firstAgent := fSAC.created[0] + + // And: We expect this agent to be the current agent. + _, found := fSAC.agents[firstAgent.ID] + require.True(t, found, "first agent expected to be current agent") + + // And: We expect there to be a single app. + require.Len(t, firstAgent.Apps, 1) + firstApp := firstAgent.Apps[0] + + // And: We expect this app to have the pre-claim URL. + require.Equal(t, prebuildAppURL, firstApp.URL) + + // Given: We now close the API + api.Close() + + // ============================================================= + // PHASE 2: User claims workspace, devcontainer should be reused + // ============================================================= + + // Given: We create a new claimed API + api = agentcontainers.NewAPI(logger, + // We want this second `agentcontainers.API` to have a manifest info + // that is consistent with what a claimed workspace would have. + agentcontainers.WithManifestInfo(userOwner, userWorkspace, "dev", "/home/coder"), + // Given: We start with a single dev container resource. + agentcontainers.WithDevcontainers( + []codersdk.WorkspaceAgentDevcontainer{testDC}, + []codersdk.WorkspaceAgentScript{{ID: testDC.ID, LogSourceID: uuid.New()}}, + ), + agentcontainers.WithSubAgentClient(fSAC), + agentcontainers.WithContainerCLI(fCCLI), + agentcontainers.WithDevcontainerCLI(fDCCLI), + agentcontainers.WithWatcher(watcher.NewNoop()), + ) + api.Start() + defer func() { + close(fDCCLI.readConfigErrC) + + api.Close() + }() + + // Given: We allow the dev container to be created. + fDCCLI.upID = testContainer.ID + fDCCLI.readConfig = agentcontainers.DevcontainerConfig{ + MergedConfiguration: agentcontainers.DevcontainerMergedConfiguration{ + Customizations: agentcontainers.DevcontainerMergedCustomizations{ + Coder: []agentcontainers.CoderCustomization{{ + Apps: []agentcontainers.SubAgentApp{ + {Slug: "zed", URL: userAppURL}, + }, + }}, + }, + }, + } + + testutil.RequireSend(ctx, t, fDCCLI.readConfigErrC, func(env []string) error { + readConfigEnvVars = env + return nil + }) + + // When: We create the dev container resource. + err = api.CreateDevcontainer(testDC.WorkspaceFolder, testDC.ConfigPath) + require.NoError(t, err) + + // Then: We expect the environment variables were passed correctly. + require.Contains(t, readConfigEnvVars, "CODER_WORKSPACE_OWNER_NAME="+userOwner) + require.Contains(t, readConfigEnvVars, "CODER_WORKSPACE_NAME="+userWorkspace) + + // And: We expect there to be only 1 agent. + require.Len(t, fSAC.agents, 1) + + // And: We expect _a separate agent_ to have been created. + require.Len(t, fSAC.created, 2) + secondAgent := fSAC.created[1] + + // And: We expect this new agent to be the current agent. + _, found = fSAC.agents[secondAgent.ID] + require.True(t, found, "second agent expected to be current agent") + + // And: We expect there to be a single app. + require.Len(t, secondAgent.Apps, 1) + secondApp := secondAgent.Apps[0] + + // And: We expect this app to have the post-claim URL. + require.Equal(t, userAppURL, secondApp.URL) } diff --git a/agent/agentcontainers/containers_dockercli_test.go b/agent/agentcontainers/containers_dockercli_test.go index c69110a757bc7..3c299e353858d 100644 --- a/agent/agentcontainers/containers_dockercli_test.go +++ b/agent/agentcontainers/containers_dockercli_test.go @@ -55,11 +55,11 @@ func TestIntegrationDockerCLI(t *testing.T) { }, testutil.WaitShort, testutil.IntervalSlow, "Container did not start in time") dcli := agentcontainers.NewDockerCLI(agentexec.DefaultExecer) - ctx := testutil.Context(t, testutil.WaitMedium) // Longer timeout for multiple subtests containerName := strings.TrimPrefix(ct.Container.Name, "/") t.Run("DetectArchitecture", func(t *testing.T) { t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) arch, err := dcli.DetectArchitecture(ctx, containerName) require.NoError(t, err, "DetectArchitecture failed") @@ -71,6 +71,7 @@ func TestIntegrationDockerCLI(t *testing.T) { t.Run("Copy", func(t *testing.T) { t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) want := "Help, I'm trapped!" tempFile := filepath.Join(t.TempDir(), "test-file.txt") @@ -90,6 +91,7 @@ func TestIntegrationDockerCLI(t *testing.T) { t.Run("ExecAs", func(t *testing.T) { t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) // Test ExecAs without specifying user (should use container's default). want := "root" diff --git a/agent/agentcontainers/devcontainercli.go b/agent/agentcontainers/devcontainercli.go index d7cd25f85a7b3..2242e62f602e8 100644 --- a/agent/agentcontainers/devcontainercli.go +++ b/agent/agentcontainers/devcontainercli.go @@ -91,6 +91,7 @@ type CoderCustomization struct { Apps []SubAgentApp `json:"apps,omitempty"` Name string `json:"name,omitempty"` Ignore bool `json:"ignore,omitempty"` + AutoStart bool `json:"autoStart,omitempty"` } type DevcontainerWorkspace struct { diff --git a/agent/agentcontainers/watcher/watcher_test.go b/agent/agentcontainers/watcher/watcher_test.go index 6cddfbdcee276..08222357d5fd0 100644 --- a/agent/agentcontainers/watcher/watcher_test.go +++ b/agent/agentcontainers/watcher/watcher_test.go @@ -4,6 +4,7 @@ import ( "context" "os" "path/filepath" + "runtime" "testing" "github.com/fsnotify/fsnotify" @@ -88,24 +89,34 @@ func TestFSNotifyWatcher(t *testing.T) { break } - err = os.WriteFile(testFile+".atomic", []byte(`{"test": "atomic"}`), 0o600) - require.NoError(t, err, "write new atomic test file failed") - - err = os.Rename(testFile+".atomic", testFile) - require.NoError(t, err, "rename atomic test file failed") - - // Verify that we receive the event we want. - for { - event, err := wut.Next(ctx) - require.NoError(t, err, "next event failed") - require.NotNil(t, event, "want non-nil event") - if !event.Has(fsnotify.Create) { - t.Logf("Ignoring event: %s", event) - continue + // TODO(DanielleMaywood): + // Unfortunately it appears this atomic-rename phase of the test is flakey on macOS. + // + // This test flake could be indicative of an issue that may present itself + // in a running environment. Fortunately, we only use this (as of 2025-07-29) + // for our dev container integration. We do not expect the host workspace + // (where this is used), to ever be run on macOS, as containers are a linux + // paradigm. + if runtime.GOOS != "darwin" { + err = os.WriteFile(testFile+".atomic", []byte(`{"test": "atomic"}`), 0o600) + require.NoError(t, err, "write new atomic test file failed") + + err = os.Rename(testFile+".atomic", testFile) + require.NoError(t, err, "rename atomic test file failed") + + // Verify that we receive the event we want. + for { + event, err := wut.Next(ctx) + require.NoError(t, err, "next event failed") + require.NotNil(t, event, "want non-nil event") + if !event.Has(fsnotify.Create) { + t.Logf("Ignoring event: %s", event) + continue + } + require.Truef(t, event.Has(fsnotify.Create), "want create event: %s", event.String()) + require.Equal(t, event.Name, testFile, "want event for test file") + break } - require.Truef(t, event.Has(fsnotify.Create), "want create event: %s", event.String()) - require.Equal(t, event.Name, testFile, "want event for test file") - break } // Test removing the file from the watcher. diff --git a/agent/agentssh/agentssh.go b/agent/agentssh/agentssh.go index f53fe207c72cf..f9c28a3e6ee25 100644 --- a/agent/agentssh/agentssh.go +++ b/agent/agentssh/agentssh.go @@ -46,6 +46,8 @@ const ( // MagicProcessCmdlineJetBrains is a string in a process's command line that // uniquely identifies it as JetBrains software. MagicProcessCmdlineJetBrains = "idea.vendor.name=JetBrains" + MagicProcessCmdlineToolbox = "com.jetbrains.toolbox" + MagicProcessCmdlineGateway = "remote-dev-server" // BlockedFileTransferErrorCode indicates that SSH server restricted the raw command from performing // the file transfer. diff --git a/agent/agentssh/agentssh_test.go b/agent/agentssh/agentssh_test.go index 159fe345483d2..7bf91123d5852 100644 --- a/agent/agentssh/agentssh_test.go +++ b/agent/agentssh/agentssh_test.go @@ -413,8 +413,9 @@ func TestSSHServer_ClosesStdin(t *testing.T) { ctx := testutil.Context(t, testutil.WaitMedium) logger := testutil.Logger(t) - s, err := agentssh.NewServer(ctx, logger, prometheus.NewRegistry(), afero.NewMemMapFs(), agentexec.DefaultExecer, nil) + s, err := agentssh.NewServer(ctx, logger.Named("ssh-server"), prometheus.NewRegistry(), afero.NewMemMapFs(), agentexec.DefaultExecer, nil) require.NoError(t, err) + logger = logger.Named("test") defer s.Close() err = s.UpdateHostSigner(42) assert.NoError(t, err) @@ -469,15 +470,25 @@ func TestSSHServer_ClosesStdin(t *testing.T) { err = testutil.RequireReceive(ctx, t, readCh) require.NoError(t, err) - sess.Close() + err = sess.Close() + require.NoError(t, err) var content []byte + expected := []byte("read exit code: 1\n") testutil.Eventually(ctx, t, func(_ context.Context) bool { content, err = os.ReadFile(filePath) - return err == nil + if err != nil { + logger.Debug(ctx, "failed to read file; will retry", slog.Error(err)) + return false + } + if len(content) != len(expected) { + logger.Debug(ctx, "file is partially written", slog.F("content", content)) + return false + } + return true }, testutil.IntervalFast) require.NoError(t, err) - require.Equal(t, "read exit code: 1\n", string(content)) + require.Equal(t, string(expected), string(content)) } func sshClient(t *testing.T, addr string) *ssh.Client { diff --git a/agent/agentssh/jetbrainstrack.go b/agent/agentssh/jetbrainstrack.go index 9b2fdf83b21d0..874f4c278ce79 100644 --- a/agent/agentssh/jetbrainstrack.go +++ b/agent/agentssh/jetbrainstrack.go @@ -53,7 +53,7 @@ func NewJetbrainsChannelWatcher(ctx ssh.Context, logger slog.Logger, reportConne // If this is not JetBrains, then we do not need to do anything special. We // attempt to match on something that appears unique to JetBrains software. - if !strings.Contains(strings.ToLower(cmdline), strings.ToLower(MagicProcessCmdlineJetBrains)) { + if !isJetbrainsProcess(cmdline) { return newChannel } @@ -104,3 +104,18 @@ func (c *ChannelOnClose) Close() error { c.once.Do(c.done) return c.Channel.Close() } + +func isJetbrainsProcess(cmdline string) bool { + opts := []string{ + MagicProcessCmdlineJetBrains, + MagicProcessCmdlineToolbox, + MagicProcessCmdlineGateway, + } + + for _, opt := range opts { + if strings.Contains(strings.ToLower(cmdline), strings.ToLower(opt)) { + return true + } + } + return false +} diff --git a/agent/agentssh/x11_test.go b/agent/agentssh/x11_test.go index 83af8a2f83838..2f2c657f65036 100644 --- a/agent/agentssh/x11_test.go +++ b/agent/agentssh/x11_test.go @@ -135,7 +135,7 @@ func TestServer_X11_EvictionLRU(t *testing.T) { t.Skip("X11 forwarding is only supported on Linux") } - ctx := testutil.Context(t, testutil.WaitLong) + ctx := testutil.Context(t, testutil.WaitSuperLong) logger := testutil.Logger(t) fs := afero.NewMemMapFs() @@ -238,7 +238,9 @@ func TestServer_X11_EvictionLRU(t *testing.T) { payload := "hello world" go func() { conn, err := inproc.Dial(ctx, testutil.NewAddr("tcp", fmt.Sprintf("localhost:%d", agentssh.X11StartPort+agentssh.X11DefaultDisplayOffset))) - assert.NoError(t, err) + if !assert.NoError(t, err) { + return + } _, err = conn.Write([]byte(payload)) assert.NoError(t, err) _ = conn.Close() diff --git a/biome.jsonc b/biome.jsonc new file mode 100644 index 0000000000000..ae81184cdca0c --- /dev/null +++ b/biome.jsonc @@ -0,0 +1,86 @@ +{ + "vcs": { + "enabled": true, + "clientKind": "git", + "useIgnoreFile": true, + "defaultBranch": "main" + }, + "files": { + "includes": [ + "**", + "!**/pnpm-lock.yaml" + ], + "ignoreUnknown": true + }, + "linter": { + "rules": { + "a11y": { + "noSvgWithoutTitle": "off", + "useButtonType": "off", + "useSemanticElements": "off", + "noStaticElementInteractions": "off" + }, + "correctness": { + "noUnusedImports": "warn", + "useUniqueElementIds": "off", // TODO: This is new but we want to fix it + "noNestedComponentDefinitions": "off", // TODO: Investigate, since it is used by shadcn components + "noUnusedVariables": { + "level": "warn", + "options": { + "ignoreRestSiblings": true + } + } + }, + "style": { + "noNonNullAssertion": "off", + "noParameterAssign": "off", + "useDefaultParameterLast": "off", + "useSelfClosingElements": "off", + "useAsConstAssertion": "error", + "useEnumInitializers": "error", + "useSingleVarDeclarator": "error", + "noUnusedTemplateLiteral": "error", + "useNumberNamespace": "error", + "noInferrableTypes": "error", + "noUselessElse": "error", + "noRestrictedImports": { + "level": "error", + "options": { + "paths": { + "@mui/material": "Use @mui/material/ instead. See: https://material-ui.com/guides/minimizing-bundle-size/.", + "@mui/icons-material": "Use @mui/icons-material/ instead. See: https://material-ui.com/guides/minimizing-bundle-size/.", + "@mui/material/Avatar": "Use components/Avatar/Avatar instead.", + "@mui/material/Alert": "Use components/Alert/Alert instead.", + "@mui/material/Popover": "Use components/Popover/Popover instead.", + "@mui/material/Typography": "Use native HTML elements instead. Eg: ,

,

, etc.", + "@mui/material/Box": "Use a
instead.", + "@mui/material/styles": "Import from @emotion/react instead.", + "lodash": "Use lodash/ instead." + } + } + } + }, + "suspicious": { + "noArrayIndexKey": "off", + "noThenProperty": "off", + "noTemplateCurlyInString": "off", + "useIterableCallbackReturn": "off", + "noUnknownAtRules": "off", // Allow Tailwind directives + "noConsole": { + "level": "error", + "options": { + "allow": [ + "error", + "info", + "warn" + ] + } + } + }, + "complexity": { + "noImportantStyles": "off" // TODO: check and fix !important styles + } + } + }, + "$schema": "https://biomejs.dev/schemas/2.2.0/schema.json" +} diff --git a/catalog-info.yaml b/catalog-info.yaml new file mode 100644 index 0000000000000..91f59872a89ae --- /dev/null +++ b/catalog-info.yaml @@ -0,0 +1,10 @@ +apiVersion: backstage.io/v1alpha1 +kind: Component +metadata: + name: coder + annotations: + github.com/project-slug: 'coder/coder' +spec: + type: service + lifecycle: production + owner: rd diff --git a/cli/agent.go b/cli/agent.go index 4f50fbfe88942..c192d4429ccaf 100644 --- a/cli/agent.go +++ b/cli/agent.go @@ -40,23 +40,24 @@ import ( func (r *RootCmd) workspaceAgent() *serpent.Command { var ( - auth string - logDir string - scriptDataDir string - pprofAddress string - noReap bool - sshMaxTimeout time.Duration - tailnetListenPort int64 - prometheusAddress string - debugAddress string - slogHumanPath string - slogJSONPath string - slogStackdriverPath string - blockFileTransfer bool - agentHeaderCommand string - agentHeader []string - devcontainers bool - devcontainerProjectDiscovery bool + auth string + logDir string + scriptDataDir string + pprofAddress string + noReap bool + sshMaxTimeout time.Duration + tailnetListenPort int64 + prometheusAddress string + debugAddress string + slogHumanPath string + slogJSONPath string + slogStackdriverPath string + blockFileTransfer bool + agentHeaderCommand string + agentHeader []string + devcontainers bool + devcontainerProjectDiscovery bool + devcontainerDiscoveryAutostart bool ) cmd := &serpent.Command{ Use: "agent", @@ -366,6 +367,7 @@ func (r *RootCmd) workspaceAgent() *serpent.Command { DevcontainerAPIOptions: []agentcontainers.Option{ agentcontainers.WithSubAgentURL(r.agentURL.String()), agentcontainers.WithProjectDiscovery(devcontainerProjectDiscovery), + agentcontainers.WithDiscoveryAutostart(devcontainerDiscoveryAutostart), }, }) @@ -519,6 +521,13 @@ func (r *RootCmd) workspaceAgent() *serpent.Command { Description: "Allow the agent to search the filesystem for devcontainer projects.", Value: serpent.BoolOf(&devcontainerProjectDiscovery), }, + { + Flag: "devcontainers-discovery-autostart-enable", + Default: "false", + Env: "CODER_AGENT_DEVCONTAINERS_DISCOVERY_AUTOSTART_ENABLE", + Description: "Allow the agent to autostart devcontainer projects it discovers based on their configuration.", + Value: serpent.BoolOf(&devcontainerDiscoveryAutostart), + }, } return cmd diff --git a/cli/agent_test.go b/cli/agent_test.go index 0a948c0c84e9a..1592235babaef 100644 --- a/cli/agent_test.go +++ b/cli/agent_test.go @@ -21,6 +21,7 @@ import ( "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbfake" + "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/workspacesdk" "github.com/coder/coder/v2/provisionersdk/proto" @@ -67,7 +68,12 @@ func TestWorkspaceAgent(t *testing.T) { t.Parallel() instanceID := "instanceidentifier" certificates, metadataClient := coderdtest.NewAzureInstanceIdentity(t, instanceID) - client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{ + db, ps := dbtestutil.NewDB(t, + dbtestutil.WithDumpOnFailure(), + ) + client := coderdtest.New(t, &coderdtest.Options{ + Database: db, + Pubsub: ps, AzureCertificates: certificates, }) user := coderdtest.CreateFirstUser(t, client) @@ -106,7 +112,12 @@ func TestWorkspaceAgent(t *testing.T) { t.Parallel() instanceID := "instanceidentifier" certificates, metadataClient := coderdtest.NewAWSInstanceIdentity(t, instanceID) - client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{ + db, ps := dbtestutil.NewDB(t, + dbtestutil.WithDumpOnFailure(), + ) + client := coderdtest.New(t, &coderdtest.Options{ + Database: db, + Pubsub: ps, AWSCertificates: certificates, }) user := coderdtest.CreateFirstUser(t, client) @@ -146,7 +157,12 @@ func TestWorkspaceAgent(t *testing.T) { t.Parallel() instanceID := "instanceidentifier" validator, metadataClient := coderdtest.NewGoogleInstanceIdentity(t, instanceID, false) - client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{ + db, ps := dbtestutil.NewDB(t, + dbtestutil.WithDumpOnFailure(), + ) + client := coderdtest.New(t, &coderdtest.Options{ + Database: db, + Pubsub: ps, GoogleTokenValidator: validator, }) owner := coderdtest.CreateFirstUser(t, client) diff --git a/cli/cliui/parameter.go b/cli/cliui/parameter.go index 2e639f8dfa425..d972e346bf196 100644 --- a/cli/cliui/parameter.go +++ b/cli/cliui/parameter.go @@ -38,15 +38,16 @@ func RichParameter(inv *serpent.Invocation, templateVersionParameter codersdk.Te // Move the cursor up a single line for nicer display! _, _ = fmt.Fprint(inv.Stdout, "\033[1A") - var options []string - err = json.Unmarshal([]byte(templateVersionParameter.DefaultValue), &options) + var defaults []string + err = json.Unmarshal([]byte(templateVersionParameter.DefaultValue), &defaults) if err != nil { return "", err } - values, err := MultiSelect(inv, MultiSelectOptions{ - Options: options, - Defaults: options, + values, err := RichMultiSelect(inv, RichMultiSelectOptions{ + Options: templateVersionParameter.Options, + Defaults: defaults, + EnableCustomInput: templateVersionParameter.FormType == "tag-select", }) if err == nil { v, err := json.Marshal(&values) diff --git a/cli/cliui/select.go b/cli/cliui/select.go index 40f63d92e279d..f609ca81c3e26 100644 --- a/cli/cliui/select.go +++ b/cli/cliui/select.go @@ -5,6 +5,7 @@ import ( "fmt" "os" "os/signal" + "slices" "strings" "syscall" @@ -299,6 +300,77 @@ func (m selectModel) filteredOptions() []string { return options } +type RichMultiSelectOptions struct { + Message string + Options []codersdk.TemplateVersionParameterOption + Defaults []string + EnableCustomInput bool +} + +func RichMultiSelect(inv *serpent.Invocation, richOptions RichMultiSelectOptions) ([]string, error) { + var opts []string + var defaultOpts []string + + asLine := func(option codersdk.TemplateVersionParameterOption) string { + line := option.Name + if len(option.Description) > 0 { + line += ": " + option.Description + } + return line + } + + var predefinedOpts []string + for i, option := range richOptions.Options { + opts = append(opts, asLine(option)) // Some options may have description defined. + + // Check if option is selected by default + if slices.Contains(richOptions.Defaults, option.Value) { + defaultOpts = append(defaultOpts, opts[i]) + predefinedOpts = append(predefinedOpts, option.Value) + } + } + + // Check if "defaults" contains extra/custom options, user could select them. + for _, def := range richOptions.Defaults { + if !slices.Contains(predefinedOpts, def) { + opts = append(opts, def) + defaultOpts = append(defaultOpts, def) + } + } + + selected, err := MultiSelect(inv, MultiSelectOptions{ + Message: richOptions.Message, + Options: opts, + Defaults: defaultOpts, + EnableCustomInput: richOptions.EnableCustomInput, + }) + if err != nil { + return nil, err + } + + // Check selected option, convert descriptions (line) to values + // + // The function must return an initialized empty array, since it is later marshaled + // into JSON. Otherwise, `var results []string` would be marshaled to "null". + // See: https://github.com/golang/go/issues/27589 + results := []string{} + for _, sel := range selected { + custom := true + for i, option := range richOptions.Options { + if asLine(option) == sel { + results = append(results, richOptions.Options[i].Value) + custom = false + break + } + } + + if custom { + results = append(results, sel) + } + } + return results, nil +} + type MultiSelectOptions struct { Message string Options []string diff --git a/cli/cliui/select_test.go b/cli/cliui/select_test.go index c7630ac4f2460..55ab81f50f01b 100644 --- a/cli/cliui/select_test.go +++ b/cli/cliui/select_test.go @@ -52,15 +52,8 @@ func TestRichSelect(t *testing.T) { go func() { resp, err := newRichSelect(ptty, cliui.RichSelectOptions{ Options: []codersdk.TemplateVersionParameterOption{ - { - Name: "A-Name", - Value: "A-Value", - Description: "A-Description.", - }, { - Name: "B-Name", - Value: "B-Value", - Description: "B-Description.", - }, + {Name: "A-Name", Value: "A-Value", Description: "A-Description."}, + {Name: "B-Name", Value: "B-Value", Description: "B-Description."}, }, }) assert.NoError(t, err) @@ -86,63 +79,130 @@ func newRichSelect(ptty *ptytest.PTY, opts cliui.RichSelectOptions) (string, err return value, inv.Run() } -func TestMultiSelect(t *testing.T) { +func TestRichMultiSelect(t *testing.T) { t.Parallel() - t.Run("MultiSelect", func(t *testing.T) { - items := []string{"aaa", "bbb", "ccc"} - t.Parallel() - ptty := ptytest.New(t) - msgChan := make(chan []string) - go func() { - resp, err := newMultiSelect(ptty, items) - assert.NoError(t, err) - msgChan <- resp - }() - require.Equal(t, items, <-msgChan) - }) + tests := []struct { + name string + options []codersdk.TemplateVersionParameterOption + defaults []string + allowCustom bool + want []string + }{ + { + name: "Predefined", + options: []codersdk.TemplateVersionParameterOption{ + {Name: "AAA", Description: "This is AAA", Value: "aaa"}, + {Name: "BBB", Description: "This is BBB", Value: "bbb"}, + {Name: "CCC", Description: "This is CCC", Value: "ccc"}, + }, + defaults: []string{"bbb", "ccc"}, + allowCustom: false, + want: []string{"bbb", "ccc"}, + }, + { + name: "Custom", + options: []codersdk.TemplateVersionParameterOption{ + {Name: "AAA", Description: "This is AAA", Value: "aaa"}, + {Name: "BBB", Description: "This is BBB", Value: "bbb"}, + {Name: "CCC", Description: "This is CCC", Value: "ccc"}, + }, + defaults: []string{"aaa", "bbb"}, + allowCustom: true, + want: []string{"aaa", "bbb"}, + }, + { + name: "NoOptionSelected", + options: []codersdk.TemplateVersionParameterOption{ + {Name: "AAA", Description: "This is AAA", Value: "aaa"}, + {Name: "BBB", Description: "This is BBB", Value: "bbb"}, + {Name: "CCC", Description: "This is CCC", Value: "ccc"}, + }, + defaults: []string{}, + allowCustom: false, + want: []string{}, + }, + } - t.Run("MultiSelectWithCustomInput", func(t *testing.T) { - t.Parallel() - items := []string{"Code", "Chairs", "Whale", "Diamond", "Carrot"} - ptty := ptytest.New(t) - msgChan := make(chan []string) - go func() { - resp, err := newMultiSelectWithCustomInput(ptty, items) - assert.NoError(t, err) - msgChan <- resp - }() - require.Equal(t, items, <-msgChan) - }) -} + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() -func newMultiSelectWithCustomInput(ptty *ptytest.PTY, items []string) ([]string, error) { - var values []string - cmd := &serpent.Command{ - Handler: func(inv *serpent.Invocation) error { - selectedItems, err := cliui.MultiSelect(inv, cliui.MultiSelectOptions{ - Options: items, - Defaults: items, - EnableCustomInput: true, - }) - if err == nil { - values = selectedItems + var selectedItems []string + var err error + cmd := &serpent.Command{ + Handler: func(inv *serpent.Invocation) error { + selectedItems, err = cliui.RichMultiSelect(inv, cliui.RichMultiSelectOptions{ + Options: tt.options, + Defaults: tt.defaults, + EnableCustomInput: tt.allowCustom, + }) + return err + }, } - return err + + doneChan := make(chan struct{}) + go func() { + defer close(doneChan) + err := cmd.Invoke().Run() + assert.NoError(t, err) + }() + <-doneChan + + require.Equal(t, tt.want, selectedItems) + }) + } +} + +func TestMultiSelect(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + items []string + allowCustom bool + want []string + }{ + { + name: "MultiSelect", + items: []string{"aaa", "bbb", "ccc"}, + allowCustom: false, + want: []string{"aaa", "bbb", "ccc"}, + }, + { + name: "MultiSelectWithCustomInput", + items: []string{"Code", "Chairs", "Whale", "Diamond", "Carrot"}, + allowCustom: true, + want: []string{"Code", "Chairs", "Whale", "Diamond", "Carrot"}, }, } - inv := cmd.Invoke() - ptty.Attach(inv) - return values, inv.Run() + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + ptty := ptytest.New(t) + msgChan := make(chan []string) + + go func() { + resp, err := newMultiSelect(ptty, tt.items, tt.allowCustom) + assert.NoError(t, err) + msgChan <- resp + }() + + require.Equal(t, tt.want, <-msgChan) + }) + } } -func newMultiSelect(ptty *ptytest.PTY, items []string) ([]string, error) { +func newMultiSelect(pty *ptytest.PTY, items []string, custom bool) ([]string, error) { var values []string cmd := &serpent.Command{ Handler: func(inv *serpent.Invocation) error { selectedItems, err := cliui.MultiSelect(inv, cliui.MultiSelectOptions{ - Options: items, - Defaults: items, + Options: items, + Defaults: items, + EnableCustomInput: custom, }) if err == nil { values = selectedItems @@ -151,6 +211,6 @@ func newMultiSelect(ptty *ptytest.PTY, items []string) ([]string, error) { }, } inv := cmd.Invoke() - ptty.Attach(inv) + pty.Attach(inv) return values, inv.Run() } diff --git a/cli/create.go b/cli/create.go index fbf26349b3b95..59ab0ba0fa6d7 100644 --- a/cli/create.go +++ b/cli/create.go @@ -2,6 +2,7 @@ package cli import ( "context" + "errors" "fmt" "io" "slices" @@ -21,10 +22,23 @@ import ( "github.com/coder/serpent" ) -func (r *RootCmd) create() *serpent.Command { +// PresetNone represents the special preset value "none". +// It is used when a user runs `create --preset none`, +// indicating that the CLI should not apply any preset. +const PresetNone = "none" + +var ErrNoPresetFound = xerrors.New("no preset found") + +type CreateOptions struct { + BeforeCreate func(ctx context.Context, client *codersdk.Client, template codersdk.Template, templateVersionID uuid.UUID) error + AfterCreate func(ctx context.Context, inv *serpent.Invocation, client *codersdk.Client, workspace codersdk.Workspace) error +} + +func (r *RootCmd) Create(opts CreateOptions) *serpent.Command { var ( templateName string templateVersion string + presetName string startAt string stopAfter time.Duration workspaceName string @@ -263,11 +277,52 @@ func (r *RootCmd) create() *serpent.Command { } } + // Get presets for the template version + tvPresets, err := client.TemplateVersionPresets(inv.Context(), templateVersionID) + if err != nil { + return xerrors.Errorf("failed to get presets: %w", err) + } + + var preset *codersdk.Preset + var presetParameters []codersdk.WorkspaceBuildParameter + + // If the template has no presets, or the user explicitly used --preset none, + // skip applying a preset + if len(tvPresets) > 0 && strings.ToLower(presetName) != PresetNone { + // Attempt to resolve which preset to use + preset, err = resolvePreset(tvPresets, presetName) + if err != nil { + if !errors.Is(err, ErrNoPresetFound) { + return xerrors.Errorf("unable to resolve preset: %w", err) + } + // If no preset found, prompt the user to choose a preset + if preset, err = promptPresetSelection(inv, tvPresets); err != nil { + return xerrors.Errorf("unable to prompt user for preset: %w", err) + } + } + + // Convert preset parameters into workspace build parameters + presetParameters = presetParameterAsWorkspaceBuildParameters(preset.Parameters) + // Inform the user which preset was applied and its parameters + displayAppliedPreset(inv, preset, presetParameters) + } else { + // Inform the user that no preset was applied + _, _ = fmt.Fprintf(inv.Stdout, "%s", cliui.Bold("No preset applied.")) + } + + if opts.BeforeCreate != nil { + err = opts.BeforeCreate(inv.Context(), client, template, templateVersionID) + if err != nil { + return xerrors.Errorf("before create: %w", err) + } + } + richParameters, err := prepWorkspaceBuild(inv, client, prepWorkspaceBuildArgs{ Action: WorkspaceCreate, TemplateVersionID: templateVersionID, NewWorkspaceName: workspaceName, + PresetParameters: presetParameters, RichParameterFile: parameterFlags.richParameterFile, RichParameters: cliBuildParameters, RichParameterDefaults: cliBuildParameterDefaults, @@ -291,14 +346,21 @@ func (r *RootCmd) create() *serpent.Command { ttlMillis = ptr.Ref(stopAfter.Milliseconds()) } - workspace, err := client.CreateUserWorkspace(inv.Context(), workspaceOwner, codersdk.CreateWorkspaceRequest{ + req := codersdk.CreateWorkspaceRequest{ TemplateVersionID: templateVersionID, Name: workspaceName, AutostartSchedule: schedSpec, TTLMillis: ttlMillis, RichParameterValues: richParameters, AutomaticUpdates: codersdk.AutomaticUpdates(autoUpdates), - }) + } + + // If a preset exists, update the create workspace request's preset ID + if preset != nil { + req.TemplateVersionPresetID = preset.ID + } + + workspace, err := client.CreateUserWorkspace(inv.Context(), workspaceOwner, req) if err != nil { return xerrors.Errorf("create workspace: %w", err) } @@ -316,6 +378,14 @@ func (r *RootCmd) create() *serpent.Command { cliui.Keyword(workspace.Name), cliui.Timestamp(time.Now()), ) + + if opts.AfterCreate != nil { + err = opts.AfterCreate(inv.Context(), inv, client, workspace) + if err != nil { + return err + } + } + return nil }, } @@ -333,6 +403,12 @@ func (r *RootCmd) create() *serpent.Command { Description: "Specify a template version name.", Value: serpent.StringOf(&templateVersion), }, + serpent.Option{ + Flag: "preset", + Env: "CODER_PRESET_NAME", + Description: "Specify the name of a template version preset. Use 'none' to explicitly indicate that no preset should be used.", + Value: serpent.StringOf(&presetName), + }, serpent.Option{ Flag: "start-at", Env: "CODER_WORKSPACE_START_AT", @@ -377,12 +453,81 @@ type prepWorkspaceBuildArgs struct { PromptEphemeralParameters bool EphemeralParameters []codersdk.WorkspaceBuildParameter + PresetParameters []codersdk.WorkspaceBuildParameter PromptRichParameters bool RichParameters []codersdk.WorkspaceBuildParameter RichParameterFile string RichParameterDefaults []codersdk.WorkspaceBuildParameter } +// resolvePreset returns the preset matching the given presetName (if specified), +// or the default preset (if any). +// Returns ErrNoPresetFound if no matching or default preset is found. +func resolvePreset(presets []codersdk.Preset, presetName string) (*codersdk.Preset, error) { + // If preset name is specified, find it + if presetName != "" { + for _, p := range presets { + if p.Name == presetName { + return &p, nil + } + } + return nil, xerrors.Errorf("preset %q not found", presetName) + } + + // No preset name specified, search for the default preset + for _, p := range presets { + if p.Default { + return &p, nil + } + } + + // No preset found + return nil, ErrNoPresetFound +} + +// promptPresetSelection shows a CLI selection menu of the presets defined in the template version. +// Returns the selected preset +func promptPresetSelection(inv *serpent.Invocation, presets []codersdk.Preset) (*codersdk.Preset, error) { + presetMap := make(map[string]*codersdk.Preset) + var presetOptions []string + + for _, preset := range presets { + var option string + if preset.Description == "" { + option = preset.Name + } else { + option = fmt.Sprintf("%s: %s", preset.Name, preset.Description) + } + presetOptions = append(presetOptions, option) + presetMap[option] = &preset + } + + // Show selection UI + _, _ = fmt.Fprintln(inv.Stdout, pretty.Sprint(cliui.DefaultStyles.Wrap, "Select a preset below:")) + selected, err := cliui.Select(inv, cliui.SelectOptions{ + Options: presetOptions, + HideSearch: true, + }) + if err != nil { + return nil, xerrors.Errorf("failed to select preset: %w", err) + } + + return presetMap[selected], nil +} + +// displayAppliedPreset shows the user which preset was applied and its parameters +func displayAppliedPreset(inv *serpent.Invocation, preset *codersdk.Preset, parameters []codersdk.WorkspaceBuildParameter) { + label := fmt.Sprintf("Preset '%s'", preset.Name) + if preset.Default { + label += " (default)" + } + + _, _ = fmt.Fprintf(inv.Stdout, "%s applied:\n", cliui.Bold(label)) + for _, param := range parameters { + _, _ = fmt.Fprintf(inv.Stdout, " %s: '%s'\n", cliui.Bold(param.Name), param.Value) + } +} + // prepWorkspaceBuild will ensure a workspace build will succeed on the latest template version. // Any missing params will be prompted to the user. It supports rich parameters. func prepWorkspaceBuild(inv *serpent.Invocation, client *codersdk.Client, args prepWorkspaceBuildArgs) ([]codersdk.WorkspaceBuildParameter, error) { @@ -411,6 +556,7 @@ func prepWorkspaceBuild(inv *serpent.Invocation, client *codersdk.Client, args p WithSourceWorkspaceParameters(args.SourceWorkspaceParameters). WithPromptEphemeralParameters(args.PromptEphemeralParameters). WithEphemeralParameters(args.EphemeralParameters). + WithPresetParameters(args.PresetParameters). WithPromptRichParameters(args.PromptRichParameters). WithRichParameters(args.RichParameters). WithRichParametersFile(parameterFile). diff --git a/cli/create_test.go b/cli/create_test.go index 668fd466d605c..dd26e450d3916 100644 --- a/cli/create_test.go +++ b/cli/create_test.go @@ -12,6 +12,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/coder/coder/v2/cli" "github.com/coder/coder/v2/cli/clitest" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/externalauth" @@ -298,7 +299,7 @@ func TestCreate(t *testing.T) { }) } -func prepareEchoResponses(parameters []*proto.RichParameter) *echo.Responses { +func prepareEchoResponses(parameters []*proto.RichParameter, presets ...*proto.Preset) *echo.Responses { return &echo.Responses{ Parse: echo.ParseComplete, ProvisionPlan: []*proto.Response{ @@ -306,6 +307,7 @@ func prepareEchoResponses(parameters []*proto.RichParameter) *echo.Responses { Type: &proto.Response_Plan{ Plan: &proto.PlanComplete{ Parameters: parameters, + Presets: presets, }, }, }, @@ -663,6 +665,642 @@ func TestCreateWithRichParameters(t *testing.T) { }) } +func TestCreateWithPreset(t *testing.T) { + t.Parallel() + + const ( + firstParameterName = "first_parameter" + firstParameterDisplayName = "First Parameter" + firstParameterDescription = "This is the first parameter" + firstParameterValue = "1" + + firstOptionalParameterName = "first_optional_parameter" + firstOptionalParameterDescription = "This is the first optional parameter" + firstOptionalParameterValue = "1" + secondOptionalParameterName = "second_optional_parameter" + secondOptionalParameterDescription = "This is the second optional parameter" + secondOptionalParameterValue = "2" + + thirdParameterName = "third_parameter" + thirdParameterDescription = "This is the third parameter" + thirdParameterValue = "3" + ) + + echoResponses := func(presets ...*proto.Preset) *echo.Responses { + return prepareEchoResponses([]*proto.RichParameter{ + { + Name: firstParameterName, + DisplayName: firstParameterDisplayName, + Description: firstParameterDescription, + Mutable: true, + DefaultValue: firstParameterValue, + Options: []*proto.RichParameterOption{ + { + Name: firstOptionalParameterName, + Description: firstOptionalParameterDescription, + Value: firstOptionalParameterValue, + }, + { + Name: secondOptionalParameterName, + Description: secondOptionalParameterDescription, + Value: secondOptionalParameterValue, + }, + }, + }, + { + Name: thirdParameterName, + Description: thirdParameterDescription, + DefaultValue: thirdParameterValue, + Mutable: true, + }, + }, presets...) + } + + // This test verifies that when a template has presets, + // including a default preset, and the user specifies a `--preset` flag, + // the CLI uses the specified preset instead of the default + t.Run("PresetFlag", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + // Given: a template and a template version with two presets, including a default + defaultPreset := proto.Preset{ + Name: "preset-default", + Default: true, + Parameters: []*proto.PresetParameter{ + {Name: thirdParameterName, Value: thirdParameterValue}, + }, + } + preset := proto.Preset{ + Name: "preset-test", + Parameters: []*proto.PresetParameter{ + {Name: firstParameterName, Value: secondOptionalParameterValue}, + {Name: thirdParameterName, Value: thirdParameterValue}, + }, + } + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, echoResponses(&defaultPreset, &preset)) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + // When: running the create command with the specified preset + workspaceName := "my-workspace" + inv, root := clitest.New(t, "create", workspaceName, "--template", template.Name, "-y", "--preset", preset.Name) + clitest.SetupConfig(t, member, root) + pty := ptytest.New(t).Attach(inv) + inv.Stdout = pty.Output() + inv.Stderr = pty.Output() + err := inv.Run() + require.NoError(t, err) + + // Should: display the selected preset as well as its parameters + presetName := fmt.Sprintf("Preset '%s' applied:", preset.Name) + pty.ExpectMatch(presetName) + pty.ExpectMatch(fmt.Sprintf("%s: '%s'", firstParameterName, secondOptionalParameterValue)) + pty.ExpectMatch(fmt.Sprintf("%s: '%s'", thirdParameterName, thirdParameterValue)) + + // Verify if the new workspace uses expected parameters. + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + + tvPresets, err := client.TemplateVersionPresets(ctx, version.ID) + require.NoError(t, err) + require.Len(t, tvPresets, 2) + var selectedPreset *codersdk.Preset + for _, tvPreset := range tvPresets { + if tvPreset.Name == preset.Name { + selectedPreset = &tvPreset + } + } + require.NotNil(t, selectedPreset) + + workspaces, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{ + Name: workspaceName, + }) + require.NoError(t, err) + require.Len(t, workspaces.Workspaces, 1) + + // Should: create a workspace using the expected template version and the preset-defined parameters + workspaceLatestBuild := workspaces.Workspaces[0].LatestBuild + require.Equal(t, version.ID, workspaceLatestBuild.TemplateVersionID) + require.Equal(t, selectedPreset.ID, *workspaceLatestBuild.TemplateVersionPresetID) + buildParameters, err := client.WorkspaceBuildParameters(ctx, workspaceLatestBuild.ID) + require.NoError(t, err) + require.Len(t, buildParameters, 2) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: firstParameterName, Value: secondOptionalParameterValue}) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: thirdParameterName, Value: thirdParameterValue}) + }) + + // This test verifies that when a template has presets, + // including a default preset, and the user does not specify the `--preset` flag, + // the CLI automatically uses the default preset to create the workspace + t.Run("DefaultPreset", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + // Given: a template and a template version with two presets, including a default + defaultPreset := proto.Preset{ + Name: "preset-default", + Default: true, + Parameters: []*proto.PresetParameter{ + {Name: firstParameterName, Value: secondOptionalParameterValue}, + {Name: thirdParameterName, Value: thirdParameterValue}, + }, + } + preset := proto.Preset{ + Name: "preset-test", + Parameters: []*proto.PresetParameter{ + {Name: thirdParameterName, Value: thirdParameterValue}, + }, + } + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, echoResponses(&defaultPreset, &preset)) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + // When: running the create command without a preset + workspaceName := "my-workspace" + inv, root := clitest.New(t, "create", workspaceName, "--template", template.Name, "-y") + clitest.SetupConfig(t, member, root) + pty := ptytest.New(t).Attach(inv) + inv.Stdout = pty.Output() + inv.Stderr = pty.Output() + err := inv.Run() + require.NoError(t, err) + + // Should: display the default preset as well as its parameters + presetName := fmt.Sprintf("Preset '%s' (default) applied:", defaultPreset.Name) + pty.ExpectMatch(presetName) + pty.ExpectMatch(fmt.Sprintf("%s: '%s'", firstParameterName, secondOptionalParameterValue)) + pty.ExpectMatch(fmt.Sprintf("%s: '%s'", thirdParameterName, thirdParameterValue)) + + // Verify if the new workspace uses expected parameters. + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + + tvPresets, err := client.TemplateVersionPresets(ctx, version.ID) + require.NoError(t, err) + require.Len(t, tvPresets, 2) + var selectedPreset *codersdk.Preset + for _, tvPreset := range tvPresets { + if tvPreset.Default { + selectedPreset = &tvPreset + } + } + require.NotNil(t, selectedPreset) + + workspaces, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{ + Name: workspaceName, + }) + require.NoError(t, err) + require.Len(t, workspaces.Workspaces, 1) + + // Should: create a workspace using the expected template version and the default preset parameters + workspaceLatestBuild := workspaces.Workspaces[0].LatestBuild + require.Equal(t, version.ID, workspaceLatestBuild.TemplateVersionID) + require.Equal(t, selectedPreset.ID, *workspaceLatestBuild.TemplateVersionPresetID) + buildParameters, err := client.WorkspaceBuildParameters(ctx, workspaceLatestBuild.ID) + require.NoError(t, err) + require.Len(t, buildParameters, 2) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: firstParameterName, Value: secondOptionalParameterValue}) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: thirdParameterName, Value: thirdParameterValue}) + }) + + // This test verifies that when a template has presets but no default preset, + // and the user does not provide the `--preset` flag, + // the CLI prompts the user to select a preset. + t.Run("NoDefaultPresetPromptUser", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + // Given: a template and a template version with two presets + preset := proto.Preset{ + Name: "preset-test", + Description: "Preset Test.", + Parameters: []*proto.PresetParameter{ + {Name: firstParameterName, Value: secondOptionalParameterValue}, + {Name: thirdParameterName, Value: thirdParameterValue}, + }, + } + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, echoResponses(&preset)) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + // When: running the create command without specifying a preset + workspaceName := "my-workspace" + inv, root := clitest.New(t, "create", workspaceName, "--template", template.Name, + "--parameter", fmt.Sprintf("%s=%s", firstParameterName, firstOptionalParameterValue), + "--parameter", fmt.Sprintf("%s=%s", thirdParameterName, thirdParameterValue)) + clitest.SetupConfig(t, member, root) + doneChan := make(chan struct{}) + pty := ptytest.New(t).Attach(inv) + go func() { + defer close(doneChan) + err := inv.Run() + assert.NoError(t, err) + }() + + // Should: prompt the user for the preset + pty.ExpectMatch("Select a preset below:") + pty.WriteLine("\n") + pty.ExpectMatch("Preset 'preset-test' applied") + pty.ExpectMatch("Confirm create?") + pty.WriteLine("yes") + + <-doneChan + + // Verify if the new workspace uses expected parameters. + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + + tvPresets, err := client.TemplateVersionPresets(ctx, version.ID) + require.NoError(t, err) + require.Len(t, tvPresets, 1) + + workspaces, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{ + Name: workspaceName, + }) + require.NoError(t, err) + require.Len(t, workspaces.Workspaces, 1) + + // Should: create a workspace using the expected template version and the preset-defined parameters + workspaceLatestBuild := workspaces.Workspaces[0].LatestBuild + require.Equal(t, version.ID, workspaceLatestBuild.TemplateVersionID) + require.Equal(t, tvPresets[0].ID, *workspaceLatestBuild.TemplateVersionPresetID) + buildParameters, err := client.WorkspaceBuildParameters(ctx, workspaceLatestBuild.ID) + require.NoError(t, err) + require.Len(t, buildParameters, 2) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: firstParameterName, Value: secondOptionalParameterValue}) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: thirdParameterName, Value: thirdParameterValue}) + }) + + // This test verifies that when a template version has no presets, + // the CLI does not prompt the user to select a preset and proceeds + // with workspace creation without applying any preset. + t.Run("TemplateVersionWithoutPresets", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + // Given: a template and a template version without presets + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, echoResponses()) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + // When: running the create command without a preset + workspaceName := "my-workspace" + inv, root := clitest.New(t, "create", workspaceName, "--template", template.Name, "-y", + "--parameter", fmt.Sprintf("%s=%s", firstParameterName, firstOptionalParameterValue), + "--parameter", fmt.Sprintf("%s=%s", thirdParameterName, thirdParameterValue)) + clitest.SetupConfig(t, member, root) + pty := ptytest.New(t).Attach(inv) + inv.Stdout = pty.Output() + inv.Stderr = pty.Output() + err := inv.Run() + require.NoError(t, err) + pty.ExpectMatch("No preset applied.") + + // Verify if the new workspace uses expected parameters. + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + + workspaces, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{ + Name: workspaceName, + }) + require.NoError(t, err) + require.Len(t, workspaces.Workspaces, 1) + + // Should: create a workspace using the expected template version and no preset + workspaceLatestBuild := workspaces.Workspaces[0].LatestBuild + require.Equal(t, version.ID, workspaceLatestBuild.TemplateVersionID) + require.Nil(t, workspaceLatestBuild.TemplateVersionPresetID) + buildParameters, err := client.WorkspaceBuildParameters(ctx, workspaceLatestBuild.ID) + require.NoError(t, err) + require.Len(t, buildParameters, 2) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: firstParameterName, Value: firstOptionalParameterValue}) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: thirdParameterName, Value: thirdParameterValue}) + }) + + // This test verifies that when the user provides `--preset none`, + // the CLI skips applying any preset, even if the template version has a default preset. + // The workspace should be created without using any preset-defined parameters. + t.Run("PresetFlagNone", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + // Given: a template and a template version with a default preset + preset := proto.Preset{ + Name: "preset-test", + Default: true, + Parameters: []*proto.PresetParameter{ + {Name: firstParameterName, Value: secondOptionalParameterValue}, + {Name: thirdParameterName, Value: thirdParameterValue}, + }, + } + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, echoResponses(&preset)) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + // When: running the create command with flag '--preset none' + workspaceName := "my-workspace" + inv, root := clitest.New(t, "create", workspaceName, "--template", template.Name, "-y", "--preset", cli.PresetNone, + "--parameter", fmt.Sprintf("%s=%s", firstParameterName, firstOptionalParameterValue), + "--parameter", fmt.Sprintf("%s=%s", thirdParameterName, thirdParameterValue)) + clitest.SetupConfig(t, member, root) + pty := ptytest.New(t).Attach(inv) + inv.Stdout = pty.Output() + inv.Stderr = pty.Output() + err := inv.Run() + require.NoError(t, err) + pty.ExpectMatch("No preset applied.") + + // Verify that the new workspace doesn't use the preset parameters. + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + + tvPresets, err := client.TemplateVersionPresets(ctx, version.ID) + require.NoError(t, err) + require.Len(t, tvPresets, 1) + + workspaces, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{ + Name: workspaceName, + }) + require.NoError(t, err) + require.Len(t, workspaces.Workspaces, 1) + + // Should: create a workspace using the expected template version and no preset + workspaceLatestBuild := workspaces.Workspaces[0].LatestBuild + require.Equal(t, version.ID, workspaceLatestBuild.TemplateVersionID) + require.Nil(t, workspaceLatestBuild.TemplateVersionPresetID) + buildParameters, err := client.WorkspaceBuildParameters(ctx, workspaceLatestBuild.ID) + require.NoError(t, err) + require.Len(t, buildParameters, 2) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: firstParameterName, Value: firstOptionalParameterValue}) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: thirdParameterName, Value: thirdParameterValue}) + }) + + // This test verifies that the CLI returns an appropriate error + // when a user provides a `--preset` value that does not correspond + // to any existing preset in the template version. + t.Run("FailsWhenPresetDoesNotExist", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + // Given: a template and a template version where the preset defines values for all required parameters + preset := proto.Preset{ + Name: "preset-test", + Parameters: []*proto.PresetParameter{ + {Name: firstParameterName, Value: secondOptionalParameterValue}, + {Name: thirdParameterName, Value: thirdParameterValue}, + }, + } + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, echoResponses(&preset)) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + // When: running the create command with a non-existent preset + workspaceName := "my-workspace" + inv, root := clitest.New(t, "create", workspaceName, "--template", template.Name, "-y", "--preset", "invalid-preset") + clitest.SetupConfig(t, member, root) + pty := ptytest.New(t).Attach(inv) + inv.Stdout = pty.Output() + inv.Stderr = pty.Output() + err := inv.Run() + + // Should: fail with an error indicating the preset was not found + require.Contains(t, err.Error(), "preset \"invalid-preset\" not found") + }) + + // This test verifies that when both a preset and a user-provided + // `--parameter` flag define a value for the same parameter, + // the preset's value takes precedence over the user's. + // + // The preset defines one parameter (A), and two `--parameter` flags provide A and B. + // The workspace should be created using: + // - the value of parameter A from the preset (overriding the parameter flag's value), + // - and the value of parameter B from the parameter flag. + t.Run("PresetOverridesParameterFlagValues", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + // Given: a template version with a preset that defines one parameter + preset := proto.Preset{ + Name: "preset-test", + Parameters: []*proto.PresetParameter{ + {Name: firstParameterName, Value: secondOptionalParameterValue}, + }, + } + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, echoResponses(&preset)) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + // When: creating a workspace with a preset and passing overlapping and additional parameters via `--parameter` + workspaceName := "my-workspace" + inv, root := clitest.New(t, "create", workspaceName, "--template", template.Name, "-y", + "--preset", preset.Name, + "--parameter", fmt.Sprintf("%s=%s", firstParameterName, firstOptionalParameterValue), + "--parameter", fmt.Sprintf("%s=%s", thirdParameterName, thirdParameterValue)) + clitest.SetupConfig(t, member, root) + pty := ptytest.New(t).Attach(inv) + inv.Stdout = pty.Output() + inv.Stderr = pty.Output() + err := inv.Run() + require.NoError(t, err) + + // Should: display the selected preset as well as its parameter + presetName := fmt.Sprintf("Preset '%s' applied:", preset.Name) + pty.ExpectMatch(presetName) + pty.ExpectMatch(fmt.Sprintf("%s: '%s'", firstParameterName, secondOptionalParameterValue)) + + // Verify if the new workspace uses expected parameters. + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + + tvPresets, err := client.TemplateVersionPresets(ctx, version.ID) + require.NoError(t, err) + require.Len(t, tvPresets, 1) + + workspaces, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{ + Name: workspaceName, + }) + require.NoError(t, err) + require.Len(t, workspaces.Workspaces, 1) + + // Should: include both parameters, one from the preset and one from the `--parameter` flag + workspaceLatestBuild := workspaces.Workspaces[0].LatestBuild + require.Equal(t, version.ID, workspaceLatestBuild.TemplateVersionID) + require.Equal(t, tvPresets[0].ID, *workspaceLatestBuild.TemplateVersionPresetID) + buildParameters, err := client.WorkspaceBuildParameters(ctx, workspaceLatestBuild.ID) + require.NoError(t, err) + require.Len(t, buildParameters, 2) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: firstParameterName, Value: secondOptionalParameterValue}) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: thirdParameterName, Value: thirdParameterValue}) + }) + + // This test verifies that when both a preset and a user-provided + // `--rich-parameter-file` define a value for the same parameter, + // the preset's value takes precedence over the one in the file. + // + // The preset defines one parameter (A), and the parameter file provides two parameters (A and B). + // The workspace should be created using: + // - the value of parameter A from the preset (overriding the file's value), + // - and the value of parameter B from the file. + t.Run("PresetOverridesParameterFileValues", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + // Given: a template version with a preset that defines one parameter + preset := proto.Preset{ + Name: "preset-test", + Parameters: []*proto.PresetParameter{ + {Name: firstParameterName, Value: secondOptionalParameterValue}, + }, + } + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, echoResponses(&preset)) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + // When: creating a workspace with the preset and passing the second required parameter via `--rich-parameter-file` + workspaceName := "my-workspace" + tempDir := t.TempDir() + removeTmpDirUntilSuccessAfterTest(t, tempDir) + parameterFile, _ := os.CreateTemp(tempDir, "testParameterFile*.yaml") + _, _ = parameterFile.WriteString( + firstParameterName + ": " + firstOptionalParameterValue + "\n" + + thirdParameterName + ": " + thirdParameterValue) + inv, root := clitest.New(t, "create", workspaceName, "--template", template.Name, "-y", + "--preset", preset.Name, + "--rich-parameter-file", parameterFile.Name()) + clitest.SetupConfig(t, member, root) + pty := ptytest.New(t).Attach(inv) + inv.Stdout = pty.Output() + inv.Stderr = pty.Output() + err := inv.Run() + require.NoError(t, err) + + // Should: display the selected preset as well as its parameter + presetName := fmt.Sprintf("Preset '%s' applied:", preset.Name) + pty.ExpectMatch(presetName) + pty.ExpectMatch(fmt.Sprintf("%s: '%s'", firstParameterName, secondOptionalParameterValue)) + + // Verify if the new workspace uses expected parameters. + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + + tvPresets, err := client.TemplateVersionPresets(ctx, version.ID) + require.NoError(t, err) + require.Len(t, tvPresets, 1) + + workspaces, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{ + Name: workspaceName, + }) + require.NoError(t, err) + require.Len(t, workspaces.Workspaces, 1) + + // Should: include both parameters, one from the preset and one from the `--rich-parameter-file` flag + workspaceLatestBuild := workspaces.Workspaces[0].LatestBuild + require.Equal(t, version.ID, workspaceLatestBuild.TemplateVersionID) + require.Equal(t, tvPresets[0].ID, *workspaceLatestBuild.TemplateVersionPresetID) + buildParameters, err := client.WorkspaceBuildParameters(ctx, workspaceLatestBuild.ID) + require.NoError(t, err) + require.Len(t, buildParameters, 2) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: firstParameterName, Value: secondOptionalParameterValue}) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: thirdParameterName, Value: thirdParameterValue}) + }) + + // This test verifies that when a preset provides only some parameters, + // and the remaining ones are not provided via flags, + // the CLI prompts the user for input to fill in the missing parameters. + t.Run("PromptsForMissingParametersWhenPresetIsIncomplete", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + // Given: a template version with a preset that defines one parameter + preset := proto.Preset{ + Name: "preset-test", + Parameters: []*proto.PresetParameter{ + {Name: firstParameterName, Value: secondOptionalParameterValue}, + }, + } + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, echoResponses(&preset)) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + // When: running the create command with the specified preset + workspaceName := "my-workspace" + inv, root := clitest.New(t, "create", workspaceName, "--template", template.Name, "--preset", preset.Name) + clitest.SetupConfig(t, member, root) + doneChan := make(chan struct{}) + pty := ptytest.New(t).Attach(inv) + go func() { + defer close(doneChan) + err := inv.Run() + assert.NoError(t, err) + }() + + // Should: display the selected preset as well as its parameters + presetName := fmt.Sprintf("Preset '%s' applied:", preset.Name) + pty.ExpectMatch(presetName) + pty.ExpectMatch(fmt.Sprintf("%s: '%s'", firstParameterName, secondOptionalParameterValue)) + + // Should: prompt for the missing parameter + pty.ExpectMatch(thirdParameterDescription) + pty.WriteLine(thirdParameterValue) + pty.ExpectMatch("Confirm create?") + pty.WriteLine("yes") + + <-doneChan + + // Verify if the new workspace uses expected parameters. + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + tvPresets, err := client.TemplateVersionPresets(ctx, version.ID) + require.NoError(t, err) + require.Len(t, tvPresets, 1) + + workspaces, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{ + Name: workspaceName, + }) + require.NoError(t, err) + require.Len(t, workspaces.Workspaces, 1) + + // Should: create a workspace using the expected template version and the preset-defined parameters + workspaceLatestBuild := workspaces.Workspaces[0].LatestBuild + require.Equal(t, version.ID, workspaceLatestBuild.TemplateVersionID) + require.Equal(t, tvPresets[0].ID, *workspaceLatestBuild.TemplateVersionPresetID) + buildParameters, err := client.WorkspaceBuildParameters(ctx, workspaceLatestBuild.ID) + require.NoError(t, err) + require.Len(t, buildParameters, 2) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: firstParameterName, Value: secondOptionalParameterValue}) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: thirdParameterName, Value: thirdParameterValue}) + }) +} + func TestCreateValidateRichParameters(t *testing.T) { t.Parallel() diff --git a/cli/delete_test.go b/cli/delete_test.go index c01893419f80f..2e550d74849ab 100644 --- a/cli/delete_test.go +++ b/cli/delete_test.go @@ -111,7 +111,6 @@ func TestDelete(t *testing.T) { // The API checks if the user has any workspaces, so we cannot delete a user // this way. ctx := testutil.Context(t, testutil.WaitShort) - // nolint:gocritic // Unit test err := api.Database.UpdateUserDeletedByID(dbauthz.AsSystemRestricted(ctx), deleteMeUser.ID) require.NoError(t, err) diff --git a/cli/exp_prompts.go b/cli/exp_prompts.go index 225685a0c375a..ef51a1ce04398 100644 --- a/cli/exp_prompts.go +++ b/cli/exp_prompts.go @@ -174,6 +174,20 @@ func (RootCmd) promptExample() *serpent.Command { _, _ = fmt.Fprintf(inv.Stdout, "%q are nice choices.\n", strings.Join(multiSelectValues, ", ")) return multiSelectError }, useThingsOption, enableCustomInputOption), + promptCmd("rich-multi-select", func(inv *serpent.Invocation) error { + if len(multiSelectValues) == 0 { + multiSelectValues, multiSelectError = cliui.MultiSelect(inv, cliui.MultiSelectOptions{ + Message: "Select some things:", + Options: []string{ + "Apples", "Plums", "Grapes", "Oranges", "Bananas", + }, + Defaults: []string{"Grapes", "Plums"}, + EnableCustomInput: enableCustomInput, + }) + } + _, _ = fmt.Fprintf(inv.Stdout, "%q are nice choices.\n", strings.Join(multiSelectValues, ", ")) + return multiSelectError + }, useThingsOption, enableCustomInputOption), promptCmd("rich-parameter", func(inv *serpent.Invocation) error { value, err := cliui.RichSelect(inv, cliui.RichSelectOptions{ Options: []codersdk.TemplateVersionParameterOption{ diff --git a/cli/exp_rpty.go b/cli/exp_rpty.go index 70154c57ea9bc..196328b64732c 100644 --- a/cli/exp_rpty.go +++ b/cli/exp_rpty.go @@ -97,7 +97,7 @@ func handleRPTY(inv *serpent.Invocation, client *codersdk.Client, args handleRPT reconnectID = uuid.New() } - ws, agt, _, err := getWorkspaceAndAgent(ctx, inv, client, true, args.NamedWorkspace) + ws, agt, _, err := GetWorkspaceAndAgent(ctx, inv, client, true, args.NamedWorkspace) if err != nil { return err } diff --git a/cli/list.go b/cli/list.go index 083d32c6e8fa1..278895dfd7218 100644 --- a/cli/list.go +++ b/cli/list.go @@ -18,7 +18,7 @@ import ( // workspaceListRow is the type provided to the OutputFormatter. This is a bit // dodgy but it's the only way to do complex display code for one format vs. the // other. -type workspaceListRow struct { +type WorkspaceListRow struct { // For JSON format: codersdk.Workspace `table:"-"` @@ -40,7 +40,7 @@ type workspaceListRow struct { DailyCost string `json:"-" table:"daily cost"` } -func workspaceListRowFromWorkspace(now time.Time, workspace codersdk.Workspace) workspaceListRow { +func WorkspaceListRowFromWorkspace(now time.Time, workspace codersdk.Workspace) WorkspaceListRow { status := codersdk.WorkspaceDisplayStatus(workspace.LatestBuild.Job.Status, workspace.LatestBuild.Transition) lastBuilt := now.UTC().Sub(workspace.LatestBuild.Job.CreatedAt).Truncate(time.Second) @@ -55,7 +55,7 @@ func workspaceListRowFromWorkspace(now time.Time, workspace codersdk.Workspace) favIco = "★" } workspaceName := favIco + " " + workspace.OwnerName + "/" + workspace.Name - return workspaceListRow{ + return WorkspaceListRow{ Favorite: workspace.Favorite, Workspace: workspace, WorkspaceName: workspaceName, @@ -80,7 +80,7 @@ func (r *RootCmd) list() *serpent.Command { filter cliui.WorkspaceFilter formatter = cliui.NewOutputFormatter( cliui.TableFormat( - []workspaceListRow{}, + []WorkspaceListRow{}, []string{ "workspace", "template", @@ -107,7 +107,7 @@ func (r *RootCmd) list() *serpent.Command { r.InitClient(client), ), Handler: func(inv *serpent.Invocation) error { - res, err := queryConvertWorkspaces(inv.Context(), client, filter.Filter(), workspaceListRowFromWorkspace) + res, err := QueryConvertWorkspaces(inv.Context(), client, filter.Filter(), WorkspaceListRowFromWorkspace) if err != nil { return err } @@ -137,9 +137,9 @@ func (r *RootCmd) list() *serpent.Command { // queryConvertWorkspaces is a helper function for converting // codersdk.Workspaces to a different type. // It's used by the list command to convert workspaces to -// workspaceListRow, and by the schedule command to +// WorkspaceListRow, and by the schedule command to // convert workspaces to scheduleListRow. -func queryConvertWorkspaces[T any](ctx context.Context, client *codersdk.Client, filter codersdk.WorkspaceFilter, convertF func(time.Time, codersdk.Workspace) T) ([]T, error) { +func QueryConvertWorkspaces[T any](ctx context.Context, client *codersdk.Client, filter codersdk.WorkspaceFilter, convertF func(time.Time, codersdk.Workspace) T) ([]T, error) { var empty []T workspaces, err := client.Workspaces(ctx, filter) if err != nil { diff --git a/cli/open.go b/cli/open.go index cc21ea863430d..83569e87e241a 100644 --- a/cli/open.go +++ b/cli/open.go @@ -72,7 +72,7 @@ func (r *RootCmd) openVSCode() *serpent.Command { // need to wait for the agent to start. workspaceQuery := inv.Args[0] autostart := true - workspace, workspaceAgent, otherWorkspaceAgents, err := getWorkspaceAndAgent(ctx, inv, client, autostart, workspaceQuery) + workspace, workspaceAgent, otherWorkspaceAgents, err := GetWorkspaceAndAgent(ctx, inv, client, autostart, workspaceQuery) if err != nil { return xerrors.Errorf("get workspace and agent: %w", err) } @@ -316,7 +316,7 @@ func (r *RootCmd) openApp() *serpent.Command { } workspaceName := inv.Args[0] - ws, agt, _, err := getWorkspaceAndAgent(ctx, inv, client, false, workspaceName) + ws, agt, _, err := GetWorkspaceAndAgent(ctx, inv, client, false, workspaceName) if err != nil { var sdkErr *codersdk.Error if errors.As(err, &sdkErr) && sdkErr.StatusCode() == http.StatusNotFound { diff --git a/cli/parameter.go b/cli/parameter.go index 97c551ffa5a7f..2b56c364faf23 100644 --- a/cli/parameter.go +++ b/cli/parameter.go @@ -100,6 +100,14 @@ func (wpf *workspaceParameterFlags) alwaysPrompt() serpent.Option { } } +func presetParameterAsWorkspaceBuildParameters(presetParameters []codersdk.PresetParameter) []codersdk.WorkspaceBuildParameter { + var params []codersdk.WorkspaceBuildParameter + for _, parameter := range presetParameters { + params = append(params, codersdk.WorkspaceBuildParameter(parameter)) + } + return params +} + func asWorkspaceBuildParameters(nameValuePairs []string) ([]codersdk.WorkspaceBuildParameter, error) { var params []codersdk.WorkspaceBuildParameter for _, nameValue := range nameValuePairs { diff --git a/cli/parameterresolver.go b/cli/parameterresolver.go index 40625331fa6aa..cbd00fb59623e 100644 --- a/cli/parameterresolver.go +++ b/cli/parameterresolver.go @@ -26,6 +26,7 @@ type ParameterResolver struct { lastBuildParameters []codersdk.WorkspaceBuildParameter sourceWorkspaceParameters []codersdk.WorkspaceBuildParameter + presetParameters []codersdk.WorkspaceBuildParameter richParameters []codersdk.WorkspaceBuildParameter richParametersDefaults map[string]string richParametersFile map[string]string @@ -45,6 +46,11 @@ func (pr *ParameterResolver) WithSourceWorkspaceParameters(params []codersdk.Wor return pr } +func (pr *ParameterResolver) WithPresetParameters(params []codersdk.WorkspaceBuildParameter) *ParameterResolver { + pr.presetParameters = params + return pr +} + func (pr *ParameterResolver) WithRichParameters(params []codersdk.WorkspaceBuildParameter) *ParameterResolver { pr.richParameters = params return pr @@ -80,6 +86,8 @@ func (pr *ParameterResolver) WithPromptEphemeralParameters(promptEphemeralParame return pr } +// Resolve gathers workspace build parameters in a layered fashion, applying values from various sources +// in order of precedence: parameter file < CLI/ENV < source build < last build < preset < user input. func (pr *ParameterResolver) Resolve(inv *serpent.Invocation, action WorkspaceCLIAction, templateVersionParameters []codersdk.TemplateVersionParameter) ([]codersdk.WorkspaceBuildParameter, error) { var staged []codersdk.WorkspaceBuildParameter var err error @@ -88,6 +96,7 @@ func (pr *ParameterResolver) Resolve(inv *serpent.Invocation, action WorkspaceCL staged = pr.resolveWithCommandLineOrEnv(staged) staged = pr.resolveWithSourceBuildParameters(staged, templateVersionParameters) staged = pr.resolveWithLastBuildParameters(staged, templateVersionParameters) + staged = pr.resolveWithPreset(staged) // Preset parameters take precedence from all other parameters if err = pr.verifyConstraints(staged, action, templateVersionParameters); err != nil { return nil, err } @@ -97,6 +106,21 @@ func (pr *ParameterResolver) Resolve(inv *serpent.Invocation, action WorkspaceCL return staged, nil } +func (pr *ParameterResolver) resolveWithPreset(resolved []codersdk.WorkspaceBuildParameter) []codersdk.WorkspaceBuildParameter { +next: + for _, presetParameter := range pr.presetParameters { + for i, r := range resolved { + if r.Name == presetParameter.Name { + resolved[i].Value = presetParameter.Value + continue next + } + } + resolved = append(resolved, presetParameter) + } + + return resolved +} + func (pr *ParameterResolver) resolveWithParametersMapFile(resolved []codersdk.WorkspaceBuildParameter) []codersdk.WorkspaceBuildParameter { next: for name, value := range pr.richParametersFile { diff --git a/cli/ping.go b/cli/ping.go index 0836aa8a135db..29af06affeaee 100644 --- a/cli/ping.go +++ b/cli/ping.go @@ -110,7 +110,7 @@ func (r *RootCmd) ping() *serpent.Command { defer notifyCancel() workspaceName := inv.Args[0] - _, workspaceAgent, _, err := getWorkspaceAndAgent( + _, workspaceAgent, _, err := GetWorkspaceAndAgent( ctx, inv, client, false, // Do not autostart for a ping. workspaceName, @@ -147,7 +147,7 @@ func (r *RootCmd) ping() *serpent.Command { } defer conn.Close() - derpMap := conn.DERPMap() + derpMap := conn.TailnetConn().DERPMap() diagCtx, diagCancel := context.WithTimeout(inv.Context(), 30*time.Second) defer diagCancel() @@ -156,7 +156,7 @@ func (r *RootCmd) ping() *serpent.Command { // Silent ping to determine whether we should show diags _, didP2p, _, _ := conn.Ping(ctx) - ni := conn.GetNetInfo() + ni := conn.TailnetConn().GetNetInfo() connDiags := cliui.ConnDiags{ DisableDirect: r.disableDirect, LocalNetInfo: ni, diff --git a/cli/portforward.go b/cli/portforward.go index 7a7723213f760..1b055d9e4362e 100644 --- a/cli/portforward.go +++ b/cli/portforward.go @@ -84,7 +84,7 @@ func (r *RootCmd) portForward() *serpent.Command { return xerrors.New("no port-forwards requested") } - workspace, workspaceAgent, _, err := getWorkspaceAndAgent(ctx, inv, client, !disableAutostart, inv.Args[0]) + workspace, workspaceAgent, _, err := GetWorkspaceAndAgent(ctx, inv, client, !disableAutostart, inv.Args[0]) if err != nil { return err } @@ -221,7 +221,7 @@ func (r *RootCmd) portForward() *serpent.Command { func listenAndPortForward( ctx context.Context, inv *serpent.Invocation, - conn *workspacesdk.AgentConn, + conn workspacesdk.AgentConn, wg *sync.WaitGroup, spec portForwardSpec, logger slog.Logger, diff --git a/cli/provisionerjobs_test.go b/cli/provisionerjobs_test.go index b33fd8b984dc7..4db42e8e3c9e7 100644 --- a/cli/provisionerjobs_test.go +++ b/cli/provisionerjobs_test.go @@ -8,7 +8,6 @@ import ( "testing" "time" - "github.com/aws/smithy-go/ptr" "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -20,6 +19,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/rbac" "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/provisionersdk" "github.com/coder/coder/v2/testutil" ) @@ -36,67 +36,43 @@ func TestProvisionerJobs(t *testing.T) { templateAdminClient, templateAdmin := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.ScopedRoleOrgTemplateAdmin(owner.OrganizationID)) memberClient, member := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) - // Create initial resources with a running provisioner. - firstProvisioner := coderdtest.NewTaggedProvisionerDaemon(t, coderdAPI, "default-provisioner", map[string]string{"owner": "", "scope": "organization"}) - t.Cleanup(func() { _ = firstProvisioner.Close() }) - version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, completeWithAgent()) - coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) - template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID, func(req *codersdk.CreateTemplateRequest) { - req.AllowUserCancelWorkspaceJobs = ptr.Bool(true) + // These CLI tests are related to provisioner job CRUD operations and as such + // do not require the overhead of starting a provisioner. Other provisioner job + // functionalities (acquisition etc.) are tested elsewhere. + template := dbgen.Template(t, db, database.Template{ + OrganizationID: owner.OrganizationID, + CreatedBy: owner.UserID, + AllowUserCancelWorkspaceJobs: true, + }) + version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + OrganizationID: owner.OrganizationID, + CreatedBy: owner.UserID, + TemplateID: uuid.NullUUID{UUID: template.ID, Valid: true}, }) - - // Stop the provisioner so it doesn't grab any more jobs. - firstProvisioner.Close() t.Run("Cancel", func(t *testing.T) { t.Parallel() - // Set up test helpers. - type jobInput struct { - WorkspaceBuildID string `json:"workspace_build_id,omitempty"` - TemplateVersionID string `json:"template_version_id,omitempty"` - DryRun bool `json:"dry_run,omitempty"` - } - prepareJob := func(t *testing.T, input jobInput) database.ProvisionerJob { + // Test helper to create a provisioner job of a given type with a given input. + prepareJob := func(t *testing.T, jobType database.ProvisionerJobType, input json.RawMessage) database.ProvisionerJob { t.Helper() - - inputBytes, err := json.Marshal(input) - require.NoError(t, err) - - var typ database.ProvisionerJobType - switch { - case input.WorkspaceBuildID != "": - typ = database.ProvisionerJobTypeWorkspaceBuild - case input.TemplateVersionID != "": - if input.DryRun { - typ = database.ProvisionerJobTypeTemplateVersionDryRun - } else { - typ = database.ProvisionerJobTypeTemplateVersionImport - } - default: - t.Fatal("invalid input") - } - - var ( - tags = database.StringMap{"owner": "", "scope": "organization", "foo": uuid.New().String()} - _ = dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{Tags: tags}) - job = dbgen.ProvisionerJob(t, db, coderdAPI.Pubsub, database.ProvisionerJob{ - InitiatorID: member.ID, - Input: json.RawMessage(inputBytes), - Type: typ, - Tags: tags, - StartedAt: sql.NullTime{Time: coderdAPI.Clock.Now().Add(-time.Minute), Valid: true}, - }) - ) - return job + return dbgen.ProvisionerJob(t, db, coderdAPI.Pubsub, database.ProvisionerJob{ + InitiatorID: member.ID, + Input: input, + Type: jobType, + StartedAt: sql.NullTime{Time: coderdAPI.Clock.Now().Add(-time.Minute), Valid: true}, + Tags: database.StringMap{provisionersdk.TagOwner: "", provisionersdk.TagScope: provisionersdk.ScopeOrganization, "foo": uuid.NewString()}, + }) } + // Test helper to create a workspace build job with a predefined input. prepareWorkspaceBuildJob := func(t *testing.T) database.ProvisionerJob { t.Helper() var ( - wbID = uuid.New() - job = prepareJob(t, jobInput{WorkspaceBuildID: wbID.String()}) - w = dbgen.Workspace(t, db, database.WorkspaceTable{ + wbID = uuid.New() + input, _ = json.Marshal(map[string]string{"workspace_build_id": wbID.String()}) + job = prepareJob(t, database.ProvisionerJobTypeWorkspaceBuild, input) + w = dbgen.Workspace(t, db, database.WorkspaceTable{ OrganizationID: owner.OrganizationID, OwnerID: member.ID, TemplateID: template.ID, @@ -112,12 +88,14 @@ func TestProvisionerJobs(t *testing.T) { return job } - prepareTemplateVersionImportJobBuilder := func(t *testing.T, dryRun bool) database.ProvisionerJob { + // Test helper to create a template version import job with a predefined input. + prepareTemplateVersionImportJob := func(t *testing.T) database.ProvisionerJob { t.Helper() var ( - tvID = uuid.New() - job = prepareJob(t, jobInput{TemplateVersionID: tvID.String(), DryRun: dryRun}) - _ = dbgen.TemplateVersion(t, db, database.TemplateVersion{ + tvID = uuid.New() + input, _ = json.Marshal(map[string]string{"template_version_id": tvID.String()}) + job = prepareJob(t, database.ProvisionerJobTypeTemplateVersionImport, input) + _ = dbgen.TemplateVersion(t, db, database.TemplateVersion{ OrganizationID: owner.OrganizationID, CreatedBy: templateAdmin.ID, ID: tvID, @@ -127,11 +105,26 @@ func TestProvisionerJobs(t *testing.T) { ) return job } - prepareTemplateVersionImportJob := func(t *testing.T) database.ProvisionerJob { - return prepareTemplateVersionImportJobBuilder(t, false) - } + + // Test helper to create a template version import dry run job with a predefined input. prepareTemplateVersionImportJobDryRun := func(t *testing.T) database.ProvisionerJob { - return prepareTemplateVersionImportJobBuilder(t, true) + t.Helper() + var ( + tvID = uuid.New() + input, _ = json.Marshal(map[string]interface{}{ + "template_version_id": tvID.String(), + "dry_run": true, + }) + job = prepareJob(t, database.ProvisionerJobTypeTemplateVersionDryRun, input) + _ = dbgen.TemplateVersion(t, db, database.TemplateVersion{ + OrganizationID: owner.OrganizationID, + CreatedBy: templateAdmin.ID, + ID: tvID, + TemplateID: uuid.NullUUID{UUID: template.ID, Valid: true}, + JobID: job.ID, + }) + ) + return job } // Run the cancellation test suite. diff --git a/cli/provisioners.go b/cli/provisioners.go index 8f90a52589939..77f5e7705edd5 100644 --- a/cli/provisioners.go +++ b/cli/provisioners.go @@ -2,10 +2,12 @@ package cli import ( "fmt" + "time" "golang.org/x/xerrors" "github.com/coder/coder/v2/cli/cliui" + "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/codersdk" "github.com/coder/serpent" ) @@ -39,7 +41,10 @@ func (r *RootCmd) provisionerList() *serpent.Command { cliui.TableFormat([]provisionerDaemonRow{}, []string{"created at", "last seen at", "key name", "name", "version", "status", "tags"}), cliui.JSONFormat(), ) - limit int64 + limit int64 + offline bool + status []string + maxAge time.Duration ) cmd := &serpent.Command{ @@ -59,7 +64,10 @@ func (r *RootCmd) provisionerList() *serpent.Command { } daemons, err := client.OrganizationProvisionerDaemons(ctx, org.ID, &codersdk.OrganizationProvisionerDaemonsOptions{ - Limit: int(limit), + Limit: int(limit), + Offline: offline, + Status: slice.StringEnums[codersdk.ProvisionerDaemonStatus](status), + MaxAge: maxAge, }) if err != nil { return xerrors.Errorf("list provisioner daemons: %w", err) @@ -98,6 +106,27 @@ func (r *RootCmd) provisionerList() *serpent.Command { Default: "50", Value: serpent.Int64Of(&limit), }, + { + Flag: "show-offline", + FlagShorthand: "f", + Env: "CODER_PROVISIONER_SHOW_OFFLINE", + Description: "Show offline provisioners.", + Value: serpent.BoolOf(&offline), + }, + { + Flag: "status", + FlagShorthand: "s", + Env: "CODER_PROVISIONER_LIST_STATUS", + Description: "Filter by provisioner status.", + Value: serpent.EnumArrayOf(&status, slice.ToStrings(codersdk.ProvisionerDaemonStatusEnums())...), + }, + { + Flag: "max-age", + FlagShorthand: "m", + Env: "CODER_PROVISIONER_LIST_MAX_AGE", + Description: "Filter provisioners by maximum age.", + Value: serpent.DurationOf(&maxAge), + }, }...) orgContext.AttachOptions(cmd) diff --git a/cli/provisioners_test.go b/cli/provisioners_test.go index 30a89714ff57f..f70029e7fa366 100644 --- a/cli/provisioners_test.go +++ b/cli/provisioners_test.go @@ -31,7 +31,6 @@ func TestProvisioners_Golden(t *testing.T) { // Replace UUIDs with predictable values for golden files. replace := make(map[string]string) updateReplaceUUIDs := func(coderdAPI *coderd.API) { - //nolint:gocritic // This is a test. systemCtx := dbauthz.AsSystemRestricted(context.Background()) provisioners, err := coderdAPI.Database.GetProvisionerDaemons(systemCtx) require.NoError(t, err) @@ -198,6 +197,74 @@ func TestProvisioners_Golden(t *testing.T) { clitest.TestGoldenFile(t, t.Name(), got.Bytes(), replace) }) + t.Run("list with offline provisioner daemons", func(t *testing.T) { + t.Parallel() + + var got bytes.Buffer + inv, root := clitest.New(t, + "provisioners", + "list", + "--show-offline", + ) + inv.Stdout = &got + clitest.SetupConfig(t, templateAdminClient, root) + err := inv.Run() + require.NoError(t, err) + + clitest.TestGoldenFile(t, t.Name(), got.Bytes(), replace) + }) + + t.Run("list provisioner daemons by status", func(t *testing.T) { + t.Parallel() + + var got bytes.Buffer + inv, root := clitest.New(t, + "provisioners", + "list", + "--status=idle,offline,busy", + ) + inv.Stdout = &got + clitest.SetupConfig(t, templateAdminClient, root) + err := inv.Run() + require.NoError(t, err) + + clitest.TestGoldenFile(t, t.Name(), got.Bytes(), replace) + }) + + t.Run("list provisioner daemons without offline", func(t *testing.T) { + t.Parallel() + + var got bytes.Buffer + inv, root := clitest.New(t, + "provisioners", + "list", + "--status=idle,busy", + ) + inv.Stdout = &got + clitest.SetupConfig(t, templateAdminClient, root) + err := inv.Run() + require.NoError(t, err) + + clitest.TestGoldenFile(t, t.Name(), got.Bytes(), replace) + }) + + t.Run("list provisioner daemons by max age", func(t *testing.T) { + t.Parallel() + + var got bytes.Buffer + inv, root := clitest.New(t, + "provisioners", + "list", + "--max-age=1h", + ) + inv.Stdout = &got + clitest.SetupConfig(t, templateAdminClient, root) + err := inv.Run() + require.NoError(t, err) + + clitest.TestGoldenFile(t, t.Name(), got.Bytes(), replace) + }) + // Test jobs list with template admin as members are currently // unable to access provisioner jobs. In the future (with RBAC // changes), we may allow them to view _their_ jobs. diff --git a/cli/restart.go b/cli/restart.go index 156f506105c5a..20ee0b9b9de9d 100644 --- a/cli/restart.go +++ b/cli/restart.go @@ -51,8 +51,17 @@ func (r *RootCmd) restart() *serpent.Command { return err } + stopParamValues, err := asWorkspaceBuildParameters(parameterFlags.ephemeralParameters) + if err != nil { + return xerrors.Errorf("parse ephemeral parameters: %w", err) + } wbr := codersdk.CreateWorkspaceBuildRequest{ Transition: codersdk.WorkspaceTransitionStop, + // Ephemeral parameters should be passed to both stop and start builds. + // TODO: maybe these values should be sourced from the previous build? + // It has to be manually sourced, as ephemeral parameters do not carry across + // builds. + RichParameterValues: stopParamValues, } if bflags.provisionerLogDebug { wbr.LogLevel = codersdk.ProvisionerLogLevelDebug diff --git a/cli/restart_test.go b/cli/restart_test.go index d69344435bf28..01be7e590cebf 100644 --- a/cli/restart_test.go +++ b/cli/restart_test.go @@ -10,6 +10,7 @@ import ( "github.com/coder/coder/v2/cli/clitest" "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/coderd/util/ptr" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/provisioner/echo" "github.com/coder/coder/v2/provisionersdk/proto" @@ -70,8 +71,14 @@ func TestRestart(t *testing.T) { member, memberUser := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, echoResponses()) coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) - template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) - workspace := coderdtest.CreateWorkspace(t, member, template.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID, func(request *codersdk.CreateTemplateRequest) { + request.UseClassicParameterFlow = ptr.Ref(true) // TODO: Remove when dynamic parameters prompt missing ephemeral parameters. + }) + workspace := coderdtest.CreateWorkspace(t, member, template.ID, func(request *codersdk.CreateWorkspaceRequest) { + request.RichParameterValues = []codersdk.WorkspaceBuildParameter{ + {Name: ephemeralParameterName, Value: "placeholder"}, + } + }) coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) inv, root := clitest.New(t, "restart", workspace.Name, "--prompt-ephemeral-parameters") @@ -125,7 +132,11 @@ func TestRestart(t *testing.T) { version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, echoResponses()) coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) - workspace := coderdtest.CreateWorkspace(t, member, template.ID) + workspace := coderdtest.CreateWorkspace(t, member, template.ID, func(request *codersdk.CreateWorkspaceRequest) { + request.RichParameterValues = []codersdk.WorkspaceBuildParameter{ + {Name: ephemeralParameterName, Value: "placeholder"}, + } + }) coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) inv, root := clitest.New(t, "restart", workspace.Name, @@ -178,8 +189,14 @@ func TestRestart(t *testing.T) { member, memberUser := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, echoResponses()) coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) - template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) - workspace := coderdtest.CreateWorkspace(t, member, template.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID, func(request *codersdk.CreateTemplateRequest) { + request.UseClassicParameterFlow = ptr.Ref(true) // TODO: Remove when dynamic parameters prompts missing ephemeral parameters + }) + workspace := coderdtest.CreateWorkspace(t, member, template.ID, func(request *codersdk.CreateWorkspaceRequest) { + request.RichParameterValues = []codersdk.WorkspaceBuildParameter{ + {Name: ephemeralParameterName, Value: "placeholder"}, + } + }) coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) inv, root := clitest.New(t, "restart", workspace.Name, "--build-options") @@ -233,7 +250,11 @@ func TestRestart(t *testing.T) { version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, echoResponses()) coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) - workspace := coderdtest.CreateWorkspace(t, member, template.ID) + workspace := coderdtest.CreateWorkspace(t, member, template.ID, func(request *codersdk.CreateWorkspaceRequest) { + request.RichParameterValues = []codersdk.WorkspaceBuildParameter{ + {Name: ephemeralParameterName, Value: "placeholder"}, + } + }) coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) inv, root := clitest.New(t, "restart", workspace.Name, diff --git a/cli/root.go b/cli/root.go index 54215a67401dd..b3e67a46ad463 100644 --- a/cli/root.go +++ b/cli/root.go @@ -108,7 +108,7 @@ func (r *RootCmd) CoreSubcommands() []*serpent.Command { // Workspace Commands r.autoupdate(), r.configSSH(), - r.create(), + r.Create(CreateOptions{}), r.deleteWorkspace(), r.favorite(), r.list(), diff --git a/cli/schedule.go b/cli/schedule.go index 9ade82b9c4a36..b7d1ff9b1f2bf 100644 --- a/cli/schedule.go +++ b/cli/schedule.go @@ -46,7 +46,7 @@ When enabling scheduled stop, enter a duration in one of the following formats: * 2m (2 minutes) * 2 (2 minutes) ` - scheduleExtendDescriptionLong = ` + scheduleExtendDescriptionLong = `Extends the workspace deadline. * The new stop time is calculated from *now*. * The new stop time must be at least 30 minutes in the future. * The workspace template may restrict the maximum workspace runtime. @@ -117,7 +117,7 @@ func (r *RootCmd) scheduleShow() *serpent.Command { f.FilterQuery = fmt.Sprintf("owner:me name:%s", inv.Args[0]) } } - res, err := queryConvertWorkspaces(inv.Context(), client, f, scheduleListRowFromWorkspace) + res, err := QueryConvertWorkspaces(inv.Context(), client, f, scheduleListRowFromWorkspace) if err != nil { return err } @@ -157,6 +157,13 @@ func (r *RootCmd) scheduleStart() *serpent.Command { return err } + // Autostart configuration is not supported for prebuilt workspaces. + // Prebuild lifecycle is managed by the reconciliation loop, with scheduling behavior + // defined per preset at the template level, not per workspace. + if workspace.IsPrebuild { + return xerrors.Errorf("autostart configuration is not supported for prebuilt workspaces") + } + var schedStr *string if inv.Args[1] != "manual" { sched, err := parseCLISchedule(inv.Args[1:]...) @@ -205,6 +212,13 @@ func (r *RootCmd) scheduleStop() *serpent.Command { return err } + // Autostop configuration is not supported for prebuilt workspaces. + // Prebuild lifecycle is managed by the reconciliation loop, with scheduling behavior + // defined per preset at the template level, not per workspace. + if workspace.IsPrebuild { + return xerrors.Errorf("autostop configuration is not supported for prebuilt workspaces") + } + var durMillis *int64 if inv.Args[1] != "manual" { dur, err := parseDuration(inv.Args[1]) @@ -255,6 +269,13 @@ func (r *RootCmd) scheduleExtend() *serpent.Command { return xerrors.Errorf("get workspace: %w", err) } + // Deadline extensions are not supported for prebuilt workspaces. + // Prebuild lifecycle is managed by the reconciliation loop, with TTL behavior + // defined per preset at the template level, not per workspace. + if workspace.IsPrebuild { + return xerrors.Errorf("extend configuration is not supported for prebuilt workspaces") + } + loc, err := tz.TimezoneIANA() if err != nil { loc = time.UTC // best effort @@ -286,7 +307,7 @@ func (r *RootCmd) scheduleExtend() *serpent.Command { } func displaySchedule(ws codersdk.Workspace, out io.Writer) error { - rows := []workspaceListRow{workspaceListRowFromWorkspace(time.Now(), ws)} + rows := []WorkspaceListRow{WorkspaceListRowFromWorkspace(time.Now(), ws)} rendered, err := cliui.DisplayTable(rows, "workspace", []string{ "workspace", "starts at", "starts next", "stops after", "stops next", }) diff --git a/cli/schedule_test.go b/cli/schedule_test.go index 02997a9a4c40d..b161f41cbcebc 100644 --- a/cli/schedule_test.go +++ b/cli/schedule_test.go @@ -353,7 +353,7 @@ func TestScheduleOverride(t *testing.T) { ownerClient, _, _, ws := setupTestSchedule(t, sched) now := time.Now() // To avoid the likelihood of time-related flakes, only matching up to the hour. - expectedDeadline := time.Now().In(loc).Add(10 * time.Hour).Format("2006-01-02T15:") + expectedDeadline := now.In(loc).Add(10 * time.Hour).Format("2006-01-02T15:") // When: we override the stop schedule inv, root := clitest.New(t, diff --git a/cli/server.go b/cli/server.go index 26d0c8f110403..f9e744761b22e 100644 --- a/cli/server.go +++ b/cli/server.go @@ -55,6 +55,7 @@ import ( "cdr.dev/slog" "cdr.dev/slog/sloggers/sloghuman" + "github.com/coder/coder/v2/coderd/pproflabel" "github.com/coder/pretty" "github.com/coder/quartz" "github.com/coder/retry" @@ -1459,14 +1460,14 @@ func newProvisionerDaemon( tracer := coderAPI.TracerProvider.Tracer(tracing.TracerName) terraformClient, terraformServer := drpcsdk.MemTransportPipe() wg.Add(1) - go func() { + pproflabel.Go(ctx, pproflabel.Service(pproflabel.ServiceTerraformProvisioner), func(ctx context.Context) { defer wg.Done() <-ctx.Done() _ = terraformClient.Close() _ = terraformServer.Close() - }() + }) wg.Add(1) - go func() { + pproflabel.Go(ctx, pproflabel.Service(pproflabel.ServiceTerraformProvisioner), func(ctx context.Context) { defer wg.Done() defer cancel() @@ -1485,7 +1486,7 @@ func newProvisionerDaemon( default: } } - }() + }) connector[string(database.ProvisionerTypeTerraform)] = sdkproto.NewDRPCProvisionerClient(terraformClient) default: diff --git a/cli/speedtest.go b/cli/speedtest.go index 08112f50cce2c..86d0e6a9ee63c 100644 --- a/cli/speedtest.go +++ b/cli/speedtest.go @@ -83,7 +83,7 @@ func (r *RootCmd) speedtest() *serpent.Command { return xerrors.Errorf("--direct (-d) is incompatible with --%s", varDisableDirect) } - _, workspaceAgent, _, err := getWorkspaceAndAgent(ctx, inv, client, false, inv.Args[0]) + _, workspaceAgent, _, err := GetWorkspaceAndAgent(ctx, inv, client, false, inv.Args[0]) if err != nil { return err } @@ -139,7 +139,7 @@ func (r *RootCmd) speedtest() *serpent.Command { if err != nil { continue } - status := conn.Status() + status := conn.TailnetConn().Status() if len(status.Peers()) != 1 { continue } @@ -189,7 +189,7 @@ func (r *RootCmd) speedtest() *serpent.Command { outputResult.Intervals[i] = interval } } - conn.Conn.SendSpeedtestTelemetry(outputResult.Overall.ThroughputMbits) + conn.TailnetConn().SendSpeedtestTelemetry(outputResult.Overall.ThroughputMbits) out, err := formatter.Format(inv.Context(), outputResult) if err != nil { return err diff --git a/cli/ssh.go b/cli/ssh.go index a2bca46c72f32..a2f0db7327bef 100644 --- a/cli/ssh.go +++ b/cli/ssh.go @@ -590,7 +590,7 @@ func (r *RootCmd) ssh() *serpent.Command { } err = sshSession.Wait() - conn.SendDisconnectedTelemetry() + conn.TailnetConn().SendDisconnectedTelemetry() if err != nil { if exitErr := (&gossh.ExitError{}); errors.As(err, &exitErr) { // Clear the error since it's not useful beyond @@ -754,7 +754,7 @@ func findWorkspaceAndAgentByHostname( hostname = strings.TrimSuffix(hostname, qualifiedSuffix) } hostname = normalizeWorkspaceInput(hostname) - ws, agent, _, err := getWorkspaceAndAgent(ctx, inv, client, !disableAutostart, hostname) + ws, agent, _, err := GetWorkspaceAndAgent(ctx, inv, client, !disableAutostart, hostname) return ws, agent, err } @@ -827,11 +827,11 @@ startWatchLoop: } } -// getWorkspaceAgent returns the workspace and agent selected using either the +// GetWorkspaceAndAgent returns the workspace and agent selected using either the // `[.]` syntax via `in`. It will also return any other agents // in the workspace as a slice for use in child->parent lookups. // If autoStart is true, the workspace will be started if it is not already running. -func getWorkspaceAndAgent(ctx context.Context, inv *serpent.Invocation, client *codersdk.Client, autostart bool, input string) (codersdk.Workspace, codersdk.WorkspaceAgent, []codersdk.WorkspaceAgent, error) { //nolint:revive +func GetWorkspaceAndAgent(ctx context.Context, inv *serpent.Invocation, client *codersdk.Client, autostart bool, input string) (codersdk.Workspace, codersdk.WorkspaceAgent, []codersdk.WorkspaceAgent, error) { //nolint:revive var ( workspace codersdk.Workspace // The input will be `owner/name.agent` @@ -880,7 +880,7 @@ func getWorkspaceAndAgent(ctx context.Context, inv *serpent.Invocation, client * switch cerr.StatusCode() { case http.StatusConflict: _, _ = fmt.Fprintln(inv.Stderr, "Unable to start the workspace due to conflict, the workspace may be starting, retrying without autostart...") - return getWorkspaceAndAgent(ctx, inv, client, false, input) + return GetWorkspaceAndAgent(ctx, inv, client, false, input) case http.StatusForbidden: _, err = startWorkspace(inv, client, workspace, workspaceParameterFlags{}, buildFlags{}, WorkspaceUpdate) @@ -1364,7 +1364,7 @@ func getUsageAppName(usageApp string) codersdk.UsageAppName { func setStatsCallback( ctx context.Context, - agentConn *workspacesdk.AgentConn, + agentConn workspacesdk.AgentConn, logger slog.Logger, networkInfoDir string, networkInfoInterval time.Duration, @@ -1437,7 +1437,7 @@ func setStatsCallback( now := time.Now() cb(now, now.Add(time.Nanosecond), map[netlogtype.Connection]netlogtype.Counts{}, map[netlogtype.Connection]netlogtype.Counts{}) - agentConn.SetConnStatsCallback(networkInfoInterval, 2048, cb) + agentConn.TailnetConn().SetConnStatsCallback(networkInfoInterval, 2048, cb) return errCh, nil } @@ -1451,13 +1451,13 @@ type sshNetworkStats struct { UsingCoderConnect bool `json:"using_coder_connect"` } -func collectNetworkStats(ctx context.Context, agentConn *workspacesdk.AgentConn, start, end time.Time, counts map[netlogtype.Connection]netlogtype.Counts) (*sshNetworkStats, error) { +func collectNetworkStats(ctx context.Context, agentConn workspacesdk.AgentConn, start, end time.Time, counts map[netlogtype.Connection]netlogtype.Counts) (*sshNetworkStats, error) { latency, p2p, pingResult, err := agentConn.Ping(ctx) if err != nil { return nil, err } - node := agentConn.Node() - derpMap := agentConn.DERPMap() + node := agentConn.TailnetConn().Node() + derpMap := agentConn.TailnetConn().DERPMap() totalRx := uint64(0) totalTx := uint64(0) diff --git a/cli/start_test.go b/cli/start_test.go index 85b7b88374f72..6e58b40e30778 100644 --- a/cli/start_test.go +++ b/cli/start_test.go @@ -113,10 +113,18 @@ func TestStart(t *testing.T) { version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, echoResponses()) coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) - workspace := coderdtest.CreateWorkspace(t, member, template.ID) + workspace := coderdtest.CreateWorkspace(t, member, template.ID, func(request *codersdk.CreateWorkspaceRequest) { + request.RichParameterValues = []codersdk.WorkspaceBuildParameter{ + {Name: ephemeralParameterName, Value: "foo"}, // Value is required, set it to something + } + }) coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) // Stop the workspace - workspaceBuild := coderdtest.CreateWorkspaceBuild(t, client, workspace, database.WorkspaceTransitionStop) + workspaceBuild := coderdtest.CreateWorkspaceBuild(t, client, workspace, database.WorkspaceTransitionStop, func(request *codersdk.CreateWorkspaceBuildRequest) { + request.RichParameterValues = []codersdk.WorkspaceBuildParameter{ + {Name: ephemeralParameterName, Value: "foo"}, // Value is required, set it to something + } + }) coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspaceBuild.ID) inv, root := clitest.New(t, "start", workspace.Name, "--prompt-ephemeral-parameters") @@ -167,10 +175,18 @@ func TestStart(t *testing.T) { version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, echoResponses()) coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) - workspace := coderdtest.CreateWorkspace(t, member, template.ID) + workspace := coderdtest.CreateWorkspace(t, member, template.ID, func(request *codersdk.CreateWorkspaceRequest) { + request.RichParameterValues = []codersdk.WorkspaceBuildParameter{ + {Name: ephemeralParameterName, Value: "foo"}, // Value is required, set it to something + } + }) coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) // Stop the workspace - workspaceBuild := coderdtest.CreateWorkspaceBuild(t, client, workspace, database.WorkspaceTransitionStop) + workspaceBuild := coderdtest.CreateWorkspaceBuild(t, client, workspace, database.WorkspaceTransitionStop, func(request *codersdk.CreateWorkspaceBuildRequest) { + request.RichParameterValues = []codersdk.WorkspaceBuildParameter{ + {Name: ephemeralParameterName, Value: "foo"}, // Value is required, set it to something + } + }) coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspaceBuild.ID) inv, root := clitest.New(t, "start", workspace.Name, diff --git a/cli/support.go b/cli/support.go index 70fadc3994580..c55bab92cd6ff 100644 --- a/cli/support.go +++ b/cli/support.go @@ -251,7 +251,7 @@ func summarizeBundle(inv *serpent.Invocation, bun *support.Bundle) { clientNetcheckSummary := bun.Network.Netcheck.Summarize("Client netcheck:", docsURL) if len(clientNetcheckSummary) > 0 { - cliui.Warn(inv.Stdout, "Networking issues detected:", deployHealthSummary...) + cliui.Warn(inv.Stdout, "Networking issues detected:", clientNetcheckSummary...) } } diff --git a/cli/templateedit.go b/cli/templateedit.go index b115350ab4437..fe0323449c9be 100644 --- a/cli/templateedit.go +++ b/cli/templateedit.go @@ -169,9 +169,9 @@ func (r *RootCmd) templateEdit() *serpent.Command { req := codersdk.UpdateTemplateMeta{ Name: name, - DisplayName: displayName, - Description: description, - Icon: icon, + DisplayName: &displayName, + Description: &description, + Icon: &icon, DefaultTTLMillis: defaultTTL.Milliseconds(), ActivityBumpMillis: activityBump.Milliseconds(), AutostopRequirement: &codersdk.TemplateAutostopRequirement{ diff --git a/cli/templatepresets.go b/cli/templatepresets.go index ab0d49725b99a..240abec313a16 100644 --- a/cli/templatepresets.go +++ b/cli/templatepresets.go @@ -41,12 +41,13 @@ func (r *RootCmd) templatePresets() *serpent.Command { func (r *RootCmd) templatePresetsList() *serpent.Command { defaultColumns := []string{ "name", + "description", "parameters", "default", "desired prebuild instances", } formatter := cliui.NewOutputFormatter( - cliui.TableFormat([]templatePresetRow{}, defaultColumns), + cliui.TableFormat([]TemplatePresetRow{}, defaultColumns), cliui.JSONFormat(), ) client := new(codersdk.Client) @@ -108,10 +109,13 @@ func (r *RootCmd) templatePresetsList() *serpent.Command { return nil } - cliui.Infof( - inv.Stdout, - "Showing presets for template %q and template version %q.\n", template.Name, version.Name, - ) + // Only display info message for table output + if formatter.FormatID() == "table" { + cliui.Infof( + inv.Stdout, + "Showing presets for template %q and template version %q.\n", template.Name, version.Name, + ) + } rows := templatePresetsToRows(presets...) out, err := formatter.Format(inv.Context(), rows) if err != nil { @@ -128,12 +132,13 @@ func (r *RootCmd) templatePresetsList() *serpent.Command { return cmd } -type templatePresetRow struct { - // For json format: +type TemplatePresetRow struct { + // For json format TemplatePreset codersdk.Preset `table:"-"` // For table format: Name string `json:"-" table:"name,default_sort"` + Description string `json:"-" table:"description"` Parameters string `json:"-" table:"parameters"` Default bool `json:"-" table:"default"` DesiredPrebuildInstances string `json:"-" table:"desired prebuild instances"` @@ -149,15 +154,19 @@ func formatPresetParameters(params []codersdk.PresetParameter) string { // templatePresetsToRows converts a list of presets to a list of rows // for outputting. -func templatePresetsToRows(presets ...codersdk.Preset) []templatePresetRow { - rows := make([]templatePresetRow, len(presets)) +func templatePresetsToRows(presets ...codersdk.Preset) []TemplatePresetRow { + rows := make([]TemplatePresetRow, len(presets)) for i, preset := range presets { prebuildInstances := "-" if preset.DesiredPrebuildInstances != nil { prebuildInstances = strconv.Itoa(*preset.DesiredPrebuildInstances) } - rows[i] = templatePresetRow{ + rows[i] = TemplatePresetRow{ + // For json format + TemplatePreset: preset, + // For table format Name: preset.Name, + Description: preset.Description, Parameters: formatPresetParameters(preset.Parameters), Default: preset.Default, DesiredPrebuildInstances: prebuildInstances, diff --git a/cli/templatepresets_test.go b/cli/templatepresets_test.go index 47d34af2dcf2d..3a8c8c39f0211 100644 --- a/cli/templatepresets_test.go +++ b/cli/templatepresets_test.go @@ -1,11 +1,14 @@ package cli_test import ( + "bytes" + "encoding/json" "fmt" "testing" "github.com/stretchr/testify/require" + "github.com/coder/coder/v2/cli" "github.com/coder/coder/v2/cli/clitest" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/codersdk" @@ -84,8 +87,9 @@ func TestTemplatePresets(t *testing.T) { }, }, { - Name: "preset-prebuilds", - Parameters: []*proto.PresetParameter{}, + Name: "preset-prebuilds", + Description: "Preset without parameters and 2 prebuild instances.", + Parameters: []*proto.PresetParameter{}, Prebuild: &proto.Prebuild{ Instances: 2, }, @@ -117,7 +121,7 @@ func TestTemplatePresets(t *testing.T) { pty.ExpectRegexMatch(`preset-default\s+k1=v2\s+true\s+0`) // The parameter order is not guaranteed in the output, so we match both possible orders pty.ExpectRegexMatch(`preset-multiple-params\s+(k1=v1,k2=v2)|(k2=v2,k1=v1)\s+false\s+-`) - pty.ExpectRegexMatch(`preset-prebuilds\s+\s+false\s+2`) + pty.ExpectRegexMatch(`preset-prebuilds\s+Preset without parameters and 2 prebuild instances.\s+\s+false\s+2`) }) t.Run("ListsPresetsForSpecifiedTemplateVersion", func(t *testing.T) { @@ -158,8 +162,9 @@ func TestTemplatePresets(t *testing.T) { }, }, { - Name: "preset-prebuilds", - Parameters: []*proto.PresetParameter{}, + Name: "preset-prebuilds", + Description: "Preset without parameters and 2 prebuild instances.", + Parameters: []*proto.PresetParameter{}, Prebuild: &proto.Prebuild{ Instances: 2, }, @@ -208,7 +213,69 @@ func TestTemplatePresets(t *testing.T) { pty.ExpectRegexMatch(`preset-default\s+k1=v2\s+true\s+0`) // The parameter order is not guaranteed in the output, so we match both possible orders pty.ExpectRegexMatch(`preset-multiple-params\s+(k1=v1,k2=v2)|(k2=v2,k1=v1)\s+false\s+-`) - pty.ExpectRegexMatch(`preset-prebuilds\s+\s+false\s+2`) + pty.ExpectRegexMatch(`preset-prebuilds\s+Preset without parameters and 2 prebuild instances.\s+\s+false\s+2`) + }) + + t.Run("ListsPresetsJSON", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + owner := coderdtest.CreateFirstUser(t, client) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + // Given: an active template version that includes presets + preset := proto.Preset{ + Name: "preset-default", + Description: "Preset with parameters and 2 prebuild instances.", + Icon: "/emojis/1f60e.png", + Default: true, + Parameters: []*proto.PresetParameter{ + { + Name: "k1", + Value: "v2", + }, + }, + Prebuild: &proto.Prebuild{ + Instances: 2, + }, + } + + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, templateWithPresets([]*proto.Preset{&preset})) + _ = coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + require.Equal(t, version.ID, template.ActiveVersionID) + + // When: listing presets for that template + inv, root := clitest.New(t, "templates", "presets", "list", template.Name, "-o", "json") + clitest.SetupConfig(t, member, root) + + buf := bytes.NewBuffer(nil) + inv.Stdout = buf + doneChan := make(chan struct{}) + var runErr error + go func() { + defer close(doneChan) + runErr = inv.Run() + }() + + <-doneChan + require.NoError(t, runErr) + + // Should: return the active version's preset + var jsonPresets []cli.TemplatePresetRow + err := json.Unmarshal(buf.Bytes(), &jsonPresets) + require.NoError(t, err, "unmarshal JSON output") + require.Len(t, jsonPresets, 1) + + jsonPreset := jsonPresets[0].TemplatePreset + require.Equal(t, preset.Name, jsonPreset.Name) + require.Equal(t, preset.Description, jsonPreset.Description) + require.Equal(t, preset.Icon, jsonPreset.Icon) + require.Equal(t, preset.Default, jsonPreset.Default) + require.Equal(t, len(preset.Parameters), len(jsonPreset.Parameters)) + require.Equal(t, preset.Parameters[0].Name, jsonPreset.Parameters[0].Name) + require.Equal(t, preset.Parameters[0].Value, jsonPreset.Parameters[0].Value) + require.Equal(t, int(preset.Prebuild.Instances), *jsonPreset.DesiredPrebuildInstances) }) } diff --git a/cli/templatepush_test.go b/cli/templatepush_test.go index f7a31d5e0c25f..732fdd5ee50b0 100644 --- a/cli/templatepush_test.go +++ b/cli/templatepush_test.go @@ -509,6 +509,7 @@ func TestTemplatePush(t *testing.T) { default = "1" } data "coder_parameter" "b" { + name = "b" type = string default = "2" } diff --git a/cli/testdata/TestProvisioners_Golden/list.golden b/cli/testdata/TestProvisioners_Golden/list.golden index 3f50f90746744..8f10eec458f7d 100644 --- a/cli/testdata/TestProvisioners_Golden/list.golden +++ b/cli/testdata/TestProvisioners_Golden/list.golden @@ -1,5 +1,4 @@ -ID CREATED AT LAST SEEN AT NAME VERSION TAGS KEY NAME STATUS CURRENT JOB ID CURRENT JOB STATUS PREVIOUS JOB ID PREVIOUS JOB STATUS ORGANIZATION -00000000-0000-0000-aaaa-000000000000 ====[timestamp]===== ====[timestamp]===== default-provisioner v0.0.0-devel map[owner: scope:organization] built-in idle 00000000-0000-0000-bbbb-000000000001 succeeded Coder -00000000-0000-0000-aaaa-000000000001 ====[timestamp]===== ====[timestamp]===== provisioner-1 v0.0.0 map[foo:bar owner: scope:organization] built-in busy 00000000-0000-0000-bbbb-000000000002 running Coder -00000000-0000-0000-aaaa-000000000002 ====[timestamp]===== ====[timestamp]===== provisioner-2 v0.0.0 map[owner: scope:organization] built-in offline 00000000-0000-0000-bbbb-000000000003 succeeded Coder -00000000-0000-0000-aaaa-000000000003 ====[timestamp]===== ====[timestamp]===== provisioner-3 v0.0.0 map[owner: scope:organization] built-in idle Coder +ID CREATED AT LAST SEEN AT NAME VERSION TAGS KEY NAME STATUS CURRENT JOB ID CURRENT JOB STATUS PREVIOUS JOB ID PREVIOUS JOB STATUS ORGANIZATION +00000000-0000-0000-aaaa-000000000000 ====[timestamp]===== ====[timestamp]===== default-provisioner v0.0.0-devel map[owner: scope:organization] built-in idle 00000000-0000-0000-bbbb-000000000001 succeeded Coder +00000000-0000-0000-aaaa-000000000001 ====[timestamp]===== ====[timestamp]===== provisioner-1 v0.0.0 map[foo:bar owner: scope:organization] built-in busy 00000000-0000-0000-bbbb-000000000002 running Coder +00000000-0000-0000-aaaa-000000000003 ====[timestamp]===== ====[timestamp]===== provisioner-3 v0.0.0 map[owner: scope:organization] built-in idle Coder diff --git a/cli/testdata/TestProvisioners_Golden/list_provisioner_daemons_by_max_age.golden b/cli/testdata/TestProvisioners_Golden/list_provisioner_daemons_by_max_age.golden new file mode 100644 index 0000000000000..bc383a839408d --- /dev/null +++ b/cli/testdata/TestProvisioners_Golden/list_provisioner_daemons_by_max_age.golden @@ -0,0 +1,4 @@ +CREATED AT LAST SEEN AT KEY NAME NAME VERSION STATUS TAGS +====[timestamp]===== ====[timestamp]===== built-in default-provisioner v0.0.0-devel idle map[owner: scope:organization] +====[timestamp]===== ====[timestamp]===== built-in provisioner-1 v0.0.0 busy map[foo:bar owner: scope:organization] +====[timestamp]===== ====[timestamp]===== built-in provisioner-3 v0.0.0 idle map[owner: scope:organization] diff --git a/cli/testdata/TestProvisioners_Golden/list_provisioner_daemons_by_status.golden b/cli/testdata/TestProvisioners_Golden/list_provisioner_daemons_by_status.golden new file mode 100644 index 0000000000000..fd7b966d8d982 --- /dev/null +++ b/cli/testdata/TestProvisioners_Golden/list_provisioner_daemons_by_status.golden @@ -0,0 +1,5 @@ +CREATED AT LAST SEEN AT KEY NAME NAME VERSION STATUS TAGS +====[timestamp]===== ====[timestamp]===== built-in default-provisioner v0.0.0-devel idle map[owner: scope:organization] +====[timestamp]===== ====[timestamp]===== built-in provisioner-1 v0.0.0 busy map[foo:bar owner: scope:organization] +====[timestamp]===== ====[timestamp]===== built-in provisioner-2 v0.0.0 offline map[owner: scope:organization] +====[timestamp]===== ====[timestamp]===== built-in provisioner-3 v0.0.0 idle map[owner: scope:organization] diff --git a/cli/testdata/TestProvisioners_Golden/list_provisioner_daemons_without_offline.golden b/cli/testdata/TestProvisioners_Golden/list_provisioner_daemons_without_offline.golden new file mode 100644 index 0000000000000..bc383a839408d --- /dev/null +++ b/cli/testdata/TestProvisioners_Golden/list_provisioner_daemons_without_offline.golden @@ -0,0 +1,4 @@ +CREATED AT LAST SEEN AT KEY NAME NAME VERSION STATUS TAGS +====[timestamp]===== ====[timestamp]===== built-in default-provisioner v0.0.0-devel idle map[owner: scope:organization] +====[timestamp]===== ====[timestamp]===== built-in provisioner-1 v0.0.0 busy map[foo:bar owner: scope:organization] +====[timestamp]===== ====[timestamp]===== built-in provisioner-3 v0.0.0 idle map[owner: scope:organization] diff --git a/cli/testdata/TestProvisioners_Golden/list_with_offline_provisioner_daemons.golden b/cli/testdata/TestProvisioners_Golden/list_with_offline_provisioner_daemons.golden new file mode 100644 index 0000000000000..fd7b966d8d982 --- /dev/null +++ b/cli/testdata/TestProvisioners_Golden/list_with_offline_provisioner_daemons.golden @@ -0,0 +1,5 @@ +CREATED AT LAST SEEN AT KEY NAME NAME VERSION STATUS TAGS +====[timestamp]===== ====[timestamp]===== built-in default-provisioner v0.0.0-devel idle map[owner: scope:organization] +====[timestamp]===== ====[timestamp]===== built-in provisioner-1 v0.0.0 busy map[foo:bar owner: scope:organization] +====[timestamp]===== ====[timestamp]===== built-in provisioner-2 v0.0.0 offline map[owner: scope:organization] +====[timestamp]===== ====[timestamp]===== built-in provisioner-3 v0.0.0 idle map[owner: scope:organization] diff --git a/cli/testdata/coder_agent_--help.golden b/cli/testdata/coder_agent_--help.golden index 0627016855e08..c6d75705a6eb4 100644 --- a/cli/testdata/coder_agent_--help.golden +++ b/cli/testdata/coder_agent_--help.golden @@ -33,6 +33,10 @@ OPTIONS: --debug-address string, $CODER_AGENT_DEBUG_ADDRESS (default: 127.0.0.1:2113) The bind address to serve a debug HTTP server. + --devcontainers-discovery-autostart-enable bool, $CODER_AGENT_DEVCONTAINERS_DISCOVERY_AUTOSTART_ENABLE (default: false) + Allow the agent to autostart devcontainer projects it discovers based + on their configuration. + --devcontainers-enable bool, $CODER_AGENT_DEVCONTAINERS_ENABLE (default: true) Allow the agent to automatically detect running devcontainers. diff --git a/cli/testdata/coder_create_--help.golden b/cli/testdata/coder_create_--help.golden index 8e8ea4a1701eb..47e809e8f5af6 100644 --- a/cli/testdata/coder_create_--help.golden +++ b/cli/testdata/coder_create_--help.golden @@ -26,6 +26,10 @@ OPTIONS: --parameter-default string-array, $CODER_RICH_PARAMETER_DEFAULT Rich parameter default values in the format "name=value". + --preset string, $CODER_PRESET_NAME + Specify the name of a template version preset. Use 'none' to + explicitly indicate that no preset should be used. + --rich-parameter-file string, $CODER_RICH_PARAMETER_FILE Specify a file path with values for rich parameters defined in the template. The file should be in YAML format, containing key-value diff --git a/cli/testdata/coder_list_--output_json.golden b/cli/testdata/coder_list_--output_json.golden index 51c2887cd1e4a..82b73f7b24989 100644 --- a/cli/testdata/coder_list_--output_json.golden +++ b/cli/testdata/coder_list_--output_json.golden @@ -15,7 +15,7 @@ "template_allow_user_cancel_workspace_jobs": false, "template_active_version_id": "============[version ID]============", "template_require_active_version": false, - "template_use_classic_parameter_flow": true, + "template_use_classic_parameter_flow": false, "latest_build": { "id": "========[workspace build ID]========", "created_at": "====[timestamp]=====", @@ -55,7 +55,8 @@ "template_name": "", "template_display_name": "", "template_icon": "" - } + }, + "logs_overflowed": false }, "reason": "initiator", "resources": [], @@ -69,7 +70,8 @@ "most_recently_seen": null }, "template_version_preset_id": null, - "has_ai_task": false + "has_ai_task": false, + "has_external_agent": false }, "latest_app_status": null, "outdated": false, diff --git a/cli/testdata/coder_provisioner_jobs_list_--help.golden b/cli/testdata/coder_provisioner_jobs_list_--help.golden index f380a0334867c..8e22f78e978f2 100644 --- a/cli/testdata/coder_provisioner_jobs_list_--help.golden +++ b/cli/testdata/coder_provisioner_jobs_list_--help.golden @@ -11,7 +11,7 @@ OPTIONS: -O, --org string, $CODER_ORGANIZATION Select which organization (uuid or name) to use. - -c, --column [id|created at|started at|completed at|canceled at|error|error code|status|worker id|worker name|file id|tags|queue position|queue size|organization id|template version id|workspace build id|type|available workers|template version name|template id|template name|template display name|template icon|workspace id|workspace name|organization|queue] (default: created at,id,type,template display name,status,queue,tags) + -c, --column [id|created at|started at|completed at|canceled at|error|error code|status|worker id|worker name|file id|tags|queue position|queue size|organization id|template version id|workspace build id|type|available workers|template version name|template id|template name|template display name|template icon|workspace id|workspace name|logs overflowed|organization|queue] (default: created at,id,type,template display name,status,queue,tags) Columns to display in table output. -l, --limit int, $CODER_PROVISIONER_JOB_LIST_LIMIT (default: 50) diff --git a/cli/testdata/coder_provisioner_jobs_list_--output_json.golden b/cli/testdata/coder_provisioner_jobs_list_--output_json.golden index e36723765b4df..6ccf672360a55 100644 --- a/cli/testdata/coder_provisioner_jobs_list_--output_json.golden +++ b/cli/testdata/coder_provisioner_jobs_list_--output_json.golden @@ -26,6 +26,7 @@ "template_display_name": "", "template_icon": "" }, + "logs_overflowed": false, "organization_name": "Coder" }, { @@ -57,6 +58,7 @@ "workspace_id": "===========[workspace ID]===========", "workspace_name": "test-workspace" }, + "logs_overflowed": false, "organization_name": "Coder" } ] diff --git a/cli/testdata/coder_provisioner_list_--help.golden b/cli/testdata/coder_provisioner_list_--help.golden index 7a1807bb012f5..ce6d0754073a4 100644 --- a/cli/testdata/coder_provisioner_list_--help.golden +++ b/cli/testdata/coder_provisioner_list_--help.golden @@ -17,8 +17,17 @@ OPTIONS: -l, --limit int, $CODER_PROVISIONER_LIST_LIMIT (default: 50) Limit the number of provisioners returned. + -m, --max-age duration, $CODER_PROVISIONER_LIST_MAX_AGE + Filter provisioners by maximum age. + -o, --output table|json (default: table) Output format. + -f, --show-offline bool, $CODER_PROVISIONER_SHOW_OFFLINE + Show offline provisioners. + + -s, --status [offline|idle|busy], $CODER_PROVISIONER_LIST_STATUS + Filter by provisioner status. + ——— Run `coder --help` for a list of global options. diff --git a/cli/testdata/coder_provisioner_list_--output_json.golden b/cli/testdata/coder_provisioner_list_--output_json.golden index cfa777e99c3f9..ad26225c2ed10 100644 --- a/cli/testdata/coder_provisioner_list_--output_json.golden +++ b/cli/testdata/coder_provisioner_list_--output_json.golden @@ -7,7 +7,7 @@ "last_seen_at": "====[timestamp]=====", "name": "test-daemon", "version": "v0.0.0-devel", - "api_version": "1.7", + "api_version": "1.9", "provisioners": [ "echo" ], diff --git a/cli/testdata/coder_schedule_extend_--help.golden b/cli/testdata/coder_schedule_extend_--help.golden index 2135b09dc7cc3..57992108cb7c0 100644 --- a/cli/testdata/coder_schedule_extend_--help.golden +++ b/cli/testdata/coder_schedule_extend_--help.golden @@ -7,7 +7,8 @@ USAGE: Aliases: override-stop - * The new stop time is calculated from *now*. + Extends the workspace deadline. + * The new stop time is calculated from *now*. * The new stop time must be at least 30 minutes in the future. * The workspace template may restrict the maximum workspace runtime. diff --git a/cli/testdata/coder_templates_presets_list_--help.golden b/cli/testdata/coder_templates_presets_list_--help.golden index 81445df03cc97..e64ef1ee36e96 100644 --- a/cli/testdata/coder_templates_presets_list_--help.golden +++ b/cli/testdata/coder_templates_presets_list_--help.golden @@ -10,7 +10,7 @@ OPTIONS: -O, --org string, $CODER_ORGANIZATION Select which organization (uuid or name) to use. - -c, --column [name|parameters|default|desired prebuild instances] (default: name,parameters,default,desired prebuild instances) + -c, --column [name|description|parameters|default|desired prebuild instances] (default: name,description,parameters,default,desired prebuild instances) Columns to display in table output. -o, --output table|json (default: table) diff --git a/cli/update_test.go b/cli/update_test.go index 7a7480353c01d..b80218f49ab45 100644 --- a/cli/update_test.go +++ b/cli/update_test.go @@ -182,7 +182,7 @@ func TestUpdateWithRichParameters(t *testing.T) { {Name: firstParameterName, Description: firstParameterDescription, Mutable: true}, {Name: immutableParameterName, Description: immutableParameterDescription, Mutable: false}, {Name: secondParameterName, Description: secondParameterDescription, Mutable: true}, - {Name: ephemeralParameterName, Description: ephemeralParameterDescription, Mutable: true, Ephemeral: true}, + {Name: ephemeralParameterName, Description: ephemeralParameterDescription, Mutable: true, Ephemeral: true, DefaultValue: "unset"}, }) } @@ -811,7 +811,9 @@ func TestUpdateValidateRichParameters(t *testing.T) { } version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, prepareEchoResponses(templateParameters)) coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) - template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID, func(request *codersdk.CreateTemplateRequest) { + request.UseClassicParameterFlow = ptr.Ref(true) // TODO: Remove when dynamic parameters can pass this test + }) // Create new workspace inv, root := clitest.New(t, "create", "my-workspace", "--yes", "--template", template.Name, "--parameter", fmt.Sprintf("%s=%s", numberParameterName, tempVal)) diff --git a/cli/vpndaemon_darwin.go b/cli/vpndaemon_darwin.go new file mode 100644 index 0000000000000..a1b836dd6b0c3 --- /dev/null +++ b/cli/vpndaemon_darwin.go @@ -0,0 +1,73 @@ +//go:build darwin + +package cli + +import ( + "golang.org/x/xerrors" + + "cdr.dev/slog" + "github.com/coder/coder/v2/vpn" + "github.com/coder/serpent" +) + +func (r *RootCmd) vpnDaemonRun() *serpent.Command { + var ( + rpcReadFD int64 + rpcWriteFD int64 + ) + + cmd := &serpent.Command{ + Use: "run", + Short: "Run the VPN daemon on macOS.", + Middleware: serpent.Chain( + serpent.RequireNArgs(0), + ), + Options: serpent.OptionSet{ + { + Flag: "rpc-read-fd", + Env: "CODER_VPN_DAEMON_RPC_READ_FD", + Description: "The file descriptor for the pipe to read from the RPC connection.", + Value: serpent.Int64Of(&rpcReadFD), + Required: true, + }, + { + Flag: "rpc-write-fd", + Env: "CODER_VPN_DAEMON_RPC_WRITE_FD", + Description: "The file descriptor for the pipe to write to the RPC connection.", + Value: serpent.Int64Of(&rpcWriteFD), + Required: true, + }, + }, + Handler: func(inv *serpent.Invocation) error { + ctx := inv.Context() + + if rpcReadFD < 0 || rpcWriteFD < 0 { + return xerrors.Errorf("rpc-read-fd (%v) and rpc-write-fd (%v) must be positive", rpcReadFD, rpcWriteFD) + } + if rpcReadFD == rpcWriteFD { + return xerrors.Errorf("rpc-read-fd (%v) and rpc-write-fd (%v) must be different", rpcReadFD, rpcWriteFD) + } + + pipe, err := vpn.NewBidirectionalPipe(uintptr(rpcReadFD), uintptr(rpcWriteFD)) + if err != nil { + return xerrors.Errorf("create bidirectional RPC pipe: %w", err) + } + defer pipe.Close() + + tunnel, err := vpn.NewTunnel(ctx, slog.Make().Leveled(slog.LevelDebug), pipe, + vpn.NewClient(), + vpn.UseOSNetworkingStack(), + vpn.UseAsLogger(), + ) + if err != nil { + return xerrors.Errorf("create new tunnel for client: %w", err) + } + defer tunnel.Close() + + <-ctx.Done() + return nil + }, + } + + return cmd +} diff --git a/cli/vpndaemon_other.go b/cli/vpndaemon_other.go index 2e3e39b1b99ba..1526efb011889 100644 --- a/cli/vpndaemon_other.go +++ b/cli/vpndaemon_other.go @@ -1,4 +1,4 @@ -//go:build !windows +//go:build !windows && !darwin package cli diff --git a/cli/vscodessh.go b/cli/vscodessh.go index e0b963b7ed80d..bd249b0a6f4ca 100644 --- a/cli/vscodessh.go +++ b/cli/vscodessh.go @@ -102,7 +102,7 @@ func (r *RootCmd) vscodeSSH() *serpent.Command { // will call this command after the workspace is started. autostart := false - workspace, workspaceAgent, _, err := getWorkspaceAndAgent(ctx, inv, client, autostart, fmt.Sprintf("%s/%s", owner, name)) + workspace, workspaceAgent, _, err := GetWorkspaceAndAgent(ctx, inv, client, autostart, fmt.Sprintf("%s/%s", owner, name)) if err != nil { return xerrors.Errorf("find workspace and agent: %w", err) } diff --git a/coderd/agentapi/stats_test.go b/coderd/agentapi/stats_test.go index 3ebf99aa6bc4b..aec2d68b71c12 100644 --- a/coderd/agentapi/stats_test.go +++ b/coderd/agentapi/stats_test.go @@ -41,11 +41,12 @@ func TestUpdateStates(t *testing.T) { Name: "tpl", } workspace = database.Workspace{ - ID: uuid.New(), - OwnerID: user.ID, - TemplateID: template.ID, - Name: "xyz", - TemplateName: template.Name, + ID: uuid.New(), + OwnerID: user.ID, + OwnerUsername: user.Username, + TemplateID: template.ID, + Name: "xyz", + TemplateName: template.Name, } agent = database.WorkspaceAgent{ ID: uuid.New(), @@ -138,9 +139,6 @@ func TestUpdateStates(t *testing.T) { // Workspace gets fetched. dbM.EXPECT().GetWorkspaceByAgentID(gomock.Any(), agent.ID).Return(workspace, nil) - // User gets fetched to hit the UpdateAgentMetricsFn. - dbM.EXPECT().GetUserByID(gomock.Any(), user.ID).Return(user, nil) - // We expect an activity bump because ConnectionCount > 0. dbM.EXPECT().ActivityBumpWorkspace(gomock.Any(), database.ActivityBumpWorkspaceParams{ WorkspaceID: workspace.ID, @@ -380,9 +378,6 @@ func TestUpdateStates(t *testing.T) { LastUsedAt: now.UTC(), }).Return(nil) - // User gets fetched to hit the UpdateAgentMetricsFn. - dbM.EXPECT().GetUserByID(gomock.Any(), user.ID).Return(user, nil) - resp, err := api.UpdateStats(context.Background(), req) require.NoError(t, err) require.Equal(t, &agentproto.UpdateStatsResponse{ @@ -498,9 +493,6 @@ func TestUpdateStates(t *testing.T) { LastUsedAt: now, }).Return(nil) - // User gets fetched to hit the UpdateAgentMetricsFn. - dbM.EXPECT().GetUserByID(gomock.Any(), user.ID).Return(user, nil) - // Ensure that pubsub notifications are sent. notifyDescription := make(chan struct{}) ps.SubscribeWithErr(wspubsub.WorkspaceEventChannel(workspace.OwnerID), diff --git a/coderd/agentapi/subagent_test.go b/coderd/agentapi/subagent_test.go index 0a95a70e5216d..1b6eef936f827 100644 --- a/coderd/agentapi/subagent_test.go +++ b/coderd/agentapi/subagent_test.go @@ -163,7 +163,7 @@ func TestSubAgentAPI(t *testing.T) { agentID, err := uuid.FromBytes(createResp.Agent.Id) require.NoError(t, err) - agent, err := api.Database.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), agentID) //nolint:gocritic // this is a test. + agent, err := api.Database.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), agentID) require.NoError(t, err) assert.Equal(t, tt.agentName, agent.Name) @@ -621,7 +621,7 @@ func TestSubAgentAPI(t *testing.T) { agentID, err := uuid.FromBytes(createResp.Agent.Id) require.NoError(t, err) - apps, err := api.Database.GetWorkspaceAppsByAgentID(dbauthz.AsSystemRestricted(ctx), agentID) //nolint:gocritic // this is a test. + apps, err := api.Database.GetWorkspaceAppsByAgentID(dbauthz.AsSystemRestricted(ctx), agentID) require.NoError(t, err) // Sort the apps for determinism @@ -751,7 +751,7 @@ func TestSubAgentAPI(t *testing.T) { agentID, err := uuid.FromBytes(createResp.Agent.Id) require.NoError(t, err) - apps, err := db.GetWorkspaceAppsByAgentID(dbauthz.AsSystemRestricted(ctx), agentID) //nolint:gocritic // this is a test. + apps, err := db.GetWorkspaceAppsByAgentID(dbauthz.AsSystemRestricted(ctx), agentID) require.NoError(t, err) require.Len(t, apps, 1) require.Equal(t, "k5jd7a99-duplicate-slug", apps[0].Slug) @@ -789,7 +789,7 @@ func TestSubAgentAPI(t *testing.T) { require.NoError(t, err) // Then: It is deleted. - _, err = db.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), childAgent.ID) //nolint:gocritic // this is a test. + _, err = db.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), childAgent.ID) require.ErrorIs(t, err, sql.ErrNoRows) }) @@ -830,10 +830,10 @@ func TestSubAgentAPI(t *testing.T) { require.NoError(t, err) // Then: The correct one is deleted. - _, err = api.Database.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), childAgentOne.ID) //nolint:gocritic // this is a test. + _, err = api.Database.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), childAgentOne.ID) require.ErrorIs(t, err, sql.ErrNoRows) - _, err = api.Database.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), childAgentTwo.ID) //nolint:gocritic // this is a test. + _, err = api.Database.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), childAgentTwo.ID) require.NoError(t, err) }) @@ -871,7 +871,7 @@ func TestSubAgentAPI(t *testing.T) { var notAuthorizedError dbauthz.NotAuthorizedError require.ErrorAs(t, err, ¬AuthorizedError) - _, err = db.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), childAgentOne.ID) //nolint:gocritic // this is a test. + _, err = db.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), childAgentOne.ID) require.NoError(t, err) }) @@ -912,7 +912,7 @@ func TestSubAgentAPI(t *testing.T) { require.NoError(t, err) // Verify that the apps were created - apps, err := api.Database.GetWorkspaceAppsByAgentID(dbauthz.AsSystemRestricted(ctx), subAgentID) //nolint:gocritic // this is a test. + apps, err := api.Database.GetWorkspaceAppsByAgentID(dbauthz.AsSystemRestricted(ctx), subAgentID) require.NoError(t, err) require.Len(t, apps, 2) @@ -923,7 +923,7 @@ func TestSubAgentAPI(t *testing.T) { require.NoError(t, err) // Then: The agent is deleted - _, err = api.Database.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), subAgentID) //nolint:gocritic // this is a test. + _, err = api.Database.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), subAgentID) require.ErrorIs(t, err, sql.ErrNoRows) // And: The apps are *retained* to avoid causing issues @@ -1068,7 +1068,7 @@ func TestSubAgentAPI(t *testing.T) { agentID, err := uuid.FromBytes(createResp.Agent.Id) require.NoError(t, err) - subAgent, err := api.Database.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), agentID) //nolint:gocritic // this is a test. + subAgent, err := api.Database.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), agentID) require.NoError(t, err) require.Equal(t, len(tt.expectedApps), len(subAgent.DisplayApps), "display apps count mismatch") @@ -1118,14 +1118,14 @@ func TestSubAgentAPI(t *testing.T) { require.NoError(t, err) // Verify display apps - subAgent, err := api.Database.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), agentID) //nolint:gocritic // this is a test. + subAgent, err := api.Database.GetWorkspaceAgentByID(dbauthz.AsSystemRestricted(ctx), agentID) require.NoError(t, err) require.Len(t, subAgent.DisplayApps, 2) require.Equal(t, database.DisplayAppVscode, subAgent.DisplayApps[0]) require.Equal(t, database.DisplayAppWebTerminal, subAgent.DisplayApps[1]) // Verify regular apps - apps, err := api.Database.GetWorkspaceAppsByAgentID(dbauthz.AsSystemRestricted(ctx), agentID) //nolint:gocritic // this is a test. + apps, err := api.Database.GetWorkspaceAppsByAgentID(dbauthz.AsSystemRestricted(ctx), agentID) require.NoError(t, err) require.Len(t, apps, 1) require.Equal(t, "v4qhkq17-custom-app", apps[0].Slug) @@ -1190,7 +1190,7 @@ func TestSubAgentAPI(t *testing.T) { }) // When: We list the sub agents. - listResp, err := api.ListSubAgents(ctx, &proto.ListSubAgentsRequest{}) //nolint:gocritic // this is a test. + listResp, err := api.ListSubAgents(ctx, &proto.ListSubAgentsRequest{}) require.NoError(t, err) listedChildAgents := listResp.Agents diff --git a/coderd/aitasks.go b/coderd/aitasks.go index a982ccc39b26b..de607e7619f77 100644 --- a/coderd/aitasks.go +++ b/coderd/aitasks.go @@ -1,13 +1,28 @@ package coderd import ( + "context" + "database/sql" + "errors" "fmt" "net/http" + "slices" "strings" + "github.com/go-chi/chi/v5" "github.com/google/uuid" + "golang.org/x/xerrors" + "cdr.dev/slog" + + "github.com/coder/coder/v2/coderd/audit" + "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/httpapi" + "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" + "github.com/coder/coder/v2/coderd/searchquery" + "github.com/coder/coder/v2/coderd/taskname" "github.com/coder/coder/v2/codersdk" ) @@ -61,3 +76,367 @@ func (api *API) aiTasksPrompts(rw http.ResponseWriter, r *http.Request) { Prompts: promptsByBuildID, }) } + +// This endpoint is experimental and not guaranteed to be stable, so we're not +// generating public-facing documentation for it. +func (api *API) tasksCreate(rw http.ResponseWriter, r *http.Request) { + var ( + ctx = r.Context() + apiKey = httpmw.APIKey(r) + auditor = api.Auditor.Load() + mems = httpmw.OrganizationMembersParam(r) + ) + + var req codersdk.CreateTaskRequest + if !httpapi.Read(ctx, rw, r, &req) { + return + } + + hasAITask, err := api.Database.GetTemplateVersionHasAITask(ctx, req.TemplateVersionID) + if err != nil { + if errors.Is(err, sql.ErrNoRows) || rbac.IsUnauthorizedError(err) { + httpapi.ResourceNotFound(rw) + return + } + + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error fetching whether the template version has an AI task.", + Detail: err.Error(), + }) + return + } + if !hasAITask { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: fmt.Sprintf(`Template does not have required parameter %q`, codersdk.AITaskPromptParameterName), + }) + return + } + + taskName := taskname.GenerateFallback() + if anthropicAPIKey := taskname.GetAnthropicAPIKeyFromEnv(); anthropicAPIKey != "" { + anthropicModel := taskname.GetAnthropicModelFromEnv() + + generatedName, err := taskname.Generate(ctx, req.Prompt, taskname.WithAPIKey(anthropicAPIKey), taskname.WithModel(anthropicModel)) + if err != nil { + api.Logger.Error(ctx, "unable to generate task name", slog.Error(err)) + } else { + taskName = generatedName + } + } + + createReq := codersdk.CreateWorkspaceRequest{ + Name: taskName, + TemplateVersionID: req.TemplateVersionID, + TemplateVersionPresetID: req.TemplateVersionPresetID, + RichParameterValues: []codersdk.WorkspaceBuildParameter{ + {Name: codersdk.AITaskPromptParameterName, Value: req.Prompt}, + }, + } + + var owner workspaceOwner + if mems.User != nil { + // This user fetch is an optimization path for the most common case of creating a + // task for 'Me'. + // + // This is also required to allow `owners` to create workspaces for users + // that are not in an organization. + owner = workspaceOwner{ + ID: mems.User.ID, + Username: mems.User.Username, + AvatarURL: mems.User.AvatarURL, + } + } else { + // A task can still be created if the caller can read the organization + // member. The organization is required, which can be sourced from the + // template. + // + // TODO: This code gets called twice for each workspace build request. + // This is inefficient and costs at most 2 extra RTTs to the DB. + // This can be optimized. It exists as it is now for code simplicity. + // The most common case is to create a workspace for 'Me'. Which does + // not enter this code branch. + template, ok := requestTemplate(ctx, rw, createReq, api.Database) + if !ok { + return + } + + // If the caller can find the organization membership in the same org + // as the template, then they can continue. + orgIndex := slices.IndexFunc(mems.Memberships, func(mem httpmw.OrganizationMember) bool { + return mem.OrganizationID == template.OrganizationID + }) + if orgIndex == -1 { + httpapi.ResourceNotFound(rw) + return + } + + member := mems.Memberships[orgIndex] + owner = workspaceOwner{ + ID: member.UserID, + Username: member.Username, + AvatarURL: member.AvatarURL, + } + } + + aReq, commitAudit := audit.InitRequest[database.WorkspaceTable](rw, &audit.RequestParams{ + Audit: *auditor, + Log: api.Logger, + Request: r, + Action: database.AuditActionCreate, + AdditionalFields: audit.AdditionalFields{ + WorkspaceOwner: owner.Username, + }, + }) + + defer commitAudit() + createWorkspace(ctx, aReq, apiKey.UserID, api, owner, createReq, rw, r) +} + +// tasksFromWorkspaces converts a slice of API workspaces into tasks, fetching +// prompts and mapping status/state. This method enforces that only AI task +// workspaces are given. +func (api *API) tasksFromWorkspaces(ctx context.Context, apiWorkspaces []codersdk.Workspace) ([]codersdk.Task, error) { + // Enforce that only AI task workspaces are given. + for _, ws := range apiWorkspaces { + if ws.LatestBuild.HasAITask == nil || !*ws.LatestBuild.HasAITask { + return nil, xerrors.Errorf("workspace %s is not an AI task workspace", ws.ID) + } + } + + // Fetch prompts for each workspace build and map by build ID. + buildIDs := make([]uuid.UUID, 0, len(apiWorkspaces)) + for _, ws := range apiWorkspaces { + buildIDs = append(buildIDs, ws.LatestBuild.ID) + } + parameters, err := api.Database.GetWorkspaceBuildParametersByBuildIDs(ctx, buildIDs) + if err != nil { + return nil, err + } + promptsByBuildID := make(map[uuid.UUID]string, len(parameters)) + for _, p := range parameters { + if p.Name == codersdk.AITaskPromptParameterName { + promptsByBuildID[p.WorkspaceBuildID] = p.Value + } + } + + tasks := make([]codersdk.Task, 0, len(apiWorkspaces)) + for _, ws := range apiWorkspaces { + var currentState *codersdk.TaskStateEntry + if ws.LatestAppStatus != nil { + currentState = &codersdk.TaskStateEntry{ + Timestamp: ws.LatestAppStatus.CreatedAt, + State: codersdk.TaskState(ws.LatestAppStatus.State), + Message: ws.LatestAppStatus.Message, + URI: ws.LatestAppStatus.URI, + } + } + tasks = append(tasks, codersdk.Task{ + ID: ws.ID, + OrganizationID: ws.OrganizationID, + OwnerID: ws.OwnerID, + Name: ws.Name, + TemplateID: ws.TemplateID, + WorkspaceID: uuid.NullUUID{Valid: true, UUID: ws.ID}, + CreatedAt: ws.CreatedAt, + UpdatedAt: ws.UpdatedAt, + InitialPrompt: promptsByBuildID[ws.LatestBuild.ID], + Status: ws.LatestBuild.Status, + CurrentState: currentState, + }) + } + + return tasks, nil +} + +// tasksListResponse wraps a list of experimental tasks. +// +// Experimental: Response shape is experimental and may change. +type tasksListResponse struct { + Tasks []codersdk.Task `json:"tasks"` + Count int `json:"count"` +} + +// tasksList is an experimental endpoint to list AI tasks by mapping +// workspaces to a task-shaped response. +func (api *API) tasksList(rw http.ResponseWriter, r *http.Request) { + ctx := r.Context() + apiKey := httpmw.APIKey(r) + + // Support standard pagination/filters for workspaces. + page, ok := ParsePagination(rw, r) + if !ok { + return + } + queryStr := r.URL.Query().Get("q") + filter, errs := searchquery.Workspaces(ctx, api.Database, queryStr, page, api.AgentInactiveDisconnectTimeout) + if len(errs) > 0 { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "Invalid workspace search query.", + Validations: errs, + }) + return + } + + // Ensure that we only include AI task workspaces in the results. + filter.HasAITask = sql.NullBool{Valid: true, Bool: true} + + if filter.OwnerUsername == "me" || filter.OwnerUsername == "" { + filter.OwnerID = apiKey.UserID + filter.OwnerUsername = "" + } + + prepared, err := api.HTTPAuth.AuthorizeSQLFilter(r, policy.ActionRead, rbac.ResourceWorkspace.Type) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error preparing sql filter.", + Detail: err.Error(), + }) + return + } + + // Order with requester's favorites first, include summary row. + filter.RequesterID = apiKey.UserID + filter.WithSummary = true + + workspaceRows, err := api.Database.GetAuthorizedWorkspaces(ctx, filter, prepared) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error fetching workspaces.", + Detail: err.Error(), + }) + return + } + if len(workspaceRows) == 0 { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error fetching workspaces.", + Detail: "Workspace summary row is missing.", + }) + return + } + if len(workspaceRows) == 1 { + httpapi.Write(ctx, rw, http.StatusOK, tasksListResponse{ + Tasks: []codersdk.Task{}, + Count: 0, + }) + return + } + + // Skip summary row. + workspaceRows = workspaceRows[:len(workspaceRows)-1] + + workspaces := database.ConvertWorkspaceRows(workspaceRows) + + // Gather associated data and convert to API workspaces. + data, err := api.workspaceData(ctx, workspaces) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error fetching workspace resources.", + Detail: err.Error(), + }) + return + } + apiWorkspaces, err := convertWorkspaces(apiKey.UserID, workspaces, data) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error converting workspaces.", + Detail: err.Error(), + }) + return + } + + tasks, err := api.tasksFromWorkspaces(ctx, apiWorkspaces) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error fetching task prompts and states.", + Detail: err.Error(), + }) + return + } + + httpapi.Write(ctx, rw, http.StatusOK, tasksListResponse{ + Tasks: tasks, + Count: len(tasks), + }) +} + +// taskGet is an experimental endpoint to fetch a single AI task by ID +// (workspace ID). It returns a synthesized task response including +// prompt and status. +func (api *API) taskGet(rw http.ResponseWriter, r *http.Request) { + ctx := r.Context() + apiKey := httpmw.APIKey(r) + + idStr := chi.URLParam(r, "id") + taskID, err := uuid.Parse(idStr) + if err != nil { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: fmt.Sprintf("Invalid UUID %q for task ID.", idStr), + }) + return + } + + // For now, taskID = workspaceID, once we have a task data model in + // the DB, we can change this lookup. + workspaceID := taskID + workspace, err := api.Database.GetWorkspaceByID(ctx, workspaceID) + if httpapi.Is404Error(err) { + httpapi.ResourceNotFound(rw) + return + } + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error fetching workspace.", + Detail: err.Error(), + }) + return + } + + data, err := api.workspaceData(ctx, []database.Workspace{workspace}) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error fetching workspace resources.", + Detail: err.Error(), + }) + return + } + if len(data.builds) == 0 || len(data.templates) == 0 { + httpapi.ResourceNotFound(rw) + return + } + if data.builds[0].HasAITask == nil || !*data.builds[0].HasAITask { + httpapi.ResourceNotFound(rw) + return + } + + appStatus := codersdk.WorkspaceAppStatus{} + if len(data.appStatuses) > 0 { + appStatus = data.appStatuses[0] + } + + ws, err := convertWorkspace( + apiKey.UserID, + workspace, + data.builds[0], + data.templates[0], + api.Options.AllowWorkspaceRenames, + appStatus, + ) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error converting workspace.", + Detail: err.Error(), + }) + return + } + + tasks, err := api.tasksFromWorkspaces(ctx, []codersdk.Workspace{ws}) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error fetching task prompt and state.", + Detail: err.Error(), + }) + return + } + + httpapi.Write(ctx, rw, http.StatusOK, tasks[0]) +} diff --git a/coderd/aitasks_test.go b/coderd/aitasks_test.go index 53f0174d6f03d..131238de8a5bd 100644 --- a/coderd/aitasks_test.go +++ b/coderd/aitasks_test.go @@ -1,13 +1,16 @@ package coderd_test import ( + "net/http" "testing" "github.com/google/uuid" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/database/dbtestutil" + "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/provisioner/echo" "github.com/coder/coder/v2/provisionersdk/proto" @@ -139,3 +142,243 @@ func TestAITasksPrompts(t *testing.T) { require.Empty(t, prompts.Prompts) }) } + +func TestTasks(t *testing.T) { + t.Parallel() + + createAITemplate := func(t *testing.T, client *codersdk.Client, user codersdk.CreateFirstUserResponse) codersdk.Template { + t.Helper() + + // Create a template version that supports AI tasks with the AI Prompt parameter. + taskAppID := uuid.New() + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionPlan: []*proto.Response{ + { + Type: &proto.Response_Plan{ + Plan: &proto.PlanComplete{ + Parameters: []*proto.RichParameter{{Name: codersdk.AITaskPromptParameterName, Type: "string"}}, + HasAiTasks: true, + }, + }, + }, + }, + ProvisionApply: []*proto.Response{ + { + Type: &proto.Response_Apply{ + Apply: &proto.ApplyComplete{ + Resources: []*proto.Resource{ + { + Name: "example", + Type: "aws_instance", + Agents: []*proto.Agent{ + { + Id: uuid.NewString(), + Name: "example", + Apps: []*proto.App{ + { + Id: taskAppID.String(), + Slug: "task-sidebar", + DisplayName: "Task Sidebar", + }, + }, + }, + }, + }, + }, + AiTasks: []*proto.AITask{ + { + SidebarApp: &proto.AITaskSidebarApp{ + Id: taskAppID.String(), + }, + }, + }, + }, + }, + }, + }, + }) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + + return template + } + + t.Run("List", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + ctx := testutil.Context(t, testutil.WaitLong) + + template := createAITemplate(t, client, user) + + // Create a workspace (task) with a specific prompt. + wantPrompt := "build me a web app" + workspace := coderdtest.CreateWorkspace(t, client, template.ID, func(req *codersdk.CreateWorkspaceRequest) { + req.RichParameterValues = []codersdk.WorkspaceBuildParameter{ + {Name: codersdk.AITaskPromptParameterName, Value: wantPrompt}, + } + }) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) + + // List tasks via experimental API and verify the prompt and status mapping. + exp := codersdk.NewExperimentalClient(client) + tasks, err := exp.Tasks(ctx, &codersdk.TasksFilter{Owner: codersdk.Me}) + require.NoError(t, err) + + got, ok := slice.Find(tasks, func(task codersdk.Task) bool { return task.ID == workspace.ID }) + require.True(t, ok, "task should be found in the list") + assert.Equal(t, wantPrompt, got.InitialPrompt, "task prompt should match the AI Prompt parameter") + assert.Equal(t, workspace.Name, got.Name, "task name should map from workspace name") + assert.Equal(t, workspace.ID, got.WorkspaceID.UUID, "workspace id should match") + // Status should be populated via app status or workspace status mapping. + assert.NotEmpty(t, got.Status, "task status should not be empty") + }) + + t.Run("Get", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + ctx := testutil.Context(t, testutil.WaitLong) + + template := createAITemplate(t, client, user) + + // Create a workspace (task) with a specific prompt. + wantPrompt := "review my code" + workspace := coderdtest.CreateWorkspace(t, client, template.ID, func(req *codersdk.CreateWorkspaceRequest) { + req.RichParameterValues = []codersdk.WorkspaceBuildParameter{ + {Name: codersdk.AITaskPromptParameterName, Value: wantPrompt}, + } + }) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) + + // Fetch the task by ID via experimental API and verify fields. + exp := codersdk.NewExperimentalClient(client) + task, err := exp.TaskByID(ctx, workspace.ID) + require.NoError(t, err) + + assert.Equal(t, workspace.ID, task.ID, "task ID should match workspace ID") + assert.Equal(t, workspace.Name, task.Name, "task name should map from workspace name") + assert.Equal(t, wantPrompt, task.InitialPrompt, "task prompt should match the AI Prompt parameter") + assert.Equal(t, workspace.ID, task.WorkspaceID.UUID, "workspace id should match") + assert.NotEmpty(t, task.Status, "task status should not be empty") + }) +} + +func TestTasksCreate(t *testing.T) { + t.Parallel() + + t.Run("OK", func(t *testing.T) { + t.Parallel() + + var ( + ctx = testutil.Context(t, testutil.WaitShort) + + taskPrompt = "Some task prompt" + ) + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + + // Given: A template with an "AI Prompt" parameter + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionApply: echo.ApplyComplete, + ProvisionPlan: []*proto.Response{ + {Type: &proto.Response_Plan{Plan: &proto.PlanComplete{ + Parameters: []*proto.RichParameter{{Name: "AI Prompt", Type: "string"}}, + HasAiTasks: true, + }}}, + }, + }) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + + expClient := codersdk.NewExperimentalClient(client) + + // When: We attempt to create a Task. + workspace, err := expClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Prompt: taskPrompt, + }) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) + + // Then: We expect a workspace to have been created. + assert.NotEmpty(t, workspace.Name) + assert.Equal(t, template.ID, workspace.TemplateID) + + // And: We expect it to have the "AI Prompt" parameter correctly set. + parameters, err := client.WorkspaceBuildParameters(ctx, workspace.LatestBuild.ID) + require.NoError(t, err) + require.Len(t, parameters, 1) + assert.Equal(t, codersdk.AITaskPromptParameterName, parameters[0].Name) + assert.Equal(t, taskPrompt, parameters[0].Value) + }) + + t.Run("FailsOnNonTaskTemplate", func(t *testing.T) { + t.Parallel() + + var ( + ctx = testutil.Context(t, testutil.WaitShort) + + taskPrompt = "Some task prompt" + ) + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + + // Given: A template without an "AI Prompt" parameter + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + + expClient := codersdk.NewExperimentalClient(client) + + // When: We attempt to create a Task. + _, err := expClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + TemplateVersionID: template.ActiveVersionID, + Prompt: taskPrompt, + }) + + // Then: We expect it to fail. + var sdkErr *codersdk.Error + require.Error(t, err) + require.ErrorAsf(t, err, &sdkErr, "error should be of type *codersdk.Error") + assert.Equal(t, http.StatusBadRequest, sdkErr.StatusCode()) + }) + + t.Run("FailsOnInvalidTemplate", func(t *testing.T) { + t.Parallel() + + var ( + ctx = testutil.Context(t, testutil.WaitShort) + + taskPrompt = "Some task prompt" + ) + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + + // Given: A template + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + _ = coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + + expClient := codersdk.NewExperimentalClient(client) + + // When: We attempt to create a Task with an invalid template version ID. + _, err := expClient.CreateTask(ctx, "me", codersdk.CreateTaskRequest{ + TemplateVersionID: uuid.New(), + Prompt: taskPrompt, + }) + + // Then: We expect it to fail. + var sdkErr *codersdk.Error + require.Error(t, err) + require.ErrorAsf(t, err, &sdkErr, "error should be of type *codersdk.Error") + assert.Equal(t, http.StatusNotFound, sdkErr.StatusCode()) + }) +} diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 3192860d6a0ca..96034721a5af2 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -1280,6 +1280,39 @@ const docTemplate = `{ } } }, + "/init-script/{os}/{arch}": { + "get": { + "produces": [ + "text/plain" + ], + "tags": [ + "InitScript" + ], + "summary": "Get agent init script", + "operationId": "get-agent-init-script", + "parameters": [ + { + "type": "string", + "description": "Operating system", + "name": "os", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Architecture", + "name": "arch", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "Success" + } + } + } + }, "/insights/daus": { "get": { "security": [ @@ -5140,8 +5173,8 @@ const docTemplate = `{ "tags": [ "Templates" ], - "summary": "Get template metadata by ID", - "operationId": "get-template-metadata-by-id", + "summary": "Get template settings by ID", + "operationId": "get-template-settings-by-id", "parameters": [ { "type": "string", @@ -5200,14 +5233,17 @@ const docTemplate = `{ "CoderSessionToken": [] } ], + "consumes": [ + "application/json" + ], "produces": [ "application/json" ], "tags": [ "Templates" ], - "summary": "Update template metadata by ID", - "operationId": "update-template-metadata-by-id", + "summary": "Update template settings by ID", + "operationId": "update-template-settings-by-id", "parameters": [ { "type": "string", @@ -5216,6 +5252,15 @@ const docTemplate = `{ "name": "template", "in": "path", "required": true + }, + { + "description": "Patch template settings request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.UpdateTemplateMeta" + } } ], "responses": { @@ -5289,7 +5334,7 @@ const docTemplate = `{ "required": true }, { - "description": "Update template request", + "description": "Update template ACL request", "name": "request", "in": "body", "required": true, @@ -7383,7 +7428,7 @@ const docTemplate = `{ }, { "type": "string", - "format": "uuid", + "format": "string", "description": "Key ID", "name": "keyid", "in": "path", @@ -7420,7 +7465,7 @@ const docTemplate = `{ }, { "type": "string", - "format": "uuid", + "format": "string", "description": "Key ID", "name": "keyid", "in": "path", @@ -9835,7 +9880,7 @@ const docTemplate = `{ "parameters": [ { "type": "string", - "description": "Search query in the format ` + "`" + `key:value` + "`" + `. Available keys are: owner, template, name, status, has-agent, dormant, last_used_after, last_used_before, has-ai-task.", + "description": "Search query in the format ` + "`" + `key:value` + "`" + `. Available keys are: owner, template, name, status, has-agent, dormant, last_used_after, last_used_before, has-ai-task, has_external_agent.", "name": "q", "in": "query" }, @@ -9942,6 +9987,50 @@ const docTemplate = `{ } } }, + "/workspaces/{workspace}/acl": { + "patch": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Workspaces" + ], + "summary": "Update workspace ACL", + "operationId": "update-workspace-acl", + "parameters": [ + { + "type": "string", + "format": "uuid", + "description": "Workspace ID", + "name": "workspace", + "in": "path", + "required": true + }, + { + "description": "Update workspace ACL request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.UpdateWorkspaceACL" + } + } + ], + "responses": { + "204": { + "description": "No Content" + } + } + } + }, "/workspaces/{workspace}/autostart": { "put": { "security": [ @@ -10227,6 +10316,48 @@ const docTemplate = `{ } } }, + "/workspaces/{workspace}/external-agent/{agent}/credentials": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Enterprise" + ], + "summary": "Get workspace external agent credentials", + "operationId": "get-workspace-external-agent-credentials", + "parameters": [ + { + "type": "string", + "format": "uuid", + "description": "Workspace ID", + "name": "workspace", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Agent name", + "name": "agent", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.ExternalAgentCredentials" + } + } + } + } + }, "/workspaces/{workspace}/favorite": { "put": { "security": [ @@ -11467,6 +11598,17 @@ const docTemplate = `{ "BuildReasonJetbrainsConnection" ] }, + "codersdk.CORSBehavior": { + "type": "string", + "enum": [ + "simple", + "passthru" + ], + "x-enum-varnames": [ + "CORSBehaviorSimple", + "CORSBehaviorPassthru" + ] + }, "codersdk.ChangePasswordWithOneTimePasscodeRequest": { "type": "object", "required": [ @@ -11808,6 +11950,14 @@ const docTemplate = `{ } ] }, + "cors_behavior": { + "description": "CORSBehavior allows optionally specifying the CORS behavior for all shared ports.", + "allOf": [ + { + "$ref": "#/definitions/codersdk.CORSBehavior" + } + ] + }, "default_ttl_ms": { "description": "DefaultTTLMillis allows optionally specifying the default TTL\nfor all workspaces created from this template.", "type": "integer" @@ -12814,7 +12964,8 @@ const docTemplate = `{ "workspace-usage", "web-push", "oauth2", - "mcp-server-http" + "mcp-server-http", + "workspace-sharing" ], "x-enum-comments": { "ExperimentAutoFillParameters": "This should not be taken out of experiments until we have redesigned the feature.", @@ -12823,6 +12974,7 @@ const docTemplate = `{ "ExperimentNotifications": "Sends notifications via SMTP and webhooks following certain events.", "ExperimentOAuth2": "Enables OAuth2 provider functionality.", "ExperimentWebPush": "Enables web push notifications through the browser.", + "ExperimentWorkspaceSharing": "Enables updating workspace ACLs for sharing with users and groups.", "ExperimentWorkspaceUsage": "Enables the new workspace usage tracking." }, "x-enum-varnames": [ @@ -12832,9 +12984,21 @@ const docTemplate = `{ "ExperimentWorkspaceUsage", "ExperimentWebPush", "ExperimentOAuth2", - "ExperimentMCPServerHTTP" + "ExperimentMCPServerHTTP", + "ExperimentWorkspaceSharing" ] }, + "codersdk.ExternalAgentCredentials": { + "type": "object", + "properties": { + "agent_token": { + "type": "string" + }, + "command": { + "type": "string" + } + } + }, "codersdk.ExternalAuth": { "type": "object", "properties": { @@ -14880,9 +15044,15 @@ const docTemplate = `{ "default": { "type": "boolean" }, + "description": { + "type": "string" + }, "desiredPrebuildInstances": { "type": "integer" }, + "icon": { + "type": "string" + }, "id": { "type": "string" }, @@ -15236,6 +15406,9 @@ const docTemplate = `{ "input": { "$ref": "#/definitions/codersdk.ProvisionerJobInput" }, + "logs_overflowed": { + "type": "boolean" + }, "metadata": { "$ref": "#/definitions/codersdk.ProvisionerJobMetadata" }, @@ -15626,7 +15799,9 @@ const docTemplate = `{ "system", "tailnet_coordinator", "template", + "usage_event", "user", + "user_secret", "webpush_subscription", "workspace", "workspace_agent_devcontainers", @@ -15666,7 +15841,9 @@ const docTemplate = `{ "ResourceSystem", "ResourceTailnetCoordinator", "ResourceTemplate", + "ResourceUsageEvent", "ResourceUser", + "ResourceUserSecret", "ResourceWebpushSubscription", "ResourceWorkspace", "ResourceWorkspaceAgentDevcontainers", @@ -16209,6 +16386,9 @@ const docTemplate = `{ "build_time_stats": { "$ref": "#/definitions/codersdk.TemplateBuildTimeStats" }, + "cors_behavior": { + "$ref": "#/definitions/codersdk.CORSBehavior" + }, "created_at": { "type": "string", "format": "date-time" @@ -16734,6 +16914,9 @@ const docTemplate = `{ "created_by": { "$ref": "#/definitions/codersdk.MinimalUser" }, + "has_external_agent": { + "type": "boolean" + }, "id": { "type": "string", "format": "uuid" @@ -17133,6 +17316,89 @@ const docTemplate = `{ } } }, + "codersdk.UpdateTemplateMeta": { + "type": "object", + "properties": { + "activity_bump_ms": { + "description": "ActivityBumpMillis allows optionally specifying the activity bump\nduration for all workspaces created from this template. Defaults to 1h\nbut can be set to 0 to disable activity bumping.", + "type": "integer" + }, + "allow_user_autostart": { + "type": "boolean" + }, + "allow_user_autostop": { + "type": "boolean" + }, + "allow_user_cancel_workspace_jobs": { + "type": "boolean" + }, + "autostart_requirement": { + "$ref": "#/definitions/codersdk.TemplateAutostartRequirement" + }, + "autostop_requirement": { + "description": "AutostopRequirement and AutostartRequirement can only be set if your license\nincludes the advanced template scheduling feature. If you attempt to set this\nvalue while unlicensed, it will be ignored.", + "allOf": [ + { + "$ref": "#/definitions/codersdk.TemplateAutostopRequirement" + } + ] + }, + "cors_behavior": { + "$ref": "#/definitions/codersdk.CORSBehavior" + }, + "default_ttl_ms": { + "type": "integer" + }, + "deprecation_message": { + "description": "DeprecationMessage if set, will mark the template as deprecated and block\nany new workspaces from using this template.\nIf passed an empty string, will remove the deprecated message, making\nthe template usable for new workspaces again.", + "type": "string" + }, + "description": { + "type": "string" + }, + "disable_everyone_group_access": { + "description": "DisableEveryoneGroupAccess allows optionally disabling the default\nbehavior of granting the 'everyone' group access to use the template.\nIf this is set to true, the template will not be available to all users,\nand must be explicitly granted to users or groups in the permissions settings\nof the template.", + "type": "boolean" + }, + "display_name": { + "type": "string" + }, + "failure_ttl_ms": { + "type": "integer" + }, + "icon": { + "type": "string" + }, + "max_port_share_level": { + "$ref": "#/definitions/codersdk.WorkspaceAgentPortShareLevel" + }, + "name": { + "type": "string" + }, + "require_active_version": { + "description": "RequireActiveVersion mandates workspaces built using this template\nuse the active version of the template. This option has no\neffect on template admins.", + "type": "boolean" + }, + "time_til_dormant_autodelete_ms": { + "type": "integer" + }, + "time_til_dormant_ms": { + "type": "integer" + }, + "update_workspace_dormant_at": { + "description": "UpdateWorkspaceDormant updates the dormant_at field of workspaces spawned\nfrom the template. This is useful for preventing dormant workspaces being immediately\ndeleted when updating the dormant_ttl field to a new, shorter value.", + "type": "boolean" + }, + "update_workspace_last_used_at": { + "description": "UpdateWorkspaceLastUsedAt updates the last_used_at field of workspaces\nspawned from the template. This is useful for preventing workspaces being\nimmediately locked when updating the inactivity_ttl field to a new, shorter\nvalue.", + "type": "boolean" + }, + "use_classic_parameter_flow": { + "description": "UseClassicParameterFlow is a flag that switches the default behavior to use the classic\nparameter flow when creating a workspace. This only affects deployments with the experiment\n\"dynamic-parameters\" enabled. This setting will live for a period after the experiment is\nmade the default.\nAn \"opt-out\" is present in case the new feature breaks some existing templates.", + "type": "boolean" + } + } + }, "codersdk.UpdateUserAppearanceSettingsRequest": { "type": "object", "required": [ @@ -17199,6 +17465,24 @@ const docTemplate = `{ } } }, + "codersdk.UpdateWorkspaceACL": { + "type": "object", + "properties": { + "group_roles": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/codersdk.WorkspaceRole" + } + }, + "user_roles": { + "description": "Keys must be valid UUIDs. To remove a user/group from the ACL use \"\" as the\nrole name (available as a constant named ` + "`" + `codersdk.WorkspaceRoleDeleted` + "`" + `)", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/codersdk.WorkspaceRole" + } + } + } + }, "codersdk.UpdateWorkspaceAutomaticUpdatesRequest": { "type": "object", "properties": { @@ -18575,6 +18859,9 @@ const docTemplate = `{ "has_ai_task": { "type": "boolean" }, + "has_external_agent": { + "type": "boolean" + }, "id": { "type": "string", "format": "uuid" @@ -18931,6 +19218,19 @@ const docTemplate = `{ } } }, + "codersdk.WorkspaceRole": { + "type": "string", + "enum": [ + "admin", + "use", + "" + ], + "x-enum-varnames": [ + "WorkspaceRoleAdmin", + "WorkspaceRoleUse", + "WorkspaceRoleDeleted" + ] + }, "codersdk.WorkspaceStatus": { "type": "string", "enum": [ diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index 26ee0c0cd05e4..107943e186c40 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -1108,6 +1108,35 @@ } } }, + "/init-script/{os}/{arch}": { + "get": { + "produces": ["text/plain"], + "tags": ["InitScript"], + "summary": "Get agent init script", + "operationId": "get-agent-init-script", + "parameters": [ + { + "type": "string", + "description": "Operating system", + "name": "os", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Architecture", + "name": "arch", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "Success" + } + } + } + }, "/insights/daus": { "get": { "security": [ @@ -4527,8 +4556,8 @@ ], "produces": ["application/json"], "tags": ["Templates"], - "summary": "Get template metadata by ID", - "operationId": "get-template-metadata-by-id", + "summary": "Get template settings by ID", + "operationId": "get-template-settings-by-id", "parameters": [ { "type": "string", @@ -4583,10 +4612,11 @@ "CoderSessionToken": [] } ], + "consumes": ["application/json"], "produces": ["application/json"], "tags": ["Templates"], - "summary": "Update template metadata by ID", - "operationId": "update-template-metadata-by-id", + "summary": "Update template settings by ID", + "operationId": "update-template-settings-by-id", "parameters": [ { "type": "string", @@ -4595,6 +4625,15 @@ "name": "template", "in": "path", "required": true + }, + { + "description": "Patch template settings request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.UpdateTemplateMeta" + } } ], "responses": { @@ -4658,7 +4697,7 @@ "required": true }, { - "description": "Update template request", + "description": "Update template ACL request", "name": "request", "in": "body", "required": true, @@ -6516,7 +6555,7 @@ }, { "type": "string", - "format": "uuid", + "format": "string", "description": "Key ID", "name": "keyid", "in": "path", @@ -6551,7 +6590,7 @@ }, { "type": "string", - "format": "uuid", + "format": "string", "description": "Key ID", "name": "keyid", "in": "path", @@ -8693,7 +8732,7 @@ "parameters": [ { "type": "string", - "description": "Search query in the format `key:value`. Available keys are: owner, template, name, status, has-agent, dormant, last_used_after, last_used_before, has-ai-task.", + "description": "Search query in the format `key:value`. Available keys are: owner, template, name, status, has-agent, dormant, last_used_after, last_used_before, has-ai-task, has_external_agent.", "name": "q", "in": "query" }, @@ -8792,6 +8831,44 @@ } } }, + "/workspaces/{workspace}/acl": { + "patch": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "consumes": ["application/json"], + "produces": ["application/json"], + "tags": ["Workspaces"], + "summary": "Update workspace ACL", + "operationId": "update-workspace-acl", + "parameters": [ + { + "type": "string", + "format": "uuid", + "description": "Workspace ID", + "name": "workspace", + "in": "path", + "required": true + }, + { + "description": "Update workspace ACL request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.UpdateWorkspaceACL" + } + } + ], + "responses": { + "204": { + "description": "No Content" + } + } + } + }, "/workspaces/{workspace}/autostart": { "put": { "security": [ @@ -9047,6 +9124,44 @@ } } }, + "/workspaces/{workspace}/external-agent/{agent}/credentials": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": ["application/json"], + "tags": ["Enterprise"], + "summary": "Get workspace external agent credentials", + "operationId": "get-workspace-external-agent-credentials", + "parameters": [ + { + "type": "string", + "format": "uuid", + "description": "Workspace ID", + "name": "workspace", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Agent name", + "name": "agent", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.ExternalAgentCredentials" + } + } + } + } + }, "/workspaces/{workspace}/favorite": { "put": { "security": [ @@ -10202,6 +10317,11 @@ "BuildReasonJetbrainsConnection" ] }, + "codersdk.CORSBehavior": { + "type": "string", + "enum": ["simple", "passthru"], + "x-enum-varnames": ["CORSBehaviorSimple", "CORSBehaviorPassthru"] + }, "codersdk.ChangePasswordWithOneTimePasscodeRequest": { "type": "object", "required": ["email", "one_time_passcode", "password"], @@ -10525,6 +10645,14 @@ } ] }, + "cors_behavior": { + "description": "CORSBehavior allows optionally specifying the CORS behavior for all shared ports.", + "allOf": [ + { + "$ref": "#/definitions/codersdk.CORSBehavior" + } + ] + }, "default_ttl_ms": { "description": "DefaultTTLMillis allows optionally specifying the default TTL\nfor all workspaces created from this template.", "type": "integer" @@ -11488,7 +11616,8 @@ "workspace-usage", "web-push", "oauth2", - "mcp-server-http" + "mcp-server-http", + "workspace-sharing" ], "x-enum-comments": { "ExperimentAutoFillParameters": "This should not be taken out of experiments until we have redesigned the feature.", @@ -11497,6 +11626,7 @@ "ExperimentNotifications": "Sends notifications via SMTP and webhooks following certain events.", "ExperimentOAuth2": "Enables OAuth2 provider functionality.", "ExperimentWebPush": "Enables web push notifications through the browser.", + "ExperimentWorkspaceSharing": "Enables updating workspace ACLs for sharing with users and groups.", "ExperimentWorkspaceUsage": "Enables the new workspace usage tracking." }, "x-enum-varnames": [ @@ -11506,9 +11636,21 @@ "ExperimentWorkspaceUsage", "ExperimentWebPush", "ExperimentOAuth2", - "ExperimentMCPServerHTTP" + "ExperimentMCPServerHTTP", + "ExperimentWorkspaceSharing" ] }, + "codersdk.ExternalAgentCredentials": { + "type": "object", + "properties": { + "agent_token": { + "type": "string" + }, + "command": { + "type": "string" + } + } + }, "codersdk.ExternalAuth": { "type": "object", "properties": { @@ -13487,9 +13629,15 @@ "default": { "type": "boolean" }, + "description": { + "type": "string" + }, "desiredPrebuildInstances": { "type": "integer" }, + "icon": { + "type": "string" + }, "id": { "type": "string" }, @@ -13833,6 +13981,9 @@ "input": { "$ref": "#/definitions/codersdk.ProvisionerJobInput" }, + "logs_overflowed": { + "type": "boolean" + }, "metadata": { "$ref": "#/definitions/codersdk.ProvisionerJobMetadata" }, @@ -14199,7 +14350,9 @@ "system", "tailnet_coordinator", "template", + "usage_event", "user", + "user_secret", "webpush_subscription", "workspace", "workspace_agent_devcontainers", @@ -14239,7 +14392,9 @@ "ResourceSystem", "ResourceTailnetCoordinator", "ResourceTemplate", + "ResourceUsageEvent", "ResourceUser", + "ResourceUserSecret", "ResourceWebpushSubscription", "ResourceWorkspace", "ResourceWorkspaceAgentDevcontainers", @@ -14768,6 +14923,9 @@ "build_time_stats": { "$ref": "#/definitions/codersdk.TemplateBuildTimeStats" }, + "cors_behavior": { + "$ref": "#/definitions/codersdk.CORSBehavior" + }, "created_at": { "type": "string", "format": "date-time" @@ -15267,6 +15425,9 @@ "created_by": { "$ref": "#/definitions/codersdk.MinimalUser" }, + "has_external_agent": { + "type": "boolean" + }, "id": { "type": "string", "format": "uuid" @@ -15646,6 +15807,89 @@ } } }, + "codersdk.UpdateTemplateMeta": { + "type": "object", + "properties": { + "activity_bump_ms": { + "description": "ActivityBumpMillis allows optionally specifying the activity bump\nduration for all workspaces created from this template. Defaults to 1h\nbut can be set to 0 to disable activity bumping.", + "type": "integer" + }, + "allow_user_autostart": { + "type": "boolean" + }, + "allow_user_autostop": { + "type": "boolean" + }, + "allow_user_cancel_workspace_jobs": { + "type": "boolean" + }, + "autostart_requirement": { + "$ref": "#/definitions/codersdk.TemplateAutostartRequirement" + }, + "autostop_requirement": { + "description": "AutostopRequirement and AutostartRequirement can only be set if your license\nincludes the advanced template scheduling feature. If you attempt to set this\nvalue while unlicensed, it will be ignored.", + "allOf": [ + { + "$ref": "#/definitions/codersdk.TemplateAutostopRequirement" + } + ] + }, + "cors_behavior": { + "$ref": "#/definitions/codersdk.CORSBehavior" + }, + "default_ttl_ms": { + "type": "integer" + }, + "deprecation_message": { + "description": "DeprecationMessage if set, will mark the template as deprecated and block\nany new workspaces from using this template.\nIf passed an empty string, will remove the deprecated message, making\nthe template usable for new workspaces again.", + "type": "string" + }, + "description": { + "type": "string" + }, + "disable_everyone_group_access": { + "description": "DisableEveryoneGroupAccess allows optionally disabling the default\nbehavior of granting the 'everyone' group access to use the template.\nIf this is set to true, the template will not be available to all users,\nand must be explicitly granted to users or groups in the permissions settings\nof the template.", + "type": "boolean" + }, + "display_name": { + "type": "string" + }, + "failure_ttl_ms": { + "type": "integer" + }, + "icon": { + "type": "string" + }, + "max_port_share_level": { + "$ref": "#/definitions/codersdk.WorkspaceAgentPortShareLevel" + }, + "name": { + "type": "string" + }, + "require_active_version": { + "description": "RequireActiveVersion mandates workspaces built using this template\nuse the active version of the template. This option has no\neffect on template admins.", + "type": "boolean" + }, + "time_til_dormant_autodelete_ms": { + "type": "integer" + }, + "time_til_dormant_ms": { + "type": "integer" + }, + "update_workspace_dormant_at": { + "description": "UpdateWorkspaceDormant updates the dormant_at field of workspaces spawned\nfrom the template. This is useful for preventing dormant workspaces being immediately\ndeleted when updating the dormant_ttl field to a new, shorter value.", + "type": "boolean" + }, + "update_workspace_last_used_at": { + "description": "UpdateWorkspaceLastUsedAt updates the last_used_at field of workspaces\nspawned from the template. This is useful for preventing workspaces being\nimmediately locked when updating the inactivity_ttl field to a new, shorter\nvalue.", + "type": "boolean" + }, + "use_classic_parameter_flow": { + "description": "UseClassicParameterFlow is a flag that switches the default behavior to use the classic\nparameter flow when creating a workspace. This only affects deployments with the experiment\n\"dynamic-parameters\" enabled. This setting will live for a period after the experiment is\nmade the default.\nAn \"opt-out\" is present in case the new feature breaks some existing templates.", + "type": "boolean" + } + } + }, "codersdk.UpdateUserAppearanceSettingsRequest": { "type": "object", "required": ["terminal_font", "theme_preference"], @@ -15703,6 +15947,24 @@ } } }, + "codersdk.UpdateWorkspaceACL": { + "type": "object", + "properties": { + "group_roles": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/codersdk.WorkspaceRole" + } + }, + "user_roles": { + "description": "Keys must be valid UUIDs. To remove a user/group from the ACL use \"\" as the\nrole name (available as a constant named `codersdk.WorkspaceRoleDeleted`)", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/codersdk.WorkspaceRole" + } + } + } + }, "codersdk.UpdateWorkspaceAutomaticUpdatesRequest": { "type": "object", "properties": { @@ -16991,6 +17253,9 @@ "has_ai_task": { "type": "boolean" }, + "has_external_agent": { + "type": "boolean" + }, "id": { "type": "string", "format": "uuid" @@ -17335,6 +17600,15 @@ } } }, + "codersdk.WorkspaceRole": { + "type": "string", + "enum": ["admin", "use", ""], + "x-enum-varnames": [ + "WorkspaceRoleAdmin", + "WorkspaceRoleUse", + "WorkspaceRoleDeleted" + ] + }, "codersdk.WorkspaceStatus": { "type": "string", "enum": [ diff --git a/coderd/apikey.go b/coderd/apikey.go index 895be440ef930..0bf2d6ca19a22 100644 --- a/coderd/apikey.go +++ b/coderd/apikey.go @@ -151,7 +151,7 @@ func (api *API) postAPIKey(rw http.ResponseWriter, r *http.Request) { // @Produce json // @Tags Users // @Param user path string true "User ID, name, or me" -// @Param keyid path string true "Key ID" format(uuid) +// @Param keyid path string true "Key ID" format(string) // @Success 200 {object} codersdk.APIKey // @Router /users/{user}/keys/{keyid} [get] func (api *API) apiKeyByID(rw http.ResponseWriter, r *http.Request) { @@ -292,7 +292,7 @@ func (api *API) tokens(rw http.ResponseWriter, r *http.Request) { // @Security CoderSessionToken // @Tags Users // @Param user path string true "User ID, name, or me" -// @Param keyid path string true "Key ID" format(uuid) +// @Param keyid path string true "Key ID" format(string) // @Success 204 // @Router /users/{user}/keys/{keyid} [delete] func (api *API) deleteAPIKey(rw http.ResponseWriter, r *http.Request) { diff --git a/coderd/autobuild/lifecycle_executor.go b/coderd/autobuild/lifecycle_executor.go index 234a72de04c50..945b5f8c7cd6d 100644 --- a/coderd/autobuild/lifecycle_executor.go +++ b/coderd/autobuild/lifecycle_executor.go @@ -20,6 +20,7 @@ import ( "cdr.dev/slog" "github.com/coder/coder/v2/coderd/files" + "github.com/coder/coder/v2/coderd/pproflabel" "github.com/coder/coder/v2/coderd/audit" "github.com/coder/coder/v2/coderd/database" @@ -28,6 +29,7 @@ import ( "github.com/coder/coder/v2/coderd/database/provisionerjobs" "github.com/coder/coder/v2/coderd/database/pubsub" "github.com/coder/coder/v2/coderd/notifications" + "github.com/coder/coder/v2/coderd/provisionerdserver" "github.com/coder/coder/v2/coderd/schedule" "github.com/coder/coder/v2/coderd/wsbuilder" "github.com/coder/coder/v2/codersdk" @@ -107,10 +109,10 @@ func (e *Executor) WithStatsChannel(ch chan<- Stats) *Executor { // tick from its channel. It will stop when its context is Done, or when // its channel is closed. func (e *Executor) Run() { - go func() { + pproflabel.Go(e.ctx, pproflabel.Service(pproflabel.ServiceLifecycles), func(ctx context.Context) { for { select { - case <-e.ctx.Done(): + case <-ctx.Done(): return case t, ok := <-e.tick: if !ok { @@ -120,15 +122,48 @@ func (e *Executor) Run() { e.metrics.autobuildExecutionDuration.Observe(stats.Elapsed.Seconds()) if e.statsCh != nil { select { - case <-e.ctx.Done(): + case <-ctx.Done(): return case e.statsCh <- stats: } } - e.log.Debug(e.ctx, "run stats", slog.F("elapsed", stats.Elapsed), slog.F("transitions", stats.Transitions)) + e.log.Debug(ctx, "run stats", slog.F("elapsed", stats.Elapsed), slog.F("transitions", stats.Transitions)) } } - }() + }) +} + +// hasValidProvisioner checks whether there is at least one valid (non-stale, correct tags) provisioner +// based on time t and the tags maps (such as from a templateVersionJob). +func (e *Executor) hasValidProvisioner(ctx context.Context, tx database.Store, t time.Time, ws database.Workspace, tags map[string]string) (bool, error) { + queryParams := database.GetProvisionerDaemonsByOrganizationParams{ + OrganizationID: ws.OrganizationID, + WantTags: tags, + } + + // nolint: gocritic // The user (in this case, the user/context for autostart builds) may not have the full + // permissions to read provisioner daemons, but we need to check if there's any for the job prior to the + // execution of the job via autostart to fix: https://github.com/coder/coder/issues/17941 + provisionerDaemons, err := tx.GetProvisionerDaemonsByOrganization(dbauthz.AsSystemReadProvisionerDaemons(ctx), queryParams) + if err != nil { + return false, xerrors.Errorf("get provisioner daemons: %w", err) + } + + logger := e.log.With(slog.F("tags", tags)) + // Check if any provisioners are active (not stale) + for _, pd := range provisionerDaemons { + if pd.LastSeenAt.Valid { + age := t.Sub(pd.LastSeenAt.Time) + if age <= provisionerdserver.StaleInterval { + logger.Debug(ctx, "hasValidProvisioner: found active provisioner", + slog.F("daemon_id", pd.ID), + ) + return true, nil + } + } + } + logger.Debug(ctx, "hasValidProvisioner: no active provisioners found") + return false, nil } func (e *Executor) runOnce(t time.Time) Stats { @@ -280,6 +315,22 @@ func (e *Executor) runOnce(t time.Time) Stats { return nil } + // Get the template version job to access tags + templateVersionJob, err := tx.GetProvisionerJobByID(e.ctx, activeTemplateVersion.JobID) + if err != nil { + return xerrors.Errorf("get template version job: %w", err) + } + + // Before creating the workspace build, check for available provisioners + hasProvisioners, err := e.hasValidProvisioner(e.ctx, tx, t, ws, templateVersionJob.Tags) + if err != nil { + return xerrors.Errorf("check provisioner availability: %w", err) + } + if !hasProvisioners { + log.Warn(e.ctx, "skipping autostart - no available provisioners") + return nil // Skip this workspace + } + if nextTransition != "" { builder := wsbuilder.New(ws, nextTransition, *e.buildUsageChecker.Load()). SetLastWorkspaceBuildInTx(&latestBuild). diff --git a/coderd/autobuild/lifecycle_executor_test.go b/coderd/autobuild/lifecycle_executor_test.go index 0229a907cbb2e..df7a7ad231e59 100644 --- a/coderd/autobuild/lifecycle_executor_test.go +++ b/coderd/autobuild/lifecycle_executor_test.go @@ -9,6 +9,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbgen" "github.com/coder/coder/v2/coderd/database/pubsub" + "github.com/coder/coder/v2/coderd/provisionerdserver" "github.com/coder/coder/v2/coderd/rbac" "github.com/coder/quartz" @@ -36,14 +37,18 @@ import ( "github.com/coder/coder/v2/testutil" ) +func TestMain(m *testing.M) { + goleak.VerifyTestMain(m, testutil.GoleakOptions...) +} + func TestExecutorAutostartOK(t *testing.T) { t.Parallel() var ( - sched = mustSchedule(t, "CRON_TZ=UTC 0 * * * *") - tickCh = make(chan time.Time) - statsCh = make(chan autobuild.Stats) - client = coderdtest.New(t, &coderdtest.Options{ + sched = mustSchedule(t, "CRON_TZ=UTC 0 * * * *") + tickCh = make(chan time.Time) + statsCh = make(chan autobuild.Stats) + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{ AutobuildTicker: tickCh, IncludeProvisionerDaemon: true, AutobuildStats: statsCh, @@ -55,10 +60,13 @@ func TestExecutorAutostartOK(t *testing.T) { ) // Given: workspace is stopped workspace = coderdtest.MustTransitionWorkspace(t, client, workspace.ID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionStop) - + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), workspace.OrganizationID, map[string]string{}) + require.NoError(t, err) // When: the autobuild executor ticks after the scheduled time go func() { - tickCh <- sched.Next(workspace.LatestBuild.CreatedAt) + tickTime := sched.Next(workspace.LatestBuild.CreatedAt) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + tickCh <- tickTime close(tickCh) }() @@ -114,8 +122,11 @@ func TestMultipleLifecycleExecutors(t *testing.T) { // Have the workspace stopped so we can perform an autostart workspace = coderdtest.MustTransitionWorkspace(t, clientA, workspace.ID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionStop) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), workspace.OrganizationID, nil) + require.NoError(t, err) // Get both clients to perform a lifecycle execution tick next := sched.Next(workspace.LatestBuild.CreatedAt) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, next) startCh := make(chan struct{}) go func() { @@ -187,14 +198,14 @@ func TestExecutorAutostartTemplateUpdated(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() var ( - sched = mustSchedule(t, "CRON_TZ=UTC 0 * * * *") - ctx = context.Background() - err error - tickCh = make(chan time.Time) - statsCh = make(chan autobuild.Stats) - logger = slogtest.Make(t, &slogtest.Options{IgnoreErrors: !tc.expectStart}).Leveled(slog.LevelDebug) - enqueuer = notificationstest.FakeEnqueuer{} - client = coderdtest.New(t, &coderdtest.Options{ + sched = mustSchedule(t, "CRON_TZ=UTC 0 * * * *") + ctx = context.Background() + err error + tickCh = make(chan time.Time) + statsCh = make(chan autobuild.Stats) + logger = slogtest.Make(t, &slogtest.Options{IgnoreErrors: !tc.expectStart}).Leveled(slog.LevelDebug) + enqueuer = notificationstest.FakeEnqueuer{} + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{ AutobuildTicker: tickCh, IncludeProvisionerDaemon: true, AutobuildStats: statsCh, @@ -247,10 +258,15 @@ func TestExecutorAutostartTemplateUpdated(t *testing.T) { }, )) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), workspace.OrganizationID, nil) + require.NoError(t, err) + t.Log("sending autobuild tick") // When: the autobuild executor ticks after the scheduled time go func() { - tickCh <- sched.Next(workspace.LatestBuild.CreatedAt) + tickTime := sched.Next(workspace.LatestBuild.CreatedAt) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + tickCh <- tickTime close(tickCh) }() @@ -414,9 +430,9 @@ func TestExecutorAutostopOK(t *testing.T) { t.Parallel() var ( - tickCh = make(chan time.Time) - statsCh = make(chan autobuild.Stats) - client = coderdtest.New(t, &coderdtest.Options{ + tickCh = make(chan time.Time) + statsCh = make(chan autobuild.Stats) + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{ AutobuildTicker: tickCh, IncludeProvisionerDaemon: true, AutobuildStats: statsCh, @@ -428,9 +444,14 @@ func TestExecutorAutostopOK(t *testing.T) { require.Equal(t, codersdk.WorkspaceTransitionStart, workspace.LatestBuild.Transition) require.NotZero(t, workspace.LatestBuild.Deadline) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), workspace.OrganizationID, nil) + require.NoError(t, err) + // When: the autobuild executor ticks *after* the deadline: go func() { - tickCh <- workspace.LatestBuild.Deadline.Time.Add(time.Minute) + tickTime := workspace.LatestBuild.Deadline.Time.Add(time.Minute) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + tickCh <- tickTime close(tickCh) }() @@ -449,10 +470,10 @@ func TestExecutorAutostopExtend(t *testing.T) { t.Parallel() var ( - ctx = context.Background() - tickCh = make(chan time.Time) - statsCh = make(chan autobuild.Stats) - client = coderdtest.New(t, &coderdtest.Options{ + ctx = context.Background() + tickCh = make(chan time.Time) + statsCh = make(chan autobuild.Stats) + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{ AutobuildTicker: tickCh, IncludeProvisionerDaemon: true, AutobuildStats: statsCh, @@ -472,9 +493,14 @@ func TestExecutorAutostopExtend(t *testing.T) { }) require.NoError(t, err, "extend workspace deadline") + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), workspace.OrganizationID, nil) + require.NoError(t, err) + // When: the autobuild executor ticks *after* the original deadline: go func() { - tickCh <- originalDeadline.Time.Add(time.Minute) + tickTime := originalDeadline.Time.Add(time.Minute) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + tickCh <- tickTime }() // Then: nothing should happen and the workspace should stay running @@ -484,7 +510,9 @@ func TestExecutorAutostopExtend(t *testing.T) { // When: the autobuild executor ticks after the *new* deadline: go func() { - tickCh <- newDeadline.Add(time.Minute) + tickTime := newDeadline.Add(time.Minute) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + tickCh <- tickTime close(tickCh) }() @@ -666,9 +694,9 @@ func TestExecuteAutostopSuspendedUser(t *testing.T) { t.Parallel() var ( - tickCh = make(chan time.Time) - statsCh = make(chan autobuild.Stats) - client = coderdtest.New(t, &coderdtest.Options{ + tickCh = make(chan time.Time) + statsCh = make(chan autobuild.Stats) + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{ AutobuildTicker: tickCh, IncludeProvisionerDaemon: true, AutobuildStats: statsCh, @@ -676,6 +704,8 @@ func TestExecuteAutostopSuspendedUser(t *testing.T) { ) admin := coderdtest.CreateFirstUser(t, client) + // Wait for provisioner to be available + coderdtest.MustWaitForAnyProvisioner(t, db) version := coderdtest.CreateTemplateVersion(t, client, admin.OrganizationID, nil) coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) template := coderdtest.CreateTemplate(t, client, admin.OrganizationID, version.ID) @@ -753,17 +783,17 @@ func TestExecutorAutostartMultipleOK(t *testing.T) { t.Parallel() var ( - sched = mustSchedule(t, "CRON_TZ=UTC 0 * * * *") - tickCh = make(chan time.Time) - tickCh2 = make(chan time.Time) - statsCh1 = make(chan autobuild.Stats) - statsCh2 = make(chan autobuild.Stats) - client = coderdtest.New(t, &coderdtest.Options{ + sched = mustSchedule(t, "CRON_TZ=UTC 0 * * * *") + tickCh = make(chan time.Time) + tickCh2 = make(chan time.Time) + statsCh1 = make(chan autobuild.Stats) + statsCh2 = make(chan autobuild.Stats) + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{ AutobuildTicker: tickCh, IncludeProvisionerDaemon: true, AutobuildStats: statsCh1, }) - _ = coderdtest.New(t, &coderdtest.Options{ + _, _ = coderdtest.NewWithDatabase(t, &coderdtest.Options{ AutobuildTicker: tickCh2, IncludeProvisionerDaemon: true, AutobuildStats: statsCh2, @@ -776,10 +806,15 @@ func TestExecutorAutostartMultipleOK(t *testing.T) { // Given: workspace is stopped workspace = coderdtest.MustTransitionWorkspace(t, client, workspace.ID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionStop) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), workspace.OrganizationID, nil) + require.NoError(t, err) + // When: the autobuild executor ticks past the scheduled time go func() { - tickCh <- sched.Next(workspace.LatestBuild.CreatedAt) - tickCh2 <- sched.Next(workspace.LatestBuild.CreatedAt) + tickTime := sched.Next(workspace.LatestBuild.CreatedAt) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + tickCh <- tickTime + tickCh2 <- tickTime close(tickCh) close(tickCh2) }() @@ -809,10 +844,10 @@ func TestExecutorAutostartWithParameters(t *testing.T) { ) var ( - sched = mustSchedule(t, "CRON_TZ=UTC 0 * * * *") - tickCh = make(chan time.Time) - statsCh = make(chan autobuild.Stats) - client = coderdtest.New(t, &coderdtest.Options{ + sched = mustSchedule(t, "CRON_TZ=UTC 0 * * * *") + tickCh = make(chan time.Time) + statsCh = make(chan autobuild.Stats) + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{ AutobuildTicker: tickCh, IncludeProvisionerDaemon: true, AutobuildStats: statsCh, @@ -841,9 +876,14 @@ func TestExecutorAutostartWithParameters(t *testing.T) { // Given: workspace is stopped workspace = coderdtest.MustTransitionWorkspace(t, client, workspace.ID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionStop) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), workspace.OrganizationID, nil) + require.NoError(t, err) + // When: the autobuild executor ticks after the scheduled time go func() { - tickCh <- sched.Next(workspace.LatestBuild.CreatedAt) + tickTime := sched.Next(workspace.LatestBuild.CreatedAt) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + tickCh <- tickTime close(tickCh) }() @@ -911,7 +951,7 @@ func TestExecutorAutostopTemplateDisabled(t *testing.T) { tickCh = make(chan time.Time) statsCh = make(chan autobuild.Stats) - client = coderdtest.New(t, &coderdtest.Options{ + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{ AutobuildTicker: tickCh, IncludeProvisionerDaemon: true, AutobuildStats: statsCh, @@ -935,9 +975,14 @@ func TestExecutorAutostopTemplateDisabled(t *testing.T) { // Then: the deadline should be set to the template default TTL assert.WithinDuration(t, workspace.LatestBuild.CreatedAt.Add(time.Hour), workspace.LatestBuild.Deadline.Time, time.Minute) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), workspace.OrganizationID, nil) + require.NoError(t, err) + // When: the autobuild executor ticks after the workspace setting, but before the template setting: go func() { - tickCh <- workspace.LatestBuild.Job.CompletedAt.Add(45 * time.Minute) + tickTime := workspace.LatestBuild.Job.CompletedAt.Add(45 * time.Minute) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + tickCh <- tickTime }() // Then: nothing should happen @@ -947,7 +992,9 @@ func TestExecutorAutostopTemplateDisabled(t *testing.T) { // When: the autobuild executor ticks after the template setting: go func() { - tickCh <- workspace.LatestBuild.Job.CompletedAt.Add(61 * time.Minute) + tickTime := workspace.LatestBuild.Job.CompletedAt.Add(61 * time.Minute) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + tickCh <- tickTime close(tickCh) }() @@ -976,6 +1023,9 @@ func TestExecutorRequireActiveVersion(t *testing.T) { TemplateScheduleStore: schedule.NewAGPLTemplateScheduleStore(), }) ) + // Wait for provisioner to be available + coderdtest.MustWaitForAnyProvisioner(t, db) + ctx := testutil.Context(t, testutil.WaitShort) owner := coderdtest.CreateFirstUser(t, ownerClient) me, err := ownerClient.User(ctx, codersdk.Me) @@ -1012,7 +1062,13 @@ func TestExecutorRequireActiveVersion(t *testing.T) { req.TemplateVersionID = inactiveVersion.ID }) require.Equal(t, inactiveVersion.ID, ws.LatestBuild.TemplateVersionID) - ticker <- sched.Next(ws.LatestBuild.CreatedAt) + + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), ws.OrganizationID, nil) + require.NoError(t, err) + + tickTime := sched.Next(ws.LatestBuild.CreatedAt) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + ticker <- tickTime stats := <-statCh require.Len(t, stats.Transitions, 1) @@ -1132,7 +1188,7 @@ func TestNotifications(t *testing.T) { statCh = make(chan autobuild.Stats) notifyEnq = notificationstest.FakeEnqueuer{} timeTilDormant = time.Minute - client = coderdtest.New(t, &coderdtest.Options{ + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{ AutobuildTicker: ticker, AutobuildStats: statCh, IncludeProvisionerDaemon: true, @@ -1169,9 +1225,14 @@ func TestNotifications(t *testing.T) { workspace = coderdtest.MustTransitionWorkspace(t, client, workspace.ID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionStop) _ = coderdtest.AwaitWorkspaceBuildJobCompleted(t, userClient, workspace.LatestBuild.ID) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), workspace.OrganizationID, nil) + require.NoError(t, err) + // Wait for workspace to become dormant notifyEnq.Clear() - ticker <- workspace.LastUsedAt.Add(timeTilDormant * 3) + tickTime := workspace.LastUsedAt.Add(timeTilDormant * 3) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + ticker <- tickTime _ = testutil.TryReceive(testutil.Context(t, testutil.WaitShort), t, statCh) // Check that the workspace is dormant @@ -1245,13 +1306,18 @@ func TestExecutorPrebuilds(t *testing.T) { require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition) require.NotZero(t, prebuild.LatestBuild.Deadline) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), prebuild.OrganizationID, nil) + require.NoError(t, err) + // When: the autobuild executor ticks *after* the deadline: go func() { - tickCh <- prebuild.LatestBuild.Deadline.Time.Add(time.Minute) + tickTime := prebuild.LatestBuild.Deadline.Time.Add(time.Minute) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + tickCh <- tickTime }() // Then: the prebuilt workspace should remain in a start transition - prebuildStats := <-statsCh + prebuildStats := testutil.RequireReceive(ctx, t, statsCh) require.Len(t, prebuildStats.Errors, 0) require.Len(t, prebuildStats.Transitions, 0) require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition) @@ -1259,17 +1325,27 @@ func TestExecutorPrebuilds(t *testing.T) { require.Equal(t, codersdk.BuildReasonInitiator, prebuild.LatestBuild.Reason) // Given: a user claims the prebuilt workspace - dbWorkspace := dbgen.ClaimPrebuild(t, db, user.ID, "claimedWorkspace-autostop", preset.ID) + dbWorkspace := dbgen.ClaimPrebuild( + t, db, + clock.Now(), + user.ID, + "claimedWorkspace-autostop", + preset.ID, + sql.NullString{}, + sql.NullTime{}, + sql.NullInt64{}) workspace := coderdtest.MustWorkspace(t, client, dbWorkspace.ID) // When: the autobuild executor ticks *after* the deadline: go func() { - tickCh <- workspace.LatestBuild.Deadline.Time.Add(time.Minute) + tickTime := workspace.LatestBuild.Deadline.Time.Add(time.Minute) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + tickCh <- tickTime close(tickCh) }() // Then: the workspace should be stopped - workspaceStats := <-statsCh + workspaceStats := testutil.RequireReceive(ctx, t, statsCh) require.Len(t, workspaceStats.Errors, 0) require.Len(t, workspaceStats.Transitions, 1) require.Contains(t, workspaceStats.Transitions, workspace.ID) @@ -1336,7 +1412,7 @@ func TestExecutorPrebuilds(t *testing.T) { }() // Then: the prebuilt workspace should remain in a stop transition - prebuildStats := <-statsCh + prebuildStats := testutil.RequireReceive(ctx, t, statsCh) require.Len(t, prebuildStats.Errors, 0) require.Len(t, prebuildStats.Transitions, 0) require.Equal(t, codersdk.WorkspaceTransitionStop, prebuild.LatestBuild.Transition) @@ -1353,7 +1429,15 @@ func TestExecutorPrebuilds(t *testing.T) { database.WorkspaceTransitionStart) // Given: a user claims the prebuilt workspace - dbWorkspace := dbgen.ClaimPrebuild(t, db, user.ID, "claimedWorkspace-autostart", preset.ID) + dbWorkspace := dbgen.ClaimPrebuild( + t, db, + clock.Now(), + user.ID, + "claimedWorkspace-autostart", + preset.ID, + autostartSched, + sql.NullTime{}, + sql.NullInt64{}) workspace := coderdtest.MustWorkspace(t, client, dbWorkspace.ID) // Given: the prebuilt workspace goes to a stop status @@ -1374,7 +1458,7 @@ func TestExecutorPrebuilds(t *testing.T) { }() // Then: the workspace should eventually be started - workspaceStats := <-statsCh + workspaceStats := testutil.RequireReceive(ctx, t, statsCh) require.Len(t, workspaceStats.Errors, 0) require.Len(t, workspaceStats.Transitions, 1) require.Contains(t, workspaceStats.Transitions, workspace.ID) @@ -1486,8 +1570,8 @@ func setupTestDBWorkspaceBuild( Architecture: "i386", OperatingSystem: "linux", LifecycleState: database.WorkspaceAgentLifecycleStateReady, - StartedAt: sql.NullTime{Time: time.Now().Add(time.Hour), Valid: true}, - ReadyAt: sql.NullTime{Time: time.Now().Add(-1 * time.Hour), Valid: true}, + StartedAt: sql.NullTime{Time: clock.Now().Add(time.Hour), Valid: true}, + ReadyAt: sql.NullTime{Time: clock.Now().Add(-1 * time.Hour), Valid: true}, APIKeyScope: database.AgentKeyScopeEnumAll, }) @@ -1524,8 +1608,9 @@ func setupTestDBPrebuiltWorkspace( OrganizationID: orgID, OwnerID: database.PrebuildsSystemUserID, Deleted: false, - CreatedAt: time.Now().Add(-time.Hour * 2), + CreatedAt: clock.Now().Add(-time.Hour * 2), AutostartSchedule: options.AutostartSchedule, + LastUsedAt: clock.Now(), }) setupTestDBWorkspaceBuild(ctx, t, clock, db, ps, orgID, workspace.ID, templateVersionID, presetID, buildTransition) @@ -1543,6 +1628,25 @@ func mustProvisionWorkspace(t *testing.T, client *codersdk.Client, mut ...func(* return coderdtest.MustWorkspace(t, client, ws.ID) } +// mustProvisionWorkspaceWithProvisionerTags creates a workspace with a template version that has specific provisioner tags +func mustProvisionWorkspaceWithProvisionerTags(t *testing.T, client *codersdk.Client, provisionerTags map[string]string, mut ...func(*codersdk.CreateWorkspaceRequest)) codersdk.Workspace { + t.Helper() + user := coderdtest.CreateFirstUser(t, client) + + // Create template version with specific provisioner tags + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil, func(request *codersdk.CreateTemplateVersionRequest) { + request.ProvisionerTags = provisionerTags + }) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + t.Logf("template version %s job has completed with provisioner tags %v", version.ID, provisionerTags) + + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + + ws := coderdtest.CreateWorkspace(t, client, template.ID, mut...) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + return coderdtest.MustWorkspace(t, client, ws.ID) +} + func mustProvisionWorkspaceWithParameters(t *testing.T, client *codersdk.Client, richParameters []*proto.RichParameter, mut ...func(*codersdk.CreateWorkspaceRequest)) codersdk.Workspace { t.Helper() user := coderdtest.CreateFirstUser(t, client) @@ -1580,6 +1684,79 @@ func mustWorkspaceParameters(t *testing.T, client *codersdk.Client, workspaceID require.NotEmpty(t, buildParameters) } -func TestMain(m *testing.M) { - goleak.VerifyTestMain(m, testutil.GoleakOptions...) +func TestExecutorAutostartSkipsWhenNoProvisionersAvailable(t *testing.T) { + t.Parallel() + + var ( + sched = mustSchedule(t, "CRON_TZ=UTC 0 * * * *") + tickCh = make(chan time.Time) + statsCh = make(chan autobuild.Stats) + ) + + // Use provisioner daemon tags so we can test `hasAvailableProvisioner` more thoroughly. + // We can't overwrite owner or scope as there's a `provisionersdk.MutateTags` function that has restrictions on those. + provisionerDaemonTags := map[string]string{"test-tag": "asdf"} + t.Logf("Setting provisioner daemon tags: %v", provisionerDaemonTags) + + db, ps := dbtestutil.NewDB(t) + client, _, api := coderdtest.NewWithAPI(t, &coderdtest.Options{ + Database: db, + Pubsub: ps, + IncludeProvisionerDaemon: false, + AutobuildTicker: tickCh, + AutobuildStats: statsCh, + }) + + daemon1Closer := coderdtest.NewTaggedProvisionerDaemon(t, api, "name", provisionerDaemonTags) + t.Cleanup(func() { + _ = daemon1Closer.Close() + }) + + // Create workspace with autostart enabled and matching provisioner tags + workspace := mustProvisionWorkspaceWithProvisionerTags(t, client, provisionerDaemonTags, func(cwr *codersdk.CreateWorkspaceRequest) { + cwr.AutostartSchedule = ptr.Ref(sched.String()) + }) + + // Stop the workspace while provisioner is available + workspace = coderdtest.MustTransitionWorkspace(t, client, workspace.ID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionStop) + + // Wait for provisioner to be available for this specific workspace + coderdtest.MustWaitForProvisionersAvailable(t, db, workspace) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), workspace.OrganizationID, provisionerDaemonTags) + require.NoError(t, err, "Error getting provisioner for workspace") + + daemon1Closer.Close() + + // Ensure the provisioner is stale + staleTime := sched.Next(workspace.LatestBuild.CreatedAt).Add((-1 * provisionerdserver.StaleInterval) + -10*time.Second) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, staleTime) + + // Trigger autobuild + tickCh <- sched.Next(workspace.LatestBuild.CreatedAt) + + stats := <-statsCh + + // This assertion should FAIL when provisioner is available (not stale), can confirm by commenting out the + // UpdateProvisionerLastSeenAt call above. + assert.Len(t, stats.Transitions, 0, "should not create builds when no provisioners available") + + daemon2Closer := coderdtest.NewTaggedProvisionerDaemon(t, api, "name", provisionerDaemonTags) + t.Cleanup(func() { + _ = daemon2Closer.Close() + }) + + // Ensure the provisioner is NOT stale, and see if we get a successful state transition. + p, err = coderdtest.GetProvisionerForTags(db, time.Now(), workspace.OrganizationID, provisionerDaemonTags) + require.NoError(t, err, "Error getting provisioner for workspace") + notStaleTime := sched.Next(workspace.LatestBuild.CreatedAt).Add((-1 * provisionerdserver.StaleInterval) + 10*time.Second) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, notStaleTime) + + // Trigger autobuild + go func() { + tickCh <- sched.Next(workspace.LatestBuild.CreatedAt) + close(tickCh) + }() + stats = <-statsCh + + assert.Len(t, stats.Transitions, 1, "should not create builds when no provisioners available") } diff --git a/coderd/coderd.go b/coderd/coderd.go index 9115888fc566b..bb6f7b4fef4e5 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -14,13 +14,16 @@ import ( "net/url" "path/filepath" "regexp" + "runtime/pprof" "strings" "sync" "sync/atomic" "time" "github.com/coder/coder/v2/coderd/oauth2provider" + "github.com/coder/coder/v2/coderd/pproflabel" "github.com/coder/coder/v2/coderd/prebuilds" + "github.com/coder/coder/v2/coderd/usage" "github.com/coder/coder/v2/coderd/wsbuilder" "github.com/andybalholm/brotli" @@ -198,6 +201,7 @@ type Options struct { TemplateScheduleStore *atomic.Pointer[schedule.TemplateScheduleStore] UserQuietHoursScheduleStore *atomic.Pointer[schedule.UserQuietHoursScheduleStore] AccessControlStore *atomic.Pointer[dbauthz.AccessControlStore] + UsageInserter *atomic.Pointer[usage.Inserter] // CoordinatorResumeTokenProvider is used to provide and validate resume // tokens issued by and passed to the coordinator DRPC API. CoordinatorResumeTokenProvider tailnet.ResumeTokenProvider @@ -323,6 +327,9 @@ func New(options *Options) *API { }) } + if options.PrometheusRegistry == nil { + options.PrometheusRegistry = prometheus.NewRegistry() + } if options.Authorizer == nil { options.Authorizer = rbac.NewCachingAuthorizer(options.PrometheusRegistry) if buildinfo.IsDev() { @@ -379,9 +386,6 @@ func New(options *Options) *API { if options.FilesRateLimit == 0 { options.FilesRateLimit = 12 } - if options.PrometheusRegistry == nil { - options.PrometheusRegistry = prometheus.NewRegistry() - } if options.Clock == nil { options.Clock = quartz.NewReal() } @@ -426,6 +430,13 @@ func New(options *Options) *API { v := schedule.NewAGPLUserQuietHoursScheduleStore() options.UserQuietHoursScheduleStore.Store(&v) } + if options.UsageInserter == nil { + options.UsageInserter = &atomic.Pointer[usage.Inserter]{} + } + if options.UsageInserter.Load() == nil { + inserter := usage.NewAGPLInserter() + options.UsageInserter.Store(&inserter) + } if options.OneTimePasscodeValidityPeriod == 0 { options.OneTimePasscodeValidityPeriod = 20 * time.Minute } @@ -588,6 +599,7 @@ func New(options *Options) *API { UserQuietHoursScheduleStore: options.UserQuietHoursScheduleStore, AccessControlStore: options.AccessControlStore, BuildUsageChecker: &buildUsageChecker, + UsageInserter: options.UsageInserter, FileCache: files.New(options.PrometheusRegistry, options.Authorizer), Experiments: experiments, WebpushDispatcher: options.WebPushDispatcher, @@ -852,6 +864,7 @@ func New(options *Options) *API { r.Use( httpmw.Recover(api.Logger), + httpmw.WithProfilingLabels, tracing.StatusWriterMiddleware, tracing.Middleware(api.TracerProvider), httpmw.AttachRequestID, @@ -992,6 +1005,17 @@ func New(options *Options) *API { r.Route("/aitasks", func(r chi.Router) { r.Get("/prompts", api.aiTasksPrompts) }) + r.Route("/tasks", func(r chi.Router) { + r.Use(apiRateLimiter) + + r.Route("/{user}", func(r chi.Router) { + r.Use(httpmw.ExtractOrganizationMembersParam(options.Database, api.HTTPAuth.Authorize)) + + r.Get("/", api.tasksList) + r.Get("/{id}", api.taskGet) + r.Post("/", api.tasksCreate) + }) + }) r.Route("/mcp", func(r chi.Router) { r.Use( httpmw.RequireExperimentWithDevBypass(api.Experiments, codersdk.ExperimentOAuth2, codersdk.ExperimentMCPServerHTTP), @@ -1339,7 +1363,13 @@ func New(options *Options) *API { ).Get("/connection", api.workspaceAgentConnectionGeneric) r.Route("/me", func(r chi.Router) { r.Use(workspaceAgentInfo) - r.Get("/rpc", api.workspaceAgentRPC) + r.Group(func(r chi.Router) { + r.Use( + // Override the request_type for agent rpc traffic. + httpmw.WithStaticProfilingLabels(pprof.Labels(pproflabel.RequestTypeTag, "agent-rpc")), + ) + r.Get("/rpc", api.workspaceAgentRPC) + }) r.Patch("/logs", api.patchWorkspaceAgentLogs) r.Patch("/app-status", api.patchWorkspaceAgentAppStatus) // Deprecated: Required to support legacy agents @@ -1413,6 +1443,13 @@ func New(options *Options) *API { r.Delete("/", api.deleteWorkspaceAgentPortShare) }) r.Get("/timings", api.workspaceTimings) + r.Route("/acl", func(r chi.Router) { + r.Use( + httpmw.RequireExperiment(api.Experiments, codersdk.ExperimentWorkspaceSharing), + ) + + r.Patch("/", api.patchWorkspaceACL) + }) }) }) r.Route("/workspacebuilds/{workspacebuild}", func(r chi.Router) { @@ -1541,6 +1578,9 @@ func New(options *Options) *API { r.Use(apiKeyMiddleware) r.Get("/", api.tailnetRPCConn) }) + r.Route("/init-script", func(r chi.Router) { + r.Get("/{os}/{arch}", api.initScript) + }) }) if options.SwaggerEndpoint { @@ -1662,6 +1702,9 @@ type API struct { // BuildUsageChecker is a pointer as it's passed around to multiple // components. BuildUsageChecker *atomic.Pointer[wsbuilder.UsageChecker] + // UsageInserter is a pointer to an atomic pointer because it is passed to + // multiple components. + UsageInserter *atomic.Pointer[usage.Inserter] UpdatesProvider tailnet.WorkspaceUpdatesProvider @@ -1877,6 +1920,7 @@ func (api *API) CreateInMemoryTaggedProvisionerDaemon(dialCtx context.Context, n &api.Auditor, api.TemplateScheduleStore, api.UserQuietHoursScheduleStore, + api.UsageInserter, api.DeploymentValues, provisionerdserver.Options{ OIDCConfig: api.OIDCConfig, diff --git a/coderd/coderdtest/coderdtest.go b/coderd/coderdtest/coderdtest.go index 7085068e97ff4..34ba84a85e33a 100644 --- a/coderd/coderdtest/coderdtest.go +++ b/coderd/coderdtest/coderdtest.go @@ -55,6 +55,7 @@ import ( "cdr.dev/slog/sloggers/slogtest" "github.com/coder/coder/v2/archive" "github.com/coder/coder/v2/coderd/files" + "github.com/coder/coder/v2/coderd/provisionerdserver" "github.com/coder/coder/v2/coderd/wsbuilder" "github.com/coder/quartz" @@ -386,6 +387,7 @@ func NewOptions(t testing.TB, options *Options) (func(http.Handler), context.Can options.NotificationsEnqueuer, experiments, ).WithStatsChannel(options.AutobuildStats) + lifecycleExecutor.Run() jobReaperTicker := time.NewTicker(options.DeploymentValues.JobReaperDetectorInterval.Value()) @@ -469,7 +471,7 @@ func NewOptions(t testing.TB, options *Options) (func(http.Handler), context.Can serverURL, err := url.Parse(srv.URL) require.NoError(t, err) - serverURL.Host = fmt.Sprintf("localhost:%d", tcpAddr.Port) + serverURL.Host = fmt.Sprintf("127.0.0.1:%d", tcpAddr.Port) derpPort, err := strconv.Atoi(serverURL.Port()) require.NoError(t, err) @@ -1590,3 +1592,112 @@ func DeploymentValues(t testing.TB, mut ...func(*codersdk.DeploymentValues)) *co } return cfg } + +// GetProvisionerForTags returns the first valid provisioner for a workspace + template tags. +func GetProvisionerForTags(tx database.Store, curTime time.Time, orgID uuid.UUID, tags map[string]string) (database.ProvisionerDaemon, error) { + if tags == nil { + tags = map[string]string{} + } + queryParams := database.GetProvisionerDaemonsByOrganizationParams{ + OrganizationID: orgID, + WantTags: tags, + } + + // nolint: gocritic // The user (in this case, the user/context for autostart builds) may not have the full + // permissions to read provisioner daemons, but we need to check if there's any for the job prior to the + // execution of the job via autostart to fix: https://github.com/coder/coder/issues/17941 + provisionerDaemons, err := tx.GetProvisionerDaemonsByOrganization(dbauthz.AsSystemReadProvisionerDaemons(context.Background()), queryParams) + if err != nil { + return database.ProvisionerDaemon{}, xerrors.Errorf("get provisioner daemons: %w", err) + } + + // Check if any provisioners are active (not stale) + for _, pd := range provisionerDaemons { + if pd.LastSeenAt.Valid { + age := curTime.Sub(pd.LastSeenAt.Time) + if age <= provisionerdserver.StaleInterval { + return pd, nil + } + } + } + return database.ProvisionerDaemon{}, xerrors.New("no available provisioners found") +} + +func ctxWithProvisionerPermissions(ctx context.Context) context.Context { + // Use system restricted context which has permissions to update provisioner daemons + //nolint: gocritic // We need system context to modify this. + return dbauthz.AsSystemRestricted(ctx) +} + +// UpdateProvisionerLastSeenAt updates the provisioner daemon's LastSeenAt timestamp +// to the specified time to prevent it from appearing stale during autobuild operations +func UpdateProvisionerLastSeenAt(t *testing.T, db database.Store, id uuid.UUID, tickTime time.Time) { + t.Helper() + ctx := ctxWithProvisionerPermissions(context.Background()) + t.Logf("Updating provisioner %s LastSeenAt to %v", id, tickTime) + err := db.UpdateProvisionerDaemonLastSeenAt(ctx, database.UpdateProvisionerDaemonLastSeenAtParams{ + ID: id, + LastSeenAt: sql.NullTime{Time: tickTime, Valid: true}, + }) + require.NoError(t, err) + t.Logf("Successfully updated provisioner LastSeenAt") +} + +func MustWaitForAnyProvisioner(t *testing.T, db database.Store) { + t.Helper() + ctx := ctxWithProvisionerPermissions(testutil.Context(t, testutil.WaitShort)) + require.Eventually(t, func() bool { + daemons, err := db.GetProvisionerDaemons(ctx) + return err == nil && len(daemons) > 0 + }, testutil.WaitShort, testutil.IntervalFast) +} + +// MustWaitForProvisionersAvailable waits for provisioners to be available for a specific workspace. +func MustWaitForProvisionersAvailable(t *testing.T, db database.Store, workspace codersdk.Workspace) uuid.UUID { + t.Helper() + ctx := ctxWithProvisionerPermissions(testutil.Context(t, testutil.WaitShort)) + id := uuid.UUID{} + // Get the workspace from the database + require.Eventually(t, func() bool { + ws, err := db.GetWorkspaceByID(ctx, workspace.ID) + if err != nil { + return false + } + + // Get the latest build + latestBuild, err := db.GetWorkspaceBuildByID(ctx, workspace.LatestBuild.ID) + if err != nil { + return false + } + + // Get the template version job + templateVersionJob, err := db.GetProvisionerJobByID(ctx, latestBuild.JobID) + if err != nil { + return false + } + + // Check if provisioners are available using the same logic as hasAvailableProvisioners + provisionerDaemons, err := db.GetProvisionerDaemonsByOrganization(ctx, database.GetProvisionerDaemonsByOrganizationParams{ + OrganizationID: ws.OrganizationID, + WantTags: templateVersionJob.Tags, + }) + if err != nil { + return false + } + + // Check if any provisioners are active (not stale) + now := time.Now() + for _, pd := range provisionerDaemons { + if pd.LastSeenAt.Valid { + age := now.Sub(pd.LastSeenAt.Time) + if age <= provisionerdserver.StaleInterval { + id = pd.ID + return true // Found an active provisioner + } + } + } + return false // No active provisioners found + }, testutil.WaitLong, testutil.IntervalFast) + + return id +} diff --git a/coderd/coderdtest/swaggerparser.go b/coderd/coderdtest/swaggerparser.go index d7d46711a9df6..b94473ee83bda 100644 --- a/coderd/coderdtest/swaggerparser.go +++ b/coderd/coderdtest/swaggerparser.go @@ -310,7 +310,8 @@ func assertSecurityDefined(t *testing.T, comment SwaggerComment) { comment.router == "/" || comment.router == "/users/login" || comment.router == "/users/otp/request" || - comment.router == "/users/otp/change-password" { + comment.router == "/users/otp/change-password" || + comment.router == "/init-script/{os}/{arch}" { return // endpoints do not require authorization } assert.Containsf(t, authorizedSecurityTags, comment.security, "@Security must be either of these options: %v", authorizedSecurityTags) @@ -360,7 +361,9 @@ func assertProduce(t *testing.T, comment SwaggerComment) { (comment.router == "/workspaceagents/me/startup/logs" && comment.method == "patch") || (comment.router == "/licenses/{id}" && comment.method == "delete") || (comment.router == "/debug/coordinator" && comment.method == "get") || - (comment.router == "/debug/tailnet" && comment.method == "get") { + (comment.router == "/debug/tailnet" && comment.method == "get") || + (comment.router == "/workspaces/{workspace}/acl" && comment.method == "patch") || + (comment.router == "/init-script/{os}/{arch}" && comment.method == "get") { return // Exception: HTTP 200 is returned without response entity } diff --git a/coderd/database/check_constraint.go b/coderd/database/check_constraint.go new file mode 100644 index 0000000000000..ac204f85f5603 --- /dev/null +++ b/coderd/database/check_constraint.go @@ -0,0 +1,18 @@ +// Code generated by scripts/dbgen/main.go. DO NOT EDIT. +package database + +// CheckConstraint represents a named check constraint on a table. +type CheckConstraint string + +// CheckConstraint enums. +const ( + CheckOneTimePasscodeSet CheckConstraint = "one_time_passcode_set" // users + CheckUsersUsernameMinLength CheckConstraint = "users_username_min_length" // users + CheckMaxProvisionerLogsLength CheckConstraint = "max_provisioner_logs_length" // provisioner_jobs + CheckValidationMonotonicOrder CheckConstraint = "validation_monotonic_order" // template_version_parameters + CheckUsageEventTypeCheck CheckConstraint = "usage_event_type_check" // usage_events + CheckMaxLogsLength CheckConstraint = "max_logs_length" // workspace_agents + CheckSubsystemsNotNone CheckConstraint = "subsystems_not_none" // workspace_agents + CheckWorkspaceBuildsAiTaskSidebarAppIDRequired CheckConstraint = "workspace_builds_ai_task_sidebar_app_id_required" // workspace_builds + CheckWorkspaceBuildsDeadlineBelowMaxDeadline CheckConstraint = "workspace_builds_deadline_below_max_deadline" // workspace_builds +) diff --git a/coderd/database/db2sdk/db2sdk.go b/coderd/database/db2sdk/db2sdk.go index 320a90b09430b..48f6ff44af70f 100644 --- a/coderd/database/db2sdk/db2sdk.go +++ b/coderd/database/db2sdk/db2sdk.go @@ -24,6 +24,7 @@ import ( "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/render" "github.com/coder/coder/v2/coderd/util/ptr" + "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/coderd/workspaceapps/appurl" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/provisionersdk/proto" @@ -781,6 +782,29 @@ func TemplateRoleActions(role codersdk.TemplateRole) []policy.Action { return []policy.Action{} } +func WorkspaceRoleActions(role codersdk.WorkspaceRole) []policy.Action { + switch role { + case codersdk.WorkspaceRoleAdmin: + return slice.Omit( + // Small note: This intentionally includes "create" because it's sort of + // double purposed as "can edit ACL". That's maybe a bit "incorrect", but + // it's what templates do already and we're copying that implementation. + rbac.ResourceWorkspace.AvailableActions(), + // Don't let anyone delete something they can't recreate. + policy.ActionDelete, + ) + case codersdk.WorkspaceRoleUse: + return []policy.Action{ + policy.ActionApplicationConnect, + policy.ActionRead, + policy.ActionSSH, + policy.ActionWorkspaceStart, + policy.ActionWorkspaceStop, + } + } + return []policy.Action{} +} + func ConnectionLogConnectionTypeFromAgentProtoConnectionType(typ agentproto.Connection_Type) (database.ConnectionType, error) { switch typ { case agentproto.Connection_SSH: diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index 257cbc6e6b142..94e60db47cb30 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -213,6 +213,8 @@ var ( // Provisionerd creates workspaces resources monitor rbac.ResourceWorkspaceAgentResourceMonitor.Type: {policy.ActionCreate}, rbac.ResourceWorkspaceAgentDevcontainers.Type: {policy.ActionCreate}, + // Provisionerd creates usage events + rbac.ResourceUsageEvent.Type: {policy.ActionCreate}, }), Org: map[string][]rbac.Permission{}, User: []rbac.Permission{}, @@ -509,6 +511,27 @@ var ( }), Scope: rbac.ScopeAll, }.WithCachedASTValue() + + subjectUsagePublisher = rbac.Subject{ + Type: rbac.SubjectTypeUsagePublisher, + FriendlyName: "Usage Publisher", + ID: uuid.Nil.String(), + Roles: rbac.Roles([]rbac.Role{ + { + Identifier: rbac.RoleIdentifier{Name: "usage-publisher"}, + DisplayName: "Usage Publisher", + Site: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceLicense.Type: {policy.ActionRead}, + // The usage publisher doesn't create events, just + // reads/processes them. + rbac.ResourceUsageEvent.Type: {policy.ActionRead, policy.ActionUpdate}, + }), + Org: map[string][]rbac.Permission{}, + User: []rbac.Permission{}, + }, + }), + Scope: rbac.ScopeAll, + }.WithCachedASTValue() ) // AsProvisionerd returns a context with an actor that has permissions required @@ -579,10 +602,18 @@ func AsPrebuildsOrchestrator(ctx context.Context) context.Context { return As(ctx, subjectPrebuildsOrchestrator) } +// AsFileReader returns a context with an actor that has permissions required +// for reading all files. func AsFileReader(ctx context.Context) context.Context { return As(ctx, subjectFileReader) } +// AsUsagePublisher returns a context with an actor that has permissions +// required for creating, reading, and updating usage events. +func AsUsagePublisher(ctx context.Context) context.Context { + return As(ctx, subjectUsagePublisher) +} + var AsRemoveActor = rbac.Subject{ ID: "remove-actor", } @@ -1387,6 +1418,14 @@ func (q *querier) CountUnreadInboxNotificationsByUserID(ctx context.Context, use return q.db.CountUnreadInboxNotificationsByUserID(ctx, userID) } +func (q *querier) CreateUserSecret(ctx context.Context, arg database.CreateUserSecretParams) (database.UserSecret, error) { + obj := rbac.ResourceUserSecret.WithOwner(arg.UserID.String()) + if err := q.authorizeContext(ctx, policy.ActionCreate, obj); err != nil { + return database.UserSecret{}, err + } + return q.db.CreateUserSecret(ctx, arg) +} + // TODO: Handle org scoped lookups func (q *querier) CustomRoles(ctx context.Context, arg database.CustomRolesParams) ([]database.CustomRole, error) { roleObject := rbac.ResourceAssignRole @@ -1657,6 +1696,19 @@ func (q *querier) DeleteTailnetTunnel(ctx context.Context, arg database.DeleteTa return q.db.DeleteTailnetTunnel(ctx, arg) } +func (q *querier) DeleteUserSecret(ctx context.Context, id uuid.UUID) error { + // First get the secret to check ownership + secret, err := q.GetUserSecret(ctx, id) + if err != nil { + return err + } + + if err := q.authorizeContext(ctx, policy.ActionDelete, secret); err != nil { + return err + } + return q.db.DeleteUserSecret(ctx, id) +} + func (q *querier) DeleteWebpushSubscriptionByUserIDAndEndpoint(ctx context.Context, arg database.DeleteWebpushSubscriptionByUserIDAndEndpointParams) error { if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceWebpushSubscription.WithOwner(arg.UserID.String())); err != nil { return err @@ -1789,6 +1841,14 @@ func (q *querier) FetchVolumesResourceMonitorsUpdatedAfter(ctx context.Context, return q.db.FetchVolumesResourceMonitorsUpdatedAfter(ctx, updatedAt) } +func (q *querier) FindMatchingPresetID(ctx context.Context, arg database.FindMatchingPresetIDParams) (uuid.UUID, error) { + _, err := q.GetTemplateVersionByID(ctx, arg.TemplateVersionID) + if err != nil { + return uuid.Nil, err + } + return q.db.FindMatchingPresetID(ctx, arg) +} + func (q *querier) GetAPIKeyByID(ctx context.Context, id string) (database.APIKey, error) { return fetch(q.log, q.auth, q.db.GetAPIKeyByID)(ctx, id) } @@ -2157,17 +2217,6 @@ func (q *querier) GetLatestWorkspaceBuildByWorkspaceID(ctx context.Context, work return q.db.GetLatestWorkspaceBuildByWorkspaceID(ctx, workspaceID) } -func (q *querier) GetLatestWorkspaceBuilds(ctx context.Context) ([]database.WorkspaceBuild, error) { - // This function is a system function until we implement a join for workspace builds. - // This is because we need to query for all related workspaces to the returned builds. - // This is a very inefficient method of fetching the latest workspace builds. - // We should just join the rbac properties. - if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { - return nil, err - } - return q.db.GetLatestWorkspaceBuilds(ctx) -} - func (q *querier) GetLatestWorkspaceBuildsByWorkspaceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceBuild, error) { // This function is a system function until we implement a join for workspace builds. if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { @@ -2853,6 +2902,17 @@ func (q *querier) GetTemplateVersionByTemplateIDAndName(ctx context.Context, arg return tv, nil } +func (q *querier) GetTemplateVersionHasAITask(ctx context.Context, id uuid.UUID) (bool, error) { + // If we can successfully call `GetTemplateVersionByID`, then + // we know the actor has sufficient permissions to know if the + // template has an AI task. + if _, err := q.GetTemplateVersionByID(ctx, id); err != nil { + return false, err + } + + return q.db.GetTemplateVersionHasAITask(ctx, id) +} + func (q *querier) GetTemplateVersionParameters(ctx context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionParameter, error) { // An actor can read template version parameters if they can read the related template. tv, err := q.db.GetTemplateVersionByID(ctx, templateVersionID) @@ -2982,7 +3042,7 @@ func (q *querier) GetTemplatesWithFilter(ctx context.Context, arg database.GetTe } func (q *querier) GetUnexpiredLicenses(ctx context.Context) ([]database.License, error) { - if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceLicense); err != nil { return nil, err } return q.db.GetUnexpiredLicenses(ctx) @@ -3075,6 +3135,28 @@ func (q *querier) GetUserNotificationPreferences(ctx context.Context, userID uui return q.db.GetUserNotificationPreferences(ctx, userID) } +func (q *querier) GetUserSecret(ctx context.Context, id uuid.UUID) (database.UserSecret, error) { + // First get the secret to check ownership + secret, err := q.db.GetUserSecret(ctx, id) + if err != nil { + return database.UserSecret{}, err + } + + if err := q.authorizeContext(ctx, policy.ActionRead, secret); err != nil { + return database.UserSecret{}, err + } + return secret, nil +} + +func (q *querier) GetUserSecretByUserIDAndName(ctx context.Context, arg database.GetUserSecretByUserIDAndNameParams) (database.UserSecret, error) { + obj := rbac.ResourceUserSecret.WithOwner(arg.UserID.String()) + if err := q.authorizeContext(ctx, policy.ActionRead, obj); err != nil { + return database.UserSecret{}, err + } + + return q.db.GetUserSecretByUserIDAndName(ctx, arg) +} + func (q *querier) GetUserStatusCounts(ctx context.Context, arg database.GetUserStatusCountsParams) ([]database.GetUserStatusCountsRow, error) { if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceUser); err != nil { return nil, err @@ -3617,11 +3699,6 @@ func (q *querier) GetWorkspacesEligibleForTransition(ctx context.Context, now ti return q.db.GetWorkspacesEligibleForTransition(ctx, now) } -func (q *querier) HasTemplateVersionsWithAITask(ctx context.Context) (bool, error) { - // Anyone can call HasTemplateVersionsWithAITask. - return q.db.HasTemplateVersionsWithAITask(ctx) -} - func (q *querier) InsertAPIKey(ctx context.Context, arg database.InsertAPIKeyParams) (database.APIKey, error) { return insert(q.log, q.auth, rbac.ResourceApiKey.WithOwner(arg.UserID.String()), @@ -3913,6 +3990,13 @@ func (q *querier) InsertTemplateVersionWorkspaceTag(ctx context.Context, arg dat return q.db.InsertTemplateVersionWorkspaceTag(ctx, arg) } +func (q *querier) InsertUsageEvent(ctx context.Context, arg database.InsertUsageEventParams) error { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceUsageEvent); err != nil { + return err + } + return q.db.InsertUsageEvent(ctx, arg) +} + func (q *querier) InsertUser(ctx context.Context, arg database.InsertUserParams) (database.User, error) { // Always check if the assigned roles can actually be assigned by this actor. impliedRoles := append([]rbac.RoleIdentifier{rbac.RoleMember()}, q.convertToDeploymentRoles(arg.RBACRoles)...) @@ -4158,6 +4242,14 @@ func (q *querier) ListProvisionerKeysByOrganizationExcludeReserved(ctx context.C return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.ListProvisionerKeysByOrganizationExcludeReserved)(ctx, organizationID) } +func (q *querier) ListUserSecrets(ctx context.Context, userID uuid.UUID) ([]database.UserSecret, error) { + obj := rbac.ResourceUserSecret.WithOwner(userID.String()) + if err := q.authorizeContext(ctx, policy.ActionRead, obj); err != nil { + return nil, err + } + return q.db.ListUserSecrets(ctx, userID) +} + func (q *querier) ListWorkspaceAgentPortShares(ctx context.Context, workspaceID uuid.UUID) ([]database.WorkspaceAgentPortShare, error) { workspace, err := q.db.GetWorkspaceByID(ctx, workspaceID) if err != nil { @@ -4260,6 +4352,14 @@ func (q *querier) RevokeDBCryptKey(ctx context.Context, activeKeyDigest string) return q.db.RevokeDBCryptKey(ctx, activeKeyDigest) } +func (q *querier) SelectUsageEventsForPublishing(ctx context.Context, arg time.Time) ([]database.UsageEvent, error) { + // ActionUpdate because we're updating the publish_started_at column. + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceUsageEvent); err != nil { + return nil, err + } + return q.db.SelectUsageEventsForPublishing(ctx, arg) +} + func (q *querier) TryAcquireLock(ctx context.Context, id int64) (bool, error) { return q.db.TryAcquireLock(ctx, id) } @@ -4489,6 +4589,22 @@ func (q *querier) UpdateProvisionerJobByID(ctx context.Context, arg database.Upd return q.db.UpdateProvisionerJobByID(ctx, arg) } +func (q *querier) UpdateProvisionerJobLogsLength(ctx context.Context, arg database.UpdateProvisionerJobLogsLengthParams) error { + // Not sure what the rbac should be here, going with this for now + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceProvisionerJobs); err != nil { + return err + } + return q.db.UpdateProvisionerJobLogsLength(ctx, arg) +} + +func (q *querier) UpdateProvisionerJobLogsOverflowed(ctx context.Context, arg database.UpdateProvisionerJobLogsOverflowedParams) error { + // Not sure what the rbac should be here, going with this for now + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceProvisionerJobs); err != nil { + return err + } + return q.db.UpdateProvisionerJobLogsOverflowed(ctx, arg) +} + func (q *querier) UpdateProvisionerJobWithCancelByID(ctx context.Context, arg database.UpdateProvisionerJobWithCancelByIDParams) error { // TODO: Remove this once we have a proper rbac check for provisioner jobs. // Details in https://github.com/coder/coder/issues/16160 @@ -4629,9 +4745,9 @@ func (q *querier) UpdateTemplateScheduleByID(ctx context.Context, arg database.U return update(q.log, q.auth, fetch, q.db.UpdateTemplateScheduleByID)(ctx, arg) } -func (q *querier) UpdateTemplateVersionAITaskByJobID(ctx context.Context, arg database.UpdateTemplateVersionAITaskByJobIDParams) error { - // An actor is allowed to update the template version AI task flag if they are authorized to update the template. - tv, err := q.db.GetTemplateVersionByJobID(ctx, arg.JobID) +func (q *querier) UpdateTemplateVersionByID(ctx context.Context, arg database.UpdateTemplateVersionByIDParams) error { + // An actor is allowed to update the template version if they are authorized to update the template. + tv, err := q.db.GetTemplateVersionByID(ctx, arg.ID) if err != nil { return err } @@ -4648,12 +4764,12 @@ func (q *querier) UpdateTemplateVersionAITaskByJobID(ctx context.Context, arg da if err := q.authorizeContext(ctx, policy.ActionUpdate, obj); err != nil { return err } - return q.db.UpdateTemplateVersionAITaskByJobID(ctx, arg) + return q.db.UpdateTemplateVersionByID(ctx, arg) } -func (q *querier) UpdateTemplateVersionByID(ctx context.Context, arg database.UpdateTemplateVersionByIDParams) error { - // An actor is allowed to update the template version if they are authorized to update the template. - tv, err := q.db.GetTemplateVersionByID(ctx, arg.ID) +func (q *querier) UpdateTemplateVersionDescriptionByJobID(ctx context.Context, arg database.UpdateTemplateVersionDescriptionByJobIDParams) error { + // An actor is allowed to update the template version description if they are authorized to update the template. + tv, err := q.db.GetTemplateVersionByJobID(ctx, arg.JobID) if err != nil { return err } @@ -4670,11 +4786,11 @@ func (q *querier) UpdateTemplateVersionByID(ctx context.Context, arg database.Up if err := q.authorizeContext(ctx, policy.ActionUpdate, obj); err != nil { return err } - return q.db.UpdateTemplateVersionByID(ctx, arg) + return q.db.UpdateTemplateVersionDescriptionByJobID(ctx, arg) } -func (q *querier) UpdateTemplateVersionDescriptionByJobID(ctx context.Context, arg database.UpdateTemplateVersionDescriptionByJobIDParams) error { - // An actor is allowed to update the template version description if they are authorized to update the template. +func (q *querier) UpdateTemplateVersionExternalAuthProvidersByJobID(ctx context.Context, arg database.UpdateTemplateVersionExternalAuthProvidersByJobIDParams) error { + // An actor is allowed to update the template version external auth providers if they are authorized to update the template. tv, err := q.db.GetTemplateVersionByJobID(ctx, arg.JobID) if err != nil { return err @@ -4692,11 +4808,11 @@ func (q *querier) UpdateTemplateVersionDescriptionByJobID(ctx context.Context, a if err := q.authorizeContext(ctx, policy.ActionUpdate, obj); err != nil { return err } - return q.db.UpdateTemplateVersionDescriptionByJobID(ctx, arg) + return q.db.UpdateTemplateVersionExternalAuthProvidersByJobID(ctx, arg) } -func (q *querier) UpdateTemplateVersionExternalAuthProvidersByJobID(ctx context.Context, arg database.UpdateTemplateVersionExternalAuthProvidersByJobIDParams) error { - // An actor is allowed to update the template version external auth providers if they are authorized to update the template. +func (q *querier) UpdateTemplateVersionFlagsByJobID(ctx context.Context, arg database.UpdateTemplateVersionFlagsByJobIDParams) error { + // An actor is allowed to update the template version ai task and external agent flag if they are authorized to update the template. tv, err := q.db.GetTemplateVersionByJobID(ctx, arg.JobID) if err != nil { return err @@ -4714,7 +4830,7 @@ func (q *querier) UpdateTemplateVersionExternalAuthProvidersByJobID(ctx context. if err := q.authorizeContext(ctx, policy.ActionUpdate, obj); err != nil { return err } - return q.db.UpdateTemplateVersionExternalAuthProvidersByJobID(ctx, arg) + return q.db.UpdateTemplateVersionFlagsByJobID(ctx, arg) } func (q *querier) UpdateTemplateWorkspacesLastUsedAt(ctx context.Context, arg database.UpdateTemplateWorkspacesLastUsedAtParams) error { @@ -4725,6 +4841,13 @@ func (q *querier) UpdateTemplateWorkspacesLastUsedAt(ctx context.Context, arg da return fetchAndExec(q.log, q.auth, policy.ActionUpdate, fetch, q.db.UpdateTemplateWorkspacesLastUsedAt)(ctx, arg) } +func (q *querier) UpdateUsageEventsPostPublish(ctx context.Context, arg database.UpdateUsageEventsPostPublishParams) error { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceUsageEvent); err != nil { + return err + } + return q.db.UpdateUsageEventsPostPublish(ctx, arg) +} + func (q *querier) UpdateUserDeletedByID(ctx context.Context, id uuid.UUID) error { return deleteQ(q.log, q.auth, q.db.GetUserByID, q.db.UpdateUserDeletedByID)(ctx, id) } @@ -4855,6 +4978,19 @@ func (q *querier) UpdateUserRoles(ctx context.Context, arg database.UpdateUserRo return q.db.UpdateUserRoles(ctx, arg) } +func (q *querier) UpdateUserSecret(ctx context.Context, arg database.UpdateUserSecretParams) (database.UserSecret, error) { + // First get the secret to check ownership + secret, err := q.db.GetUserSecret(ctx, arg.ID) + if err != nil { + return database.UserSecret{}, err + } + + if err := q.authorizeContext(ctx, policy.ActionUpdate, secret); err != nil { + return database.UserSecret{}, err + } + return q.db.UpdateUserSecret(ctx, arg) +} + func (q *querier) UpdateUserStatus(ctx context.Context, arg database.UpdateUserStatusParams) (database.User, error) { fetch := func(ctx context.Context, arg database.UpdateUserStatusParams) (database.User, error) { return q.db.GetUserByID(ctx, arg.ID) @@ -4903,6 +5039,18 @@ func (q *querier) UpdateWorkspace(ctx context.Context, arg database.UpdateWorksp return updateWithReturn(q.log, q.auth, fetch, q.db.UpdateWorkspace)(ctx, arg) } +func (q *querier) UpdateWorkspaceACLByID(ctx context.Context, arg database.UpdateWorkspaceACLByIDParams) error { + fetch := func(ctx context.Context, arg database.UpdateWorkspaceACLByIDParams) (database.WorkspaceTable, error) { + w, err := q.db.GetWorkspaceByID(ctx, arg.ID) + if err != nil { + return database.WorkspaceTable{}, err + } + return w.WorkspaceTable(), nil + } + + return fetchAndExec(q.log, q.auth, policy.ActionCreate, fetch, q.db.UpdateWorkspaceACLByID)(ctx, arg) +} + func (q *querier) UpdateWorkspaceAgentConnectionByID(ctx context.Context, arg database.UpdateWorkspaceAgentConnectionByIDParams) error { if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return err @@ -5007,7 +5155,15 @@ func (q *querier) UpdateWorkspaceAutostart(ctx context.Context, arg database.Upd return update(q.log, q.auth, fetch, q.db.UpdateWorkspaceAutostart)(ctx, arg) } -func (q *querier) UpdateWorkspaceBuildAITaskByID(ctx context.Context, arg database.UpdateWorkspaceBuildAITaskByIDParams) error { +// UpdateWorkspaceBuildCostByID is used by the provisioning system to update the cost of a workspace build. +func (q *querier) UpdateWorkspaceBuildCostByID(ctx context.Context, arg database.UpdateWorkspaceBuildCostByIDParams) error { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { + return err + } + return q.db.UpdateWorkspaceBuildCostByID(ctx, arg) +} + +func (q *querier) UpdateWorkspaceBuildDeadlineByID(ctx context.Context, arg database.UpdateWorkspaceBuildDeadlineByIDParams) error { build, err := q.db.GetWorkspaceBuildByID(ctx, arg.ID) if err != nil { return err @@ -5022,18 +5178,10 @@ func (q *querier) UpdateWorkspaceBuildAITaskByID(ctx context.Context, arg databa if err != nil { return err } - return q.db.UpdateWorkspaceBuildAITaskByID(ctx, arg) -} - -// UpdateWorkspaceBuildCostByID is used by the provisioning system to update the cost of a workspace build. -func (q *querier) UpdateWorkspaceBuildCostByID(ctx context.Context, arg database.UpdateWorkspaceBuildCostByIDParams) error { - if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { - return err - } - return q.db.UpdateWorkspaceBuildCostByID(ctx, arg) + return q.db.UpdateWorkspaceBuildDeadlineByID(ctx, arg) } -func (q *querier) UpdateWorkspaceBuildDeadlineByID(ctx context.Context, arg database.UpdateWorkspaceBuildDeadlineByIDParams) error { +func (q *querier) UpdateWorkspaceBuildFlagsByID(ctx context.Context, arg database.UpdateWorkspaceBuildFlagsByIDParams) error { build, err := q.db.GetWorkspaceBuildByID(ctx, arg.ID) if err != nil { return err @@ -5048,7 +5196,7 @@ func (q *querier) UpdateWorkspaceBuildDeadlineByID(ctx context.Context, arg data if err != nil { return err } - return q.db.UpdateWorkspaceBuildDeadlineByID(ctx, arg) + return q.db.UpdateWorkspaceBuildFlagsByID(ctx, arg) } func (q *querier) UpdateWorkspaceBuildProvisionerStateByID(ctx context.Context, arg database.UpdateWorkspaceBuildProvisionerStateByIDParams) error { @@ -5353,6 +5501,26 @@ func (q *querier) UpsertWorkspaceAppAuditSession(ctx context.Context, arg databa return q.db.UpsertWorkspaceAppAuditSession(ctx, arg) } +func (q *querier) ValidateGroupIDs(ctx context.Context, groupIDs []uuid.UUID) (database.ValidateGroupIDsRow, error) { + // This check is probably overly restrictive, but the "correct" check isn't + // necessarily obvious. It's only used as a verification check for ACLs right + // now, which are performed as system. + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { + return database.ValidateGroupIDsRow{}, err + } + return q.db.ValidateGroupIDs(ctx, groupIDs) +} + +func (q *querier) ValidateUserIDs(ctx context.Context, userIDs []uuid.UUID) (database.ValidateUserIDsRow, error) { + // This check is probably overly restrictive, but the "correct" check isn't + // necessarily obvious. It's only used as a verification check for ACLs right + // now, which are performed as system. + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { + return database.ValidateUserIDsRow{}, err + } + return q.db.ValidateUserIDs(ctx, userIDs) +} + func (q *querier) GetAuthorizedTemplates(ctx context.Context, arg database.GetTemplatesWithFilterParams, _ rbac.PreparedAuthorized) ([]database.Template, error) { // TODO Delete this function, all GetTemplates should be authorized. For now just call getTemplates on the authz querier. return q.GetTemplatesWithFilter(ctx, arg) diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go index bcf0caa95c365..971335c34019b 100644 --- a/coderd/database/dbauthz/dbauthz_test.go +++ b/coderd/database/dbauthz/dbauthz_test.go @@ -7,13 +7,14 @@ import ( "fmt" "net" "reflect" - "strings" "testing" "time" + "github.com/brianvoe/gofakeit/v7" "github.com/google/uuid" "github.com/sqlc-dev/pqtype" "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" "golang.org/x/xerrors" "cdr.dev/slog" @@ -22,6 +23,7 @@ import ( "github.com/coder/coder/v2/coderd/database/db2sdk" "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbgen" + "github.com/coder/coder/v2/coderd/database/dbmock" "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/notifications" @@ -204,270 +206,171 @@ func defaultIPAddress() pqtype.Inet { } func (s *MethodTestSuite) TestAPIKey() { - s.Run("DeleteAPIKeyByID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - key, _ := dbgen.APIKey(s.T(), db, database.APIKey{}) + s.Run("DeleteAPIKeyByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + key := testutil.Fake(s.T(), faker, database.APIKey{}) + dbm.EXPECT().GetAPIKeyByID(gomock.Any(), key.ID).Return(key, nil).AnyTimes() + dbm.EXPECT().DeleteAPIKeyByID(gomock.Any(), key.ID).Return(nil).AnyTimes() check.Args(key.ID).Asserts(key, policy.ActionDelete).Returns() })) - s.Run("GetAPIKeyByID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - key, _ := dbgen.APIKey(s.T(), db, database.APIKey{}) + s.Run("GetAPIKeyByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + key := testutil.Fake(s.T(), faker, database.APIKey{}) + dbm.EXPECT().GetAPIKeyByID(gomock.Any(), key.ID).Return(key, nil).AnyTimes() check.Args(key.ID).Asserts(key, policy.ActionRead).Returns(key) })) - s.Run("GetAPIKeyByName", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - key, _ := dbgen.APIKey(s.T(), db, database.APIKey{ - TokenName: "marge-cat", - LoginType: database.LoginTypeToken, - }) - check.Args(database.GetAPIKeyByNameParams{ - TokenName: key.TokenName, - UserID: key.UserID, - }).Asserts(key, policy.ActionRead).Returns(key) + s.Run("GetAPIKeyByName", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + key := testutil.Fake(s.T(), faker, database.APIKey{LoginType: database.LoginTypeToken, TokenName: "marge-cat"}) + dbm.EXPECT().GetAPIKeyByName(gomock.Any(), database.GetAPIKeyByNameParams{TokenName: key.TokenName, UserID: key.UserID}).Return(key, nil).AnyTimes() + check.Args(database.GetAPIKeyByNameParams{TokenName: key.TokenName, UserID: key.UserID}).Asserts(key, policy.ActionRead).Returns(key) })) - s.Run("GetAPIKeysByLoginType", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - a, _ := dbgen.APIKey(s.T(), db, database.APIKey{LoginType: database.LoginTypePassword}) - b, _ := dbgen.APIKey(s.T(), db, database.APIKey{LoginType: database.LoginTypePassword}) - _, _ = dbgen.APIKey(s.T(), db, database.APIKey{LoginType: database.LoginTypeGithub}) - check.Args(database.LoginTypePassword). - Asserts(a, policy.ActionRead, b, policy.ActionRead). - Returns(slice.New(a, b)) + s.Run("GetAPIKeysByLoginType", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + a := testutil.Fake(s.T(), faker, database.APIKey{LoginType: database.LoginTypePassword}) + b := testutil.Fake(s.T(), faker, database.APIKey{LoginType: database.LoginTypePassword}) + dbm.EXPECT().GetAPIKeysByLoginType(gomock.Any(), database.LoginTypePassword).Return([]database.APIKey{a, b}, nil).AnyTimes() + check.Args(database.LoginTypePassword).Asserts(a, policy.ActionRead, b, policy.ActionRead).Returns(slice.New(a, b)) })) - s.Run("GetAPIKeysByUserID", s.Subtest(func(db database.Store, check *expects) { - u1 := dbgen.User(s.T(), db, database.User{}) - u2 := dbgen.User(s.T(), db, database.User{}) - - keyA, _ := dbgen.APIKey(s.T(), db, database.APIKey{UserID: u1.ID, LoginType: database.LoginTypeToken, TokenName: "key-a"}) - keyB, _ := dbgen.APIKey(s.T(), db, database.APIKey{UserID: u1.ID, LoginType: database.LoginTypeToken, TokenName: "key-b"}) - _, _ = dbgen.APIKey(s.T(), db, database.APIKey{UserID: u2.ID, LoginType: database.LoginTypeToken}) + s.Run("GetAPIKeysByUserID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u1 := testutil.Fake(s.T(), faker, database.User{}) + keyA := testutil.Fake(s.T(), faker, database.APIKey{UserID: u1.ID, LoginType: database.LoginTypeToken, TokenName: "key-a"}) + keyB := testutil.Fake(s.T(), faker, database.APIKey{UserID: u1.ID, LoginType: database.LoginTypeToken, TokenName: "key-b"}) + dbm.EXPECT().GetAPIKeysByUserID(gomock.Any(), gomock.Any()).Return(slice.New(keyA, keyB), nil).AnyTimes() check.Args(database.GetAPIKeysByUserIDParams{LoginType: database.LoginTypeToken, UserID: u1.ID}). Asserts(keyA, policy.ActionRead, keyB, policy.ActionRead). Returns(slice.New(keyA, keyB)) })) - s.Run("GetAPIKeysLastUsedAfter", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - a, _ := dbgen.APIKey(s.T(), db, database.APIKey{LastUsed: time.Now().Add(time.Hour)}) - b, _ := dbgen.APIKey(s.T(), db, database.APIKey{LastUsed: time.Now().Add(time.Hour)}) - _, _ = dbgen.APIKey(s.T(), db, database.APIKey{LastUsed: time.Now().Add(-time.Hour)}) - check.Args(time.Now()). - Asserts(a, policy.ActionRead, b, policy.ActionRead). - Returns(slice.New(a, b)) - })) - s.Run("InsertAPIKey", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - - check.Args(database.InsertAPIKeyParams{ - UserID: u.ID, - LoginType: database.LoginTypePassword, - Scope: database.APIKeyScopeAll, - IPAddress: defaultIPAddress(), - }).Asserts(rbac.ResourceApiKey.WithOwner(u.ID.String()), policy.ActionCreate) - })) - s.Run("UpdateAPIKeyByID", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - a, _ := dbgen.APIKey(s.T(), db, database.APIKey{UserID: u.ID, IPAddress: defaultIPAddress()}) - check.Args(database.UpdateAPIKeyByIDParams{ - ID: a.ID, - IPAddress: defaultIPAddress(), - LastUsed: time.Now(), - ExpiresAt: time.Now().Add(time.Hour), - }).Asserts(a, policy.ActionUpdate).Returns() - })) - s.Run("DeleteApplicationConnectAPIKeysByUserID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - a, _ := dbgen.APIKey(s.T(), db, database.APIKey{ - Scope: database.APIKeyScopeApplicationConnect, - }) + s.Run("GetAPIKeysLastUsedAfter", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + now := time.Now() + a := database.APIKey{LastUsed: now.Add(time.Hour)} + b := database.APIKey{LastUsed: now.Add(time.Hour)} + dbm.EXPECT().GetAPIKeysLastUsedAfter(gomock.Any(), gomock.Any()).Return([]database.APIKey{a, b}, nil).AnyTimes() + check.Args(now).Asserts(a, policy.ActionRead, b, policy.ActionRead).Returns(slice.New(a, b)) + })) + s.Run("InsertAPIKey", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + arg := database.InsertAPIKeyParams{UserID: u.ID, LoginType: database.LoginTypePassword, Scope: database.APIKeyScopeAll, IPAddress: defaultIPAddress()} + ret := testutil.Fake(s.T(), faker, database.APIKey{UserID: u.ID, LoginType: database.LoginTypePassword}) + dbm.EXPECT().InsertAPIKey(gomock.Any(), arg).Return(ret, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceApiKey.WithOwner(u.ID.String()), policy.ActionCreate) + })) + s.Run("UpdateAPIKeyByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + a := testutil.Fake(s.T(), faker, database.APIKey{UserID: u.ID, IPAddress: defaultIPAddress()}) + arg := database.UpdateAPIKeyByIDParams{ID: a.ID, IPAddress: defaultIPAddress(), LastUsed: time.Now(), ExpiresAt: time.Now().Add(time.Hour)} + dbm.EXPECT().GetAPIKeyByID(gomock.Any(), a.ID).Return(a, nil).AnyTimes() + dbm.EXPECT().UpdateAPIKeyByID(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(a, policy.ActionUpdate).Returns() + })) + s.Run("DeleteApplicationConnectAPIKeysByUserID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + a := testutil.Fake(s.T(), faker, database.APIKey{Scope: database.APIKeyScopeApplicationConnect}) + dbm.EXPECT().DeleteApplicationConnectAPIKeysByUserID(gomock.Any(), a.UserID).Return(nil).AnyTimes() check.Args(a.UserID).Asserts(rbac.ResourceApiKey.WithOwner(a.UserID.String()), policy.ActionDelete).Returns() })) - s.Run("DeleteExternalAuthLink", s.Subtest(func(db database.Store, check *expects) { - a := dbgen.ExternalAuthLink(s.T(), db, database.ExternalAuthLink{}) - check.Args(database.DeleteExternalAuthLinkParams{ - ProviderID: a.ProviderID, - UserID: a.UserID, - }).Asserts(rbac.ResourceUserObject(a.UserID), policy.ActionUpdatePersonal).Returns() + s.Run("DeleteExternalAuthLink", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + a := testutil.Fake(s.T(), faker, database.ExternalAuthLink{}) + dbm.EXPECT().GetExternalAuthLink(gomock.Any(), database.GetExternalAuthLinkParams{ProviderID: a.ProviderID, UserID: a.UserID}).Return(a, nil).AnyTimes() + dbm.EXPECT().DeleteExternalAuthLink(gomock.Any(), database.DeleteExternalAuthLinkParams{ProviderID: a.ProviderID, UserID: a.UserID}).Return(nil).AnyTimes() + check.Args(database.DeleteExternalAuthLinkParams{ProviderID: a.ProviderID, UserID: a.UserID}).Asserts(a, policy.ActionUpdatePersonal).Returns() })) - s.Run("GetExternalAuthLinksByUserID", s.Subtest(func(db database.Store, check *expects) { - a := dbgen.ExternalAuthLink(s.T(), db, database.ExternalAuthLink{}) - b := dbgen.ExternalAuthLink(s.T(), db, database.ExternalAuthLink{ - UserID: a.UserID, - }) - check.Args(a.UserID).Asserts( - rbac.ResourceUserObject(a.UserID), policy.ActionReadPersonal, - rbac.ResourceUserObject(b.UserID), policy.ActionReadPersonal) + s.Run("GetExternalAuthLinksByUserID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + a := testutil.Fake(s.T(), faker, database.ExternalAuthLink{}) + b := testutil.Fake(s.T(), faker, database.ExternalAuthLink{UserID: a.UserID}) + dbm.EXPECT().GetExternalAuthLinksByUserID(gomock.Any(), a.UserID).Return([]database.ExternalAuthLink{a, b}, nil).AnyTimes() + check.Args(a.UserID).Asserts(a, policy.ActionReadPersonal, b, policy.ActionReadPersonal) })) } func (s *MethodTestSuite) TestAuditLogs() { - s.Run("InsertAuditLog", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.InsertAuditLogParams{ - ResourceType: database.ResourceTypeOrganization, - Action: database.AuditActionCreate, - Diff: json.RawMessage("{}"), - AdditionalFields: json.RawMessage("{}"), - }).Asserts(rbac.ResourceAuditLog, policy.ActionCreate) - })) - s.Run("GetAuditLogsOffset", s.Subtest(func(db database.Store, check *expects) { - _ = dbgen.AuditLog(s.T(), db, database.AuditLog{}) - _ = dbgen.AuditLog(s.T(), db, database.AuditLog{}) - check.Args(database.GetAuditLogsOffsetParams{ - LimitOpt: 10, - }).Asserts(rbac.ResourceAuditLog, policy.ActionRead).WithNotAuthorized("nil") - })) - s.Run("GetAuthorizedAuditLogsOffset", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - _ = dbgen.AuditLog(s.T(), db, database.AuditLog{}) - _ = dbgen.AuditLog(s.T(), db, database.AuditLog{}) - check.Args(database.GetAuditLogsOffsetParams{ - LimitOpt: 10, - }, emptyPreparedAuthorized{}).Asserts(rbac.ResourceAuditLog, policy.ActionRead) - })) - s.Run("CountAuditLogs", s.Subtest(func(db database.Store, check *expects) { - _ = dbgen.AuditLog(s.T(), db, database.AuditLog{}) - _ = dbgen.AuditLog(s.T(), db, database.AuditLog{}) + s.Run("InsertAuditLog", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.InsertAuditLogParams{ResourceType: database.ResourceTypeOrganization, Action: database.AuditActionCreate, Diff: json.RawMessage("{}"), AdditionalFields: json.RawMessage("{}")} + dbm.EXPECT().InsertAuditLog(gomock.Any(), arg).Return(database.AuditLog{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceAuditLog, policy.ActionCreate) + })) + s.Run("GetAuditLogsOffset", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.GetAuditLogsOffsetParams{LimitOpt: 10} + dbm.EXPECT().GetAuditLogsOffset(gomock.Any(), arg).Return([]database.GetAuditLogsOffsetRow{}, nil).AnyTimes() + dbm.EXPECT().GetAuthorizedAuditLogsOffset(gomock.Any(), arg, gomock.Any()).Return([]database.GetAuditLogsOffsetRow{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceAuditLog, policy.ActionRead).WithNotAuthorized("nil") + })) + s.Run("GetAuthorizedAuditLogsOffset", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.GetAuditLogsOffsetParams{LimitOpt: 10} + dbm.EXPECT().GetAuthorizedAuditLogsOffset(gomock.Any(), arg, gomock.Any()).Return([]database.GetAuditLogsOffsetRow{}, nil).AnyTimes() + dbm.EXPECT().GetAuditLogsOffset(gomock.Any(), arg).Return([]database.GetAuditLogsOffsetRow{}, nil).AnyTimes() + check.Args(arg, emptyPreparedAuthorized{}).Asserts(rbac.ResourceAuditLog, policy.ActionRead) + })) + s.Run("CountAuditLogs", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().CountAuditLogs(gomock.Any(), database.CountAuditLogsParams{}).Return(int64(0), nil).AnyTimes() + dbm.EXPECT().CountAuthorizedAuditLogs(gomock.Any(), database.CountAuditLogsParams{}, gomock.Any()).Return(int64(0), nil).AnyTimes() check.Args(database.CountAuditLogsParams{}).Asserts(rbac.ResourceAuditLog, policy.ActionRead).WithNotAuthorized("nil") })) - s.Run("CountAuthorizedAuditLogs", s.Subtest(func(db database.Store, check *expects) { - _ = dbgen.AuditLog(s.T(), db, database.AuditLog{}) - _ = dbgen.AuditLog(s.T(), db, database.AuditLog{}) + s.Run("CountAuthorizedAuditLogs", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().CountAuthorizedAuditLogs(gomock.Any(), database.CountAuditLogsParams{}, gomock.Any()).Return(int64(0), nil).AnyTimes() + dbm.EXPECT().CountAuditLogs(gomock.Any(), database.CountAuditLogsParams{}).Return(int64(0), nil).AnyTimes() check.Args(database.CountAuditLogsParams{}, emptyPreparedAuthorized{}).Asserts(rbac.ResourceAuditLog, policy.ActionRead) })) - s.Run("DeleteOldAuditLogConnectionEvents", s.Subtest(func(db database.Store, check *expects) { - _ = dbgen.AuditLog(s.T(), db, database.AuditLog{}) + s.Run("DeleteOldAuditLogConnectionEvents", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().DeleteOldAuditLogConnectionEvents(gomock.Any(), database.DeleteOldAuditLogConnectionEventsParams{}).Return(nil).AnyTimes() check.Args(database.DeleteOldAuditLogConnectionEventsParams{}).Asserts(rbac.ResourceSystem, policy.ActionDelete) })) } func (s *MethodTestSuite) TestConnectionLogs() { - createWorkspace := func(t *testing.T, db database.Store) database.WorkspaceTable { - u := dbgen.User(s.T(), db, database.User{}) - o := dbgen.Organization(s.T(), db, database.Organization{}) - tpl := dbgen.Template(s.T(), db, database.Template{ - OrganizationID: o.ID, - CreatedBy: u.ID, - }) - return dbgen.Workspace(s.T(), db, database.WorkspaceTable{ - ID: uuid.New(), - OwnerID: u.ID, - OrganizationID: o.ID, - AutomaticUpdates: database.AutomaticUpdatesNever, - TemplateID: tpl.ID, - }) - } - s.Run("UpsertConnectionLog", s.Subtest(func(db database.Store, check *expects) { - ws := createWorkspace(s.T(), db) - check.Args(database.UpsertConnectionLogParams{ - Ip: defaultIPAddress(), - Type: database.ConnectionTypeSsh, - WorkspaceID: ws.ID, - OrganizationID: ws.OrganizationID, - ConnectionStatus: database.ConnectionStatusConnected, - WorkspaceOwnerID: ws.OwnerID, - }).Asserts(rbac.ResourceConnectionLog, policy.ActionUpdate) - })) - s.Run("GetConnectionLogsOffset", s.Subtest(func(db database.Store, check *expects) { - ws := createWorkspace(s.T(), db) - _ = dbgen.ConnectionLog(s.T(), db, database.UpsertConnectionLogParams{ - Ip: defaultIPAddress(), - Type: database.ConnectionTypeSsh, - WorkspaceID: ws.ID, - OrganizationID: ws.OrganizationID, - WorkspaceOwnerID: ws.OwnerID, - }) - _ = dbgen.ConnectionLog(s.T(), db, database.UpsertConnectionLogParams{ - Ip: defaultIPAddress(), - Type: database.ConnectionTypeSsh, - WorkspaceID: ws.ID, - OrganizationID: ws.OrganizationID, - WorkspaceOwnerID: ws.OwnerID, - }) - check.Args(database.GetConnectionLogsOffsetParams{ - LimitOpt: 10, - }).Asserts(rbac.ResourceConnectionLog, policy.ActionRead).WithNotAuthorized("nil") - })) - s.Run("GetAuthorizedConnectionLogsOffset", s.Subtest(func(db database.Store, check *expects) { - ws := createWorkspace(s.T(), db) - _ = dbgen.ConnectionLog(s.T(), db, database.UpsertConnectionLogParams{ - Ip: defaultIPAddress(), - Type: database.ConnectionTypeSsh, - WorkspaceID: ws.ID, - OrganizationID: ws.OrganizationID, - WorkspaceOwnerID: ws.OwnerID, - }) - _ = dbgen.ConnectionLog(s.T(), db, database.UpsertConnectionLogParams{ - Ip: defaultIPAddress(), - Type: database.ConnectionTypeSsh, - WorkspaceID: ws.ID, - OrganizationID: ws.OrganizationID, - WorkspaceOwnerID: ws.OwnerID, - }) - check.Args(database.GetConnectionLogsOffsetParams{ - LimitOpt: 10, - }, emptyPreparedAuthorized{}).Asserts(rbac.ResourceConnectionLog, policy.ActionRead) - })) - s.Run("CountConnectionLogs", s.Subtest(func(db database.Store, check *expects) { - ws := createWorkspace(s.T(), db) - _ = dbgen.ConnectionLog(s.T(), db, database.UpsertConnectionLogParams{ - Type: database.ConnectionTypeSsh, - WorkspaceID: ws.ID, - OrganizationID: ws.OrganizationID, - WorkspaceOwnerID: ws.OwnerID, - }) - _ = dbgen.ConnectionLog(s.T(), db, database.UpsertConnectionLogParams{ - Type: database.ConnectionTypeSsh, - WorkspaceID: ws.ID, - OrganizationID: ws.OrganizationID, - WorkspaceOwnerID: ws.OwnerID, - }) - check.Args(database.CountConnectionLogsParams{}).Asserts( - rbac.ResourceConnectionLog, policy.ActionRead, - ).WithNotAuthorized("nil") - })) - s.Run("CountAuthorizedConnectionLogs", s.Subtest(func(db database.Store, check *expects) { - ws := createWorkspace(s.T(), db) - _ = dbgen.ConnectionLog(s.T(), db, database.UpsertConnectionLogParams{ - Type: database.ConnectionTypeSsh, - WorkspaceID: ws.ID, - OrganizationID: ws.OrganizationID, - WorkspaceOwnerID: ws.OwnerID, - }) - _ = dbgen.ConnectionLog(s.T(), db, database.UpsertConnectionLogParams{ - Type: database.ConnectionTypeSsh, - WorkspaceID: ws.ID, - OrganizationID: ws.OrganizationID, - WorkspaceOwnerID: ws.OwnerID, - }) - check.Args(database.CountConnectionLogsParams{}, emptyPreparedAuthorized{}).Asserts( - rbac.ResourceConnectionLog, policy.ActionRead, - ) + s.Run("UpsertConnectionLog", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + ws := testutil.Fake(s.T(), faker, database.WorkspaceTable{}) + arg := database.UpsertConnectionLogParams{Ip: defaultIPAddress(), Type: database.ConnectionTypeSsh, WorkspaceID: ws.ID, OrganizationID: ws.OrganizationID, ConnectionStatus: database.ConnectionStatusConnected, WorkspaceOwnerID: ws.OwnerID} + dbm.EXPECT().UpsertConnectionLog(gomock.Any(), arg).Return(database.ConnectionLog{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceConnectionLog, policy.ActionUpdate) + })) + s.Run("GetConnectionLogsOffset", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.GetConnectionLogsOffsetParams{LimitOpt: 10} + dbm.EXPECT().GetConnectionLogsOffset(gomock.Any(), arg).Return([]database.GetConnectionLogsOffsetRow{}, nil).AnyTimes() + dbm.EXPECT().GetAuthorizedConnectionLogsOffset(gomock.Any(), arg, gomock.Any()).Return([]database.GetConnectionLogsOffsetRow{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceConnectionLog, policy.ActionRead).WithNotAuthorized("nil") + })) + s.Run("GetAuthorizedConnectionLogsOffset", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.GetConnectionLogsOffsetParams{LimitOpt: 10} + dbm.EXPECT().GetAuthorizedConnectionLogsOffset(gomock.Any(), arg, gomock.Any()).Return([]database.GetConnectionLogsOffsetRow{}, nil).AnyTimes() + dbm.EXPECT().GetConnectionLogsOffset(gomock.Any(), arg).Return([]database.GetConnectionLogsOffsetRow{}, nil).AnyTimes() + check.Args(arg, emptyPreparedAuthorized{}).Asserts(rbac.ResourceConnectionLog, policy.ActionRead) + })) + s.Run("CountConnectionLogs", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().CountConnectionLogs(gomock.Any(), database.CountConnectionLogsParams{}).Return(int64(0), nil).AnyTimes() + dbm.EXPECT().CountAuthorizedConnectionLogs(gomock.Any(), database.CountConnectionLogsParams{}, gomock.Any()).Return(int64(0), nil).AnyTimes() + check.Args(database.CountConnectionLogsParams{}).Asserts(rbac.ResourceConnectionLog, policy.ActionRead).WithNotAuthorized("nil") + })) + s.Run("CountAuthorizedConnectionLogs", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().CountAuthorizedConnectionLogs(gomock.Any(), database.CountConnectionLogsParams{}, gomock.Any()).Return(int64(0), nil).AnyTimes() + dbm.EXPECT().CountConnectionLogs(gomock.Any(), database.CountConnectionLogsParams{}).Return(int64(0), nil).AnyTimes() + check.Args(database.CountConnectionLogsParams{}, emptyPreparedAuthorized{}).Asserts(rbac.ResourceConnectionLog, policy.ActionRead) })) } func (s *MethodTestSuite) TestFile() { - s.Run("GetFileByHashAndCreator", s.Subtest(func(db database.Store, check *expects) { - f := dbgen.File(s.T(), db, database.File{}) + s.Run("GetFileByHashAndCreator", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + f := testutil.Fake(s.T(), faker, database.File{}) + dbm.EXPECT().GetFileByHashAndCreator(gomock.Any(), gomock.Any()).Return(f, nil).AnyTimes() + // dbauthz may attempt to check template access on NotAuthorized; ensure mock handles it. + dbm.EXPECT().GetFileTemplates(gomock.Any(), f.ID).Return([]database.GetFileTemplatesRow{}, nil).AnyTimes() check.Args(database.GetFileByHashAndCreatorParams{ Hash: f.Hash, CreatedBy: f.CreatedBy, }).Asserts(f, policy.ActionRead).Returns(f) })) - s.Run("GetFileByID", s.Subtest(func(db database.Store, check *expects) { - f := dbgen.File(s.T(), db, database.File{}) + s.Run("GetFileByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + f := testutil.Fake(s.T(), faker, database.File{}) + dbm.EXPECT().GetFileByID(gomock.Any(), f.ID).Return(f, nil).AnyTimes() + dbm.EXPECT().GetFileTemplates(gomock.Any(), f.ID).Return([]database.GetFileTemplatesRow{}, nil).AnyTimes() check.Args(f.ID).Asserts(f, policy.ActionRead).Returns(f) })) - s.Run("GetFileIDByTemplateVersionID", s.Subtest(func(db database.Store, check *expects) { - o := dbgen.Organization(s.T(), db, database.Organization{}) - u := dbgen.User(s.T(), db, database.User{}) - _ = dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{OrganizationID: o.ID, UserID: u.ID}) - f := dbgen.File(s.T(), db, database.File{CreatedBy: u.ID}) - j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{StorageMethod: database.ProvisionerStorageMethodFile, FileID: f.ID}) - tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{OrganizationID: o.ID, JobID: j.ID, CreatedBy: u.ID}) - check.Args(tv.ID).Asserts(rbac.ResourceFile.WithID(f.ID), policy.ActionRead).Returns(f.ID) + s.Run("GetFileIDByTemplateVersionID", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + tvID := uuid.New() + fileID := uuid.New() + dbm.EXPECT().GetFileIDByTemplateVersionID(gomock.Any(), tvID).Return(fileID, nil).AnyTimes() + check.Args(tvID).Asserts(rbac.ResourceFile.WithID(fileID), policy.ActionRead).Returns(fileID) })) - s.Run("InsertFile", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) + s.Run("InsertFile", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + ret := testutil.Fake(s.T(), faker, database.File{CreatedBy: u.ID}) + dbm.EXPECT().InsertFile(gomock.Any(), gomock.Any()).Return(ret, nil).AnyTimes() check.Args(database.InsertFileParams{ CreatedBy: u.ID, }).Asserts(rbac.ResourceFile.WithOwner(u.ID.String()), policy.ActionCreate) @@ -475,153 +378,150 @@ func (s *MethodTestSuite) TestFile() { } func (s *MethodTestSuite) TestGroup() { - s.Run("DeleteGroupByID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - g := dbgen.Group(s.T(), db, database.Group{}) + s.Run("DeleteGroupByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + g := testutil.Fake(s.T(), faker, database.Group{}) + dbm.EXPECT().GetGroupByID(gomock.Any(), g.ID).Return(g, nil).AnyTimes() + dbm.EXPECT().DeleteGroupByID(gomock.Any(), g.ID).Return(nil).AnyTimes() check.Args(g.ID).Asserts(g, policy.ActionDelete).Returns() })) - s.Run("DeleteGroupMemberFromGroup", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - g := dbgen.Group(s.T(), db, database.Group{}) - u := dbgen.User(s.T(), db, database.User{}) - m := dbgen.GroupMember(s.T(), db, database.GroupMemberTable{ - GroupID: g.ID, - UserID: u.ID, - }) - check.Args(database.DeleteGroupMemberFromGroupParams{ - UserID: m.UserID, - GroupID: g.ID, - }).Asserts(g, policy.ActionUpdate).Returns() + + s.Run("DeleteGroupMemberFromGroup", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + g := testutil.Fake(s.T(), faker, database.Group{}) + u := testutil.Fake(s.T(), faker, database.User{}) + m := testutil.Fake(s.T(), faker, database.GroupMember{GroupID: g.ID, UserID: u.ID}) + dbm.EXPECT().GetGroupByID(gomock.Any(), g.ID).Return(g, nil).AnyTimes() + dbm.EXPECT().DeleteGroupMemberFromGroup(gomock.Any(), database.DeleteGroupMemberFromGroupParams{UserID: m.UserID, GroupID: g.ID}).Return(nil).AnyTimes() + check.Args(database.DeleteGroupMemberFromGroupParams{UserID: m.UserID, GroupID: g.ID}).Asserts(g, policy.ActionUpdate).Returns() })) - s.Run("GetGroupByID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - g := dbgen.Group(s.T(), db, database.Group{}) + + s.Run("GetGroupByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + g := testutil.Fake(s.T(), faker, database.Group{}) + dbm.EXPECT().GetGroupByID(gomock.Any(), g.ID).Return(g, nil).AnyTimes() check.Args(g.ID).Asserts(g, policy.ActionRead).Returns(g) })) - s.Run("GetGroupByOrgAndName", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - g := dbgen.Group(s.T(), db, database.Group{}) - check.Args(database.GetGroupByOrgAndNameParams{ - OrganizationID: g.OrganizationID, - Name: g.Name, - }).Asserts(g, policy.ActionRead).Returns(g) + + s.Run("GetGroupByOrgAndName", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + g := testutil.Fake(s.T(), faker, database.Group{}) + dbm.EXPECT().GetGroupByOrgAndName(gomock.Any(), database.GetGroupByOrgAndNameParams{OrganizationID: g.OrganizationID, Name: g.Name}).Return(g, nil).AnyTimes() + check.Args(database.GetGroupByOrgAndNameParams{OrganizationID: g.OrganizationID, Name: g.Name}).Asserts(g, policy.ActionRead).Returns(g) })) - s.Run("GetGroupMembersByGroupID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - g := dbgen.Group(s.T(), db, database.Group{}) - u := dbgen.User(s.T(), db, database.User{}) - gm := dbgen.GroupMember(s.T(), db, database.GroupMemberTable{GroupID: g.ID, UserID: u.ID}) - check.Args(database.GetGroupMembersByGroupIDParams{ - GroupID: g.ID, - IncludeSystem: false, - }).Asserts(gm, policy.ActionRead) + + s.Run("GetGroupMembersByGroupID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + g := testutil.Fake(s.T(), faker, database.Group{}) + u := testutil.Fake(s.T(), faker, database.User{}) + gm := testutil.Fake(s.T(), faker, database.GroupMember{GroupID: g.ID, UserID: u.ID}) + arg := database.GetGroupMembersByGroupIDParams{GroupID: g.ID, IncludeSystem: false} + dbm.EXPECT().GetGroupMembersByGroupID(gomock.Any(), arg).Return([]database.GroupMember{gm}, nil).AnyTimes() + check.Args(arg).Asserts(gm, policy.ActionRead) })) - s.Run("GetGroupMembersCountByGroupID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - g := dbgen.Group(s.T(), db, database.Group{}) - check.Args(database.GetGroupMembersCountByGroupIDParams{ - GroupID: g.ID, - IncludeSystem: false, - }).Asserts(g, policy.ActionRead) + + s.Run("GetGroupMembersCountByGroupID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + g := testutil.Fake(s.T(), faker, database.Group{}) + arg := database.GetGroupMembersCountByGroupIDParams{GroupID: g.ID, IncludeSystem: false} + dbm.EXPECT().GetGroupByID(gomock.Any(), g.ID).Return(g, nil).AnyTimes() + dbm.EXPECT().GetGroupMembersCountByGroupID(gomock.Any(), arg).Return(int64(0), nil).AnyTimes() + check.Args(arg).Asserts(g, policy.ActionRead) })) - s.Run("GetGroupMembers", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - g := dbgen.Group(s.T(), db, database.Group{}) - u := dbgen.User(s.T(), db, database.User{}) - dbgen.GroupMember(s.T(), db, database.GroupMemberTable{GroupID: g.ID, UserID: u.ID}) + + s.Run("GetGroupMembers", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().GetGroupMembers(gomock.Any(), false).Return([]database.GroupMember{}, nil).AnyTimes() check.Args(false).Asserts(rbac.ResourceSystem, policy.ActionRead) })) - s.Run("System/GetGroups", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - _ = dbgen.Group(s.T(), db, database.Group{}) - check.Args(database.GetGroupsParams{}). - Asserts(rbac.ResourceSystem, policy.ActionRead) + + s.Run("System/GetGroups", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + o := testutil.Fake(s.T(), faker, database.Organization{}) + g := testutil.Fake(s.T(), faker, database.Group{OrganizationID: o.ID}) + row := database.GetGroupsRow{Group: g, OrganizationName: o.Name, OrganizationDisplayName: o.DisplayName} + dbm.EXPECT().GetGroups(gomock.Any(), database.GetGroupsParams{}).Return([]database.GetGroupsRow{row}, nil).AnyTimes() + check.Args(database.GetGroupsParams{}).Asserts(rbac.ResourceSystem, policy.ActionRead) })) - s.Run("GetGroups", s.Subtest(func(db database.Store, check *expects) { - o := dbgen.Organization(s.T(), db, database.Organization{}) - g := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID}) - u := dbgen.User(s.T(), db, database.User{}) - gm := dbgen.GroupMember(s.T(), db, database.GroupMemberTable{GroupID: g.ID, UserID: u.ID}) - check.Args(database.GetGroupsParams{ - OrganizationID: g.OrganizationID, - HasMemberID: gm.UserID, - }).Asserts(rbac.ResourceSystem, policy.ActionRead, g, policy.ActionRead). - // Fail the system resource skip - FailSystemObjectChecks() + + s.Run("GetGroups", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + o := testutil.Fake(s.T(), faker, database.Organization{}) + g := testutil.Fake(s.T(), faker, database.Group{OrganizationID: o.ID}) + u := testutil.Fake(s.T(), faker, database.User{}) + gm := testutil.Fake(s.T(), faker, database.GroupMember{GroupID: g.ID, UserID: u.ID}) + params := database.GetGroupsParams{OrganizationID: g.OrganizationID, HasMemberID: gm.UserID} + row := database.GetGroupsRow{Group: g, OrganizationName: o.Name, OrganizationDisplayName: o.DisplayName} + dbm.EXPECT().GetGroups(gomock.Any(), params).Return([]database.GetGroupsRow{row}, nil).AnyTimes() + check.Args(params).Asserts(rbac.ResourceSystem, policy.ActionRead, g, policy.ActionRead).FailSystemObjectChecks() })) - s.Run("InsertAllUsersGroup", s.Subtest(func(db database.Store, check *expects) { - o := dbgen.Organization(s.T(), db, database.Organization{}) + + s.Run("InsertAllUsersGroup", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + o := testutil.Fake(s.T(), faker, database.Organization{}) + ret := testutil.Fake(s.T(), faker, database.Group{OrganizationID: o.ID}) + dbm.EXPECT().InsertAllUsersGroup(gomock.Any(), o.ID).Return(ret, nil).AnyTimes() check.Args(o.ID).Asserts(rbac.ResourceGroup.InOrg(o.ID), policy.ActionCreate) })) - s.Run("InsertGroup", s.Subtest(func(db database.Store, check *expects) { - o := dbgen.Organization(s.T(), db, database.Organization{}) - check.Args(database.InsertGroupParams{ - OrganizationID: o.ID, - Name: "test", - }).Asserts(rbac.ResourceGroup.InOrg(o.ID), policy.ActionCreate) + + s.Run("InsertGroup", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + o := testutil.Fake(s.T(), faker, database.Organization{}) + arg := database.InsertGroupParams{OrganizationID: o.ID, Name: "test"} + ret := testutil.Fake(s.T(), faker, database.Group{OrganizationID: o.ID, Name: arg.Name}) + dbm.EXPECT().InsertGroup(gomock.Any(), arg).Return(ret, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceGroup.InOrg(o.ID), policy.ActionCreate) })) - s.Run("InsertGroupMember", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - g := dbgen.Group(s.T(), db, database.Group{}) - check.Args(database.InsertGroupMemberParams{ - UserID: uuid.New(), - GroupID: g.ID, - }).Asserts(g, policy.ActionUpdate).Returns() + + s.Run("InsertGroupMember", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + g := testutil.Fake(s.T(), faker, database.Group{}) + arg := database.InsertGroupMemberParams{UserID: uuid.New(), GroupID: g.ID} + dbm.EXPECT().GetGroupByID(gomock.Any(), g.ID).Return(g, nil).AnyTimes() + dbm.EXPECT().InsertGroupMember(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(g, policy.ActionUpdate).Returns() })) - s.Run("InsertUserGroupsByName", s.Subtest(func(db database.Store, check *expects) { - o := dbgen.Organization(s.T(), db, database.Organization{}) - u1 := dbgen.User(s.T(), db, database.User{}) - g1 := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID}) - g2 := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID}) - check.Args(database.InsertUserGroupsByNameParams{ - OrganizationID: o.ID, - UserID: u1.ID, - GroupNames: slice.New(g1.Name, g2.Name), - }).Asserts(rbac.ResourceGroup.InOrg(o.ID), policy.ActionUpdate).Returns() + + s.Run("InsertUserGroupsByName", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + o := testutil.Fake(s.T(), faker, database.Organization{}) + u1 := testutil.Fake(s.T(), faker, database.User{}) + g1 := testutil.Fake(s.T(), faker, database.Group{OrganizationID: o.ID}) + g2 := testutil.Fake(s.T(), faker, database.Group{OrganizationID: o.ID}) + arg := database.InsertUserGroupsByNameParams{OrganizationID: o.ID, UserID: u1.ID, GroupNames: slice.New(g1.Name, g2.Name)} + dbm.EXPECT().InsertUserGroupsByName(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceGroup.InOrg(o.ID), policy.ActionUpdate).Returns() })) - s.Run("InsertUserGroupsByID", s.Subtest(func(db database.Store, check *expects) { - o := dbgen.Organization(s.T(), db, database.Organization{}) - u1 := dbgen.User(s.T(), db, database.User{}) - g1 := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID}) - g2 := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID}) - g3 := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID}) - _ = dbgen.GroupMember(s.T(), db, database.GroupMemberTable{GroupID: g1.ID, UserID: u1.ID}) + + s.Run("InsertUserGroupsByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + o := testutil.Fake(s.T(), faker, database.Organization{}) + u1 := testutil.Fake(s.T(), faker, database.User{}) + g1 := testutil.Fake(s.T(), faker, database.Group{OrganizationID: o.ID}) + g2 := testutil.Fake(s.T(), faker, database.Group{OrganizationID: o.ID}) + g3 := testutil.Fake(s.T(), faker, database.Group{OrganizationID: o.ID}) returns := slice.New(g2.ID, g3.ID) - if !dbtestutil.WillUsePostgres() { - returns = slice.New(g1.ID, g2.ID, g3.ID) - } - check.Args(database.InsertUserGroupsByIDParams{ - UserID: u1.ID, - GroupIds: slice.New(g1.ID, g2.ID, g3.ID), - }).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns(returns) + arg := database.InsertUserGroupsByIDParams{UserID: u1.ID, GroupIds: slice.New(g1.ID, g2.ID, g3.ID)} + dbm.EXPECT().InsertUserGroupsByID(gomock.Any(), arg).Return(returns, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns(returns) })) - s.Run("RemoveUserFromAllGroups", s.Subtest(func(db database.Store, check *expects) { - o := dbgen.Organization(s.T(), db, database.Organization{}) - u1 := dbgen.User(s.T(), db, database.User{}) - g1 := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID}) - g2 := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID}) - _ = dbgen.GroupMember(s.T(), db, database.GroupMemberTable{GroupID: g1.ID, UserID: u1.ID}) - _ = dbgen.GroupMember(s.T(), db, database.GroupMemberTable{GroupID: g2.ID, UserID: u1.ID}) + + s.Run("RemoveUserFromAllGroups", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u1 := testutil.Fake(s.T(), faker, database.User{}) + dbm.EXPECT().RemoveUserFromAllGroups(gomock.Any(), u1.ID).Return(nil).AnyTimes() check.Args(u1.ID).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns() })) - s.Run("RemoveUserFromGroups", s.Subtest(func(db database.Store, check *expects) { - o := dbgen.Organization(s.T(), db, database.Organization{}) - u1 := dbgen.User(s.T(), db, database.User{}) - g1 := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID}) - g2 := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID}) - _ = dbgen.GroupMember(s.T(), db, database.GroupMemberTable{GroupID: g1.ID, UserID: u1.ID}) - _ = dbgen.GroupMember(s.T(), db, database.GroupMemberTable{GroupID: g2.ID, UserID: u1.ID}) - check.Args(database.RemoveUserFromGroupsParams{ - UserID: u1.ID, - GroupIds: []uuid.UUID{g1.ID, g2.ID}, - }).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns(slice.New(g1.ID, g2.ID)) - })) - s.Run("UpdateGroupByID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - g := dbgen.Group(s.T(), db, database.Group{}) - check.Args(database.UpdateGroupByIDParams{ - ID: g.ID, - }).Asserts(g, policy.ActionUpdate) + + s.Run("RemoveUserFromGroups", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + o := testutil.Fake(s.T(), faker, database.Organization{}) + u1 := testutil.Fake(s.T(), faker, database.User{}) + g1 := testutil.Fake(s.T(), faker, database.Group{OrganizationID: o.ID}) + g2 := testutil.Fake(s.T(), faker, database.Group{OrganizationID: o.ID}) + arg := database.RemoveUserFromGroupsParams{UserID: u1.ID, GroupIds: []uuid.UUID{g1.ID, g2.ID}} + dbm.EXPECT().RemoveUserFromGroups(gomock.Any(), arg).Return(slice.New(g1.ID, g2.ID), nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns(slice.New(g1.ID, g2.ID)) + })) + + s.Run("UpdateGroupByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + g := testutil.Fake(s.T(), faker, database.Group{}) + arg := database.UpdateGroupByIDParams{ID: g.ID} + dbm.EXPECT().GetGroupByID(gomock.Any(), g.ID).Return(g, nil).AnyTimes() + dbm.EXPECT().UpdateGroupByID(gomock.Any(), arg).Return(g, nil).AnyTimes() + check.Args(arg).Asserts(g, policy.ActionUpdate) + })) + + s.Run("ValidateGroupIDs", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + o := testutil.Fake(s.T(), faker, database.Organization{}) + g := testutil.Fake(s.T(), faker, database.Group{OrganizationID: o.ID}) + ids := []uuid.UUID{g.ID} + dbm.EXPECT().ValidateGroupIDs(gomock.Any(), ids).Return(database.ValidateGroupIDsRow{}, nil).AnyTimes() + check.Args(ids).Asserts(rbac.ResourceSystem, policy.ActionRead) })) } @@ -849,88 +749,91 @@ func (s *MethodTestSuite) TestProvisionerJob() { } func (s *MethodTestSuite) TestLicense() { - s.Run("GetLicenses", s.Subtest(func(db database.Store, check *expects) { - l, err := db.InsertLicense(context.Background(), database.InsertLicenseParams{ + s.Run("GetLicenses", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + a := database.License{ID: 1} + b := database.License{ID: 2} + dbm.EXPECT().GetLicenses(gomock.Any()).Return([]database.License{a, b}, nil).AnyTimes() + check.Args().Asserts(a, policy.ActionRead, b, policy.ActionRead).Returns([]database.License{a, b}) + })) + s.Run("GetUnexpiredLicenses", s.Mocked(func(db *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + l := database.License{ + ID: 1, + Exp: time.Now().Add(time.Hour * 24 * 30), UUID: uuid.New(), - }) - require.NoError(s.T(), err) - check.Args().Asserts(l, policy.ActionRead). + } + db.EXPECT().GetUnexpiredLicenses(gomock.Any()). + Return([]database.License{l}, nil). + AnyTimes() + check.Args().Asserts(rbac.ResourceLicense, policy.ActionRead). Returns([]database.License{l}) })) - s.Run("InsertLicense", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.InsertLicenseParams{}). - Asserts(rbac.ResourceLicense, policy.ActionCreate) + s.Run("InsertLicense", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().InsertLicense(gomock.Any(), database.InsertLicenseParams{}).Return(database.License{}, nil).AnyTimes() + check.Args(database.InsertLicenseParams{}).Asserts(rbac.ResourceLicense, policy.ActionCreate) })) - s.Run("UpsertLogoURL", s.Subtest(func(db database.Store, check *expects) { + s.Run("UpsertLogoURL", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().UpsertLogoURL(gomock.Any(), "value").Return(nil).AnyTimes() check.Args("value").Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate) })) - s.Run("UpsertAnnouncementBanners", s.Subtest(func(db database.Store, check *expects) { + s.Run("UpsertAnnouncementBanners", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().UpsertAnnouncementBanners(gomock.Any(), "value").Return(nil).AnyTimes() check.Args("value").Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate) })) - s.Run("GetLicenseByID", s.Subtest(func(db database.Store, check *expects) { - l, err := db.InsertLicense(context.Background(), database.InsertLicenseParams{ - UUID: uuid.New(), - }) - require.NoError(s.T(), err) - check.Args(l.ID).Asserts(l, policy.ActionRead).Returns(l) + s.Run("GetLicenseByID", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + l := database.License{ID: 1} + dbm.EXPECT().GetLicenseByID(gomock.Any(), int32(1)).Return(l, nil).AnyTimes() + check.Args(int32(1)).Asserts(l, policy.ActionRead).Returns(l) })) - s.Run("DeleteLicense", s.Subtest(func(db database.Store, check *expects) { - l, err := db.InsertLicense(context.Background(), database.InsertLicenseParams{ - UUID: uuid.New(), - }) - require.NoError(s.T(), err) + s.Run("DeleteLicense", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + l := database.License{ID: 1} + dbm.EXPECT().GetLicenseByID(gomock.Any(), l.ID).Return(l, nil).AnyTimes() + dbm.EXPECT().DeleteLicense(gomock.Any(), l.ID).Return(int32(1), nil).AnyTimes() check.Args(l.ID).Asserts(l, policy.ActionDelete) })) - s.Run("GetDeploymentID", s.Subtest(func(db database.Store, check *expects) { - db.InsertDeploymentID(context.Background(), "value") + s.Run("GetDeploymentID", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().GetDeploymentID(gomock.Any()).Return("value", nil).AnyTimes() check.Args().Asserts().Returns("value") })) - s.Run("GetDefaultProxyConfig", s.Subtest(func(db database.Store, check *expects) { - check.Args().Asserts().Returns(database.GetDefaultProxyConfigRow{ - DisplayName: "Default", - IconUrl: "/emojis/1f3e1.png", - }) + s.Run("GetDefaultProxyConfig", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().GetDefaultProxyConfig(gomock.Any()).Return(database.GetDefaultProxyConfigRow{DisplayName: "Default", IconUrl: "/emojis/1f3e1.png"}, nil).AnyTimes() + check.Args().Asserts().Returns(database.GetDefaultProxyConfigRow{DisplayName: "Default", IconUrl: "/emojis/1f3e1.png"}) })) - s.Run("GetLogoURL", s.Subtest(func(db database.Store, check *expects) { - err := db.UpsertLogoURL(context.Background(), "value") - require.NoError(s.T(), err) + s.Run("GetLogoURL", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().GetLogoURL(gomock.Any()).Return("value", nil).AnyTimes() check.Args().Asserts().Returns("value") })) - s.Run("GetAnnouncementBanners", s.Subtest(func(db database.Store, check *expects) { - err := db.UpsertAnnouncementBanners(context.Background(), "value") - require.NoError(s.T(), err) + s.Run("GetAnnouncementBanners", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().GetAnnouncementBanners(gomock.Any()).Return("value", nil).AnyTimes() check.Args().Asserts().Returns("value") })) - s.Run("GetManagedAgentCount", s.Subtest(func(db database.Store, check *expects) { + s.Run("GetManagedAgentCount", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { start := dbtime.Now() end := start.Add(time.Hour) - check.Args(database.GetManagedAgentCountParams{ - StartTime: start, - EndTime: end, - }).Asserts(rbac.ResourceWorkspace, policy.ActionRead).Returns(int64(0)) + dbm.EXPECT().GetManagedAgentCount(gomock.Any(), database.GetManagedAgentCountParams{StartTime: start, EndTime: end}).Return(int64(0), nil).AnyTimes() + check.Args(database.GetManagedAgentCountParams{StartTime: start, EndTime: end}).Asserts(rbac.ResourceWorkspace, policy.ActionRead).Returns(int64(0)) })) } func (s *MethodTestSuite) TestOrganization() { - s.Run("Deployment/OIDCClaimFields", s.Subtest(func(db database.Store, check *expects) { + s.Run("Deployment/OIDCClaimFields", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().OIDCClaimFields(gomock.Any(), uuid.Nil).Return([]string{}, nil).AnyTimes() check.Args(uuid.Nil).Asserts(rbac.ResourceIdpsyncSettings, policy.ActionRead).Returns([]string{}) })) - s.Run("Organization/OIDCClaimFields", s.Subtest(func(db database.Store, check *expects) { + s.Run("Organization/OIDCClaimFields", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { id := uuid.New() + dbm.EXPECT().OIDCClaimFields(gomock.Any(), id).Return([]string{}, nil).AnyTimes() check.Args(id).Asserts(rbac.ResourceIdpsyncSettings.InOrg(id), policy.ActionRead).Returns([]string{}) })) - s.Run("Deployment/OIDCClaimFieldValues", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.OIDCClaimFieldValuesParams{ - ClaimField: "claim-field", - OrganizationID: uuid.Nil, - }).Asserts(rbac.ResourceIdpsyncSettings, policy.ActionRead).Returns([]string{}) + s.Run("Deployment/OIDCClaimFieldValues", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.OIDCClaimFieldValuesParams{ClaimField: "claim-field", OrganizationID: uuid.Nil} + dbm.EXPECT().OIDCClaimFieldValues(gomock.Any(), arg).Return([]string{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceIdpsyncSettings, policy.ActionRead).Returns([]string{}) })) - s.Run("Organization/OIDCClaimFieldValues", s.Subtest(func(db database.Store, check *expects) { + s.Run("Organization/OIDCClaimFieldValues", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { id := uuid.New() - check.Args(database.OIDCClaimFieldValuesParams{ - ClaimField: "claim-field", - OrganizationID: id, - }).Asserts(rbac.ResourceIdpsyncSettings.InOrg(id), policy.ActionRead).Returns([]string{}) + arg := database.OIDCClaimFieldValuesParams{ClaimField: "claim-field", OrganizationID: id} + dbm.EXPECT().OIDCClaimFieldValues(gomock.Any(), arg).Return([]string{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceIdpsyncSettings.InOrg(id), policy.ActionRead).Returns([]string{}) })) s.Run("ByOrganization/GetGroups", s.Subtest(func(db database.Store, check *expects) { o := dbgen.Organization(s.T(), db, database.Organization{}) @@ -1237,748 +1140,604 @@ func (s *MethodTestSuite) TestOrganization() { } func (s *MethodTestSuite) TestWorkspaceProxy() { - s.Run("InsertWorkspaceProxy", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.InsertWorkspaceProxyParams{ - ID: uuid.New(), - }).Asserts(rbac.ResourceWorkspaceProxy, policy.ActionCreate) - })) - s.Run("RegisterWorkspaceProxy", s.Subtest(func(db database.Store, check *expects) { - p, _ := dbgen.WorkspaceProxy(s.T(), db, database.WorkspaceProxy{}) - check.Args(database.RegisterWorkspaceProxyParams{ - ID: p.ID, - }).Asserts(p, policy.ActionUpdate) - })) - s.Run("GetWorkspaceProxyByID", s.Subtest(func(db database.Store, check *expects) { - p, _ := dbgen.WorkspaceProxy(s.T(), db, database.WorkspaceProxy{}) + s.Run("InsertWorkspaceProxy", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.InsertWorkspaceProxyParams{ID: uuid.New()} + dbm.EXPECT().InsertWorkspaceProxy(gomock.Any(), arg).Return(database.WorkspaceProxy{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceWorkspaceProxy, policy.ActionCreate) + })) + s.Run("RegisterWorkspaceProxy", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + p := testutil.Fake(s.T(), faker, database.WorkspaceProxy{}) + dbm.EXPECT().GetWorkspaceProxyByID(gomock.Any(), p.ID).Return(p, nil).AnyTimes() + dbm.EXPECT().RegisterWorkspaceProxy(gomock.Any(), database.RegisterWorkspaceProxyParams{ID: p.ID}).Return(p, nil).AnyTimes() + check.Args(database.RegisterWorkspaceProxyParams{ID: p.ID}).Asserts(p, policy.ActionUpdate) + })) + s.Run("GetWorkspaceProxyByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + p := testutil.Fake(s.T(), faker, database.WorkspaceProxy{}) + dbm.EXPECT().GetWorkspaceProxyByID(gomock.Any(), p.ID).Return(p, nil).AnyTimes() check.Args(p.ID).Asserts(p, policy.ActionRead).Returns(p) })) - s.Run("GetWorkspaceProxyByName", s.Subtest(func(db database.Store, check *expects) { - p, _ := dbgen.WorkspaceProxy(s.T(), db, database.WorkspaceProxy{}) + s.Run("GetWorkspaceProxyByName", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + p := testutil.Fake(s.T(), faker, database.WorkspaceProxy{}) + dbm.EXPECT().GetWorkspaceProxyByName(gomock.Any(), p.Name).Return(p, nil).AnyTimes() check.Args(p.Name).Asserts(p, policy.ActionRead).Returns(p) })) - s.Run("UpdateWorkspaceProxyDeleted", s.Subtest(func(db database.Store, check *expects) { - p, _ := dbgen.WorkspaceProxy(s.T(), db, database.WorkspaceProxy{}) - check.Args(database.UpdateWorkspaceProxyDeletedParams{ - ID: p.ID, - Deleted: true, - }).Asserts(p, policy.ActionDelete) - })) - s.Run("UpdateWorkspaceProxy", s.Subtest(func(db database.Store, check *expects) { - p, _ := dbgen.WorkspaceProxy(s.T(), db, database.WorkspaceProxy{}) - check.Args(database.UpdateWorkspaceProxyParams{ - ID: p.ID, - }).Asserts(p, policy.ActionUpdate) - })) - s.Run("GetWorkspaceProxies", s.Subtest(func(db database.Store, check *expects) { - p1, _ := dbgen.WorkspaceProxy(s.T(), db, database.WorkspaceProxy{}) - p2, _ := dbgen.WorkspaceProxy(s.T(), db, database.WorkspaceProxy{}) + s.Run("UpdateWorkspaceProxyDeleted", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + p := testutil.Fake(s.T(), faker, database.WorkspaceProxy{}) + dbm.EXPECT().GetWorkspaceProxyByID(gomock.Any(), p.ID).Return(p, nil).AnyTimes() + dbm.EXPECT().UpdateWorkspaceProxyDeleted(gomock.Any(), database.UpdateWorkspaceProxyDeletedParams{ID: p.ID, Deleted: true}).Return(nil).AnyTimes() + check.Args(database.UpdateWorkspaceProxyDeletedParams{ID: p.ID, Deleted: true}).Asserts(p, policy.ActionDelete) + })) + s.Run("UpdateWorkspaceProxy", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + p := testutil.Fake(s.T(), faker, database.WorkspaceProxy{}) + dbm.EXPECT().GetWorkspaceProxyByID(gomock.Any(), p.ID).Return(p, nil).AnyTimes() + dbm.EXPECT().UpdateWorkspaceProxy(gomock.Any(), database.UpdateWorkspaceProxyParams{ID: p.ID}).Return(p, nil).AnyTimes() + check.Args(database.UpdateWorkspaceProxyParams{ID: p.ID}).Asserts(p, policy.ActionUpdate) + })) + s.Run("GetWorkspaceProxies", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + p1 := testutil.Fake(s.T(), faker, database.WorkspaceProxy{}) + p2 := testutil.Fake(s.T(), faker, database.WorkspaceProxy{}) + dbm.EXPECT().GetWorkspaceProxies(gomock.Any()).Return([]database.WorkspaceProxy{p1, p2}, nil).AnyTimes() check.Args().Asserts(p1, policy.ActionRead, p2, policy.ActionRead).Returns(slice.New(p1, p2)) })) } func (s *MethodTestSuite) TestTemplate() { - s.Run("GetPreviousTemplateVersion", s.Subtest(func(db database.Store, check *expects) { - tvid := uuid.New() - now := time.Now() - u := dbgen.User(s.T(), db, database.User{}) - o1 := dbgen.Organization(s.T(), db, database.Organization{}) - t1 := dbgen.Template(s.T(), db, database.Template{ - OrganizationID: o1.ID, - ActiveVersionID: tvid, - CreatedBy: u.ID, - }) - _ = dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - CreatedAt: now.Add(-time.Hour), - ID: tvid, - Name: t1.Name, - OrganizationID: o1.ID, - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - CreatedBy: u.ID, - }) - b := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - CreatedAt: now.Add(-2 * time.Hour), - Name: t1.Name + "b", - OrganizationID: o1.ID, - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - CreatedBy: u.ID, - }) - check.Args(database.GetPreviousTemplateVersionParams{ - Name: t1.Name, - OrganizationID: o1.ID, - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - }).Asserts(t1, policy.ActionRead).Returns(b) - })) - s.Run("GetTemplateByID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) + s.Run("GetPreviousTemplateVersion", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + b := testutil.Fake(s.T(), faker, database.TemplateVersion{TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}}) + arg := database.GetPreviousTemplateVersionParams{Name: b.Name, OrganizationID: t1.OrganizationID, TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}} + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().GetPreviousTemplateVersion(gomock.Any(), arg).Return(b, nil).AnyTimes() + check.Args(arg).Asserts(t1, policy.ActionRead).Returns(b) + })) + s.Run("GetTemplateByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() check.Args(t1.ID).Asserts(t1, policy.ActionRead).Returns(t1) })) - s.Run("GetTemplateByOrganizationAndName", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - o1 := dbgen.Organization(s.T(), db, database.Organization{}) - t1 := dbgen.Template(s.T(), db, database.Template{ - OrganizationID: o1.ID, - }) - check.Args(database.GetTemplateByOrganizationAndNameParams{ - Name: t1.Name, - OrganizationID: o1.ID, - }).Asserts(t1, policy.ActionRead).Returns(t1) - })) - s.Run("GetTemplateVersionByJobID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) - tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - }) + s.Run("GetTemplateByOrganizationAndName", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + arg := database.GetTemplateByOrganizationAndNameParams{Name: t1.Name, OrganizationID: t1.OrganizationID} + dbm.EXPECT().GetTemplateByOrganizationAndName(gomock.Any(), arg).Return(t1, nil).AnyTimes() + check.Args(arg).Asserts(t1, policy.ActionRead).Returns(t1) + })) + s.Run("GetTemplateVersionByJobID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + tv := testutil.Fake(s.T(), faker, database.TemplateVersion{TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}}) + dbm.EXPECT().GetTemplateVersionByJobID(gomock.Any(), tv.JobID).Return(tv, nil).AnyTimes() + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() check.Args(tv.JobID).Asserts(t1, policy.ActionRead).Returns(tv) })) - s.Run("GetTemplateVersionByTemplateIDAndName", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) - tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - }) - check.Args(database.GetTemplateVersionByTemplateIDAndNameParams{ - Name: tv.Name, - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - }).Asserts(t1, policy.ActionRead).Returns(tv) - })) - s.Run("GetTemplateVersionParameters", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) - tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - }) + s.Run("GetTemplateVersionByTemplateIDAndName", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + tv := testutil.Fake(s.T(), faker, database.TemplateVersion{TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}}) + arg := database.GetTemplateVersionByTemplateIDAndNameParams{Name: tv.Name, TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}} + dbm.EXPECT().GetTemplateVersionByTemplateIDAndName(gomock.Any(), arg).Return(tv, nil).AnyTimes() + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + check.Args(arg).Asserts(t1, policy.ActionRead).Returns(tv) + })) + s.Run("GetTemplateVersionParameters", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + tv := testutil.Fake(s.T(), faker, database.TemplateVersion{TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}}) + dbm.EXPECT().GetTemplateVersionByID(gomock.Any(), tv.ID).Return(tv, nil).AnyTimes() + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().GetTemplateVersionParameters(gomock.Any(), tv.ID).Return([]database.TemplateVersionParameter{}, nil).AnyTimes() check.Args(tv.ID).Asserts(t1, policy.ActionRead).Returns([]database.TemplateVersionParameter{}) })) - s.Run("GetTemplateVersionTerraformValues", s.Subtest(func(db database.Store, check *expects) { - o := dbgen.Organization(s.T(), db, database.Organization{}) - u := dbgen.User(s.T(), db, database.User{}) - _ = dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{OrganizationID: o.ID, UserID: u.ID}) - t := dbgen.Template(s.T(), db, database.Template{OrganizationID: o.ID, CreatedBy: u.ID}) - job := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{OrganizationID: o.ID}) - tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - OrganizationID: o.ID, - CreatedBy: u.ID, - JobID: job.ID, - TemplateID: uuid.NullUUID{UUID: t.ID, Valid: true}, - }) - dbgen.TemplateVersionTerraformValues(s.T(), db, database.TemplateVersionTerraformValue{ - TemplateVersionID: tv.ID, - }) + s.Run("GetTemplateVersionTerraformValues", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t := testutil.Fake(s.T(), faker, database.Template{}) + tv := testutil.Fake(s.T(), faker, database.TemplateVersion{TemplateID: uuid.NullUUID{UUID: t.ID, Valid: true}}) + val := testutil.Fake(s.T(), faker, database.TemplateVersionTerraformValue{TemplateVersionID: tv.ID}) + dbm.EXPECT().GetTemplateVersionByID(gomock.Any(), tv.ID).Return(tv, nil).AnyTimes() + dbm.EXPECT().GetTemplateByID(gomock.Any(), t.ID).Return(t, nil).AnyTimes() + dbm.EXPECT().GetTemplateVersionTerraformValues(gomock.Any(), tv.ID).Return(val, nil).AnyTimes() check.Args(tv.ID).Asserts(t, policy.ActionRead) })) - s.Run("GetTemplateVersionVariables", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) - tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - }) - tvv1 := dbgen.TemplateVersionVariable(s.T(), db, database.TemplateVersionVariable{ - TemplateVersionID: tv.ID, - }) + s.Run("GetTemplateVersionVariables", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + tv := testutil.Fake(s.T(), faker, database.TemplateVersion{TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}}) + tvv1 := testutil.Fake(s.T(), faker, database.TemplateVersionVariable{TemplateVersionID: tv.ID}) + dbm.EXPECT().GetTemplateVersionByID(gomock.Any(), tv.ID).Return(tv, nil).AnyTimes() + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().GetTemplateVersionVariables(gomock.Any(), tv.ID).Return([]database.TemplateVersionVariable{tvv1}, nil).AnyTimes() check.Args(tv.ID).Asserts(t1, policy.ActionRead).Returns([]database.TemplateVersionVariable{tvv1}) })) - s.Run("GetTemplateVersionWorkspaceTags", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) - tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - }) - wt1 := dbgen.TemplateVersionWorkspaceTag(s.T(), db, database.TemplateVersionWorkspaceTag{ - TemplateVersionID: tv.ID, - }) + s.Run("GetTemplateVersionWorkspaceTags", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + tv := testutil.Fake(s.T(), faker, database.TemplateVersion{TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}}) + wt1 := testutil.Fake(s.T(), faker, database.TemplateVersionWorkspaceTag{TemplateVersionID: tv.ID}) + dbm.EXPECT().GetTemplateVersionByID(gomock.Any(), tv.ID).Return(tv, nil).AnyTimes() + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().GetTemplateVersionWorkspaceTags(gomock.Any(), tv.ID).Return([]database.TemplateVersionWorkspaceTag{wt1}, nil).AnyTimes() check.Args(tv.ID).Asserts(t1, policy.ActionRead).Returns([]database.TemplateVersionWorkspaceTag{wt1}) })) - s.Run("GetTemplateGroupRoles", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) + s.Run("GetTemplateGroupRoles", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().GetTemplateGroupRoles(gomock.Any(), t1.ID).Return([]database.TemplateGroup{}, nil).AnyTimes() check.Args(t1.ID).Asserts(t1, policy.ActionUpdate) })) - s.Run("GetTemplateUserRoles", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) + s.Run("GetTemplateUserRoles", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().GetTemplateUserRoles(gomock.Any(), t1.ID).Return([]database.TemplateUser{}, nil).AnyTimes() check.Args(t1.ID).Asserts(t1, policy.ActionUpdate) })) - s.Run("GetTemplateVersionByID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) - tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - }) + s.Run("GetTemplateVersionByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + tv := testutil.Fake(s.T(), faker, database.TemplateVersion{TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}}) + dbm.EXPECT().GetTemplateVersionByID(gomock.Any(), tv.ID).Return(tv, nil).AnyTimes() + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() check.Args(tv.ID).Asserts(t1, policy.ActionRead).Returns(tv) })) - s.Run("GetTemplateVersionsByTemplateID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) - a := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - }) - b := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - }) - check.Args(database.GetTemplateVersionsByTemplateIDParams{ - TemplateID: t1.ID, - }).Asserts(t1, policy.ActionRead). - Returns(slice.New(a, b)) - })) - s.Run("GetTemplateVersionsCreatedAfter", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) + s.Run("Orphaned/GetTemplateVersionByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + tv := testutil.Fake(s.T(), faker, database.TemplateVersion{}) + // uuid.NullUUID{Valid: false} is a zero value. faker overwrites zero values + // with random data, so we need to set TemplateID after faker is done with it. + tv.TemplateID = uuid.NullUUID{Valid: false} + dbm.EXPECT().GetTemplateVersionByID(gomock.Any(), tv.ID).Return(tv, nil).AnyTimes() + check.Args(tv.ID).Asserts(tv.RBACObjectNoTemplate(), policy.ActionRead).Returns(tv) + })) + s.Run("GetTemplateVersionsByTemplateID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + a := testutil.Fake(s.T(), faker, database.TemplateVersion{TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}}) + b := testutil.Fake(s.T(), faker, database.TemplateVersion{TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}}) + arg := database.GetTemplateVersionsByTemplateIDParams{TemplateID: t1.ID} + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().GetTemplateVersionsByTemplateID(gomock.Any(), arg).Return([]database.TemplateVersion{a, b}, nil).AnyTimes() + check.Args(arg).Asserts(t1, policy.ActionRead).Returns(slice.New(a, b)) + })) + s.Run("GetTemplateVersionsCreatedAfter", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { now := time.Now() - t1 := dbgen.Template(s.T(), db, database.Template{}) - _ = dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - CreatedAt: now.Add(-time.Hour), - }) - _ = dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - CreatedAt: now.Add(-2 * time.Hour), - }) + dbm.EXPECT().GetTemplateVersionsCreatedAfter(gomock.Any(), now.Add(-time.Hour)).Return([]database.TemplateVersion{}, nil).AnyTimes() check.Args(now.Add(-time.Hour)).Asserts(rbac.ResourceTemplate.All(), policy.ActionRead) })) - s.Run("GetTemplatesWithFilter", s.Subtest(func(db database.Store, check *expects) { - o := dbgen.Organization(s.T(), db, database.Organization{}) - u := dbgen.User(s.T(), db, database.User{}) - a := dbgen.Template(s.T(), db, database.Template{ - OrganizationID: o.ID, - CreatedBy: u.ID, - }) - // No asserts because SQLFilter. - check.Args(database.GetTemplatesWithFilterParams{}). - Asserts().Returns(slice.New(a)) + s.Run("GetTemplateVersionHasAITask", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t := testutil.Fake(s.T(), faker, database.Template{}) + tv := testutil.Fake(s.T(), faker, database.TemplateVersion{TemplateID: uuid.NullUUID{UUID: t.ID, Valid: true}}) + dbm.EXPECT().GetTemplateVersionByID(gomock.Any(), tv.ID).Return(tv, nil).AnyTimes() + dbm.EXPECT().GetTemplateByID(gomock.Any(), t.ID).Return(t, nil).AnyTimes() + dbm.EXPECT().GetTemplateVersionHasAITask(gomock.Any(), tv.ID).Return(false, nil).AnyTimes() + check.Args(tv.ID).Asserts(t, policy.ActionRead) })) - s.Run("GetAuthorizedTemplates", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - a := dbgen.Template(s.T(), db, database.Template{}) + s.Run("GetTemplatesWithFilter", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + a := testutil.Fake(s.T(), faker, database.Template{}) + arg := database.GetTemplatesWithFilterParams{} + dbm.EXPECT().GetAuthorizedTemplates(gomock.Any(), arg, gomock.Any()).Return([]database.Template{a}, nil).AnyTimes() // No asserts because SQLFilter. - check.Args(database.GetTemplatesWithFilterParams{}, emptyPreparedAuthorized{}). - Asserts(). - Returns(slice.New(a)) - })) - s.Run("InsertTemplate", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - orgID := uuid.New() - check.Args(database.InsertTemplateParams{ - Provisioner: "echo", - OrganizationID: orgID, - MaxPortSharingLevel: database.AppSharingLevelOwner, - }).Asserts(rbac.ResourceTemplate.InOrg(orgID), policy.ActionCreate) - })) - s.Run("InsertTemplateVersion", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) - check.Args(database.InsertTemplateVersionParams{ - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - OrganizationID: t1.OrganizationID, - }).Asserts(t1, policy.ActionRead, t1, policy.ActionCreate) + check.Args(arg).Asserts().Returns(slice.New(a)) })) - s.Run("InsertTemplateVersionTerraformValuesByJobID", s.Subtest(func(db database.Store, check *expects) { - o := dbgen.Organization(s.T(), db, database.Organization{}) - u := dbgen.User(s.T(), db, database.User{}) - _ = dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{OrganizationID: o.ID, UserID: u.ID}) - t := dbgen.Template(s.T(), db, database.Template{OrganizationID: o.ID, CreatedBy: u.ID}) - job := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{OrganizationID: o.ID}) - _ = dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - OrganizationID: o.ID, - CreatedBy: u.ID, - JobID: job.ID, - TemplateID: uuid.NullUUID{UUID: t.ID, Valid: true}, - }) - check.Args(database.InsertTemplateVersionTerraformValuesByJobIDParams{ - JobID: job.ID, - CachedPlan: []byte("{}"), - }).Asserts(rbac.ResourceSystem, policy.ActionCreate) - })) - s.Run("SoftDeleteTemplateByID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) + s.Run("GetAuthorizedTemplates", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + a := testutil.Fake(s.T(), faker, database.Template{}) + arg := database.GetTemplatesWithFilterParams{} + dbm.EXPECT().GetAuthorizedTemplates(gomock.Any(), arg, gomock.Any()).Return([]database.Template{a}, nil).AnyTimes() + // No asserts because SQLFilter. + check.Args(arg, emptyPreparedAuthorized{}).Asserts().Returns(slice.New(a)) + })) + s.Run("InsertTemplate", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.InsertTemplateParams{OrganizationID: uuid.New()} + dbm.EXPECT().InsertTemplate(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceTemplate.InOrg(arg.OrganizationID), policy.ActionCreate) + })) + s.Run("InsertTemplateVersion", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + arg := database.InsertTemplateVersionParams{TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, OrganizationID: t1.OrganizationID} + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().InsertTemplateVersion(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(t1, policy.ActionRead, t1, policy.ActionCreate) + })) + s.Run("InsertTemplateVersionTerraformValuesByJobID", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + job := uuid.New() + arg := database.InsertTemplateVersionTerraformValuesByJobIDParams{JobID: job, CachedPlan: []byte("{}")} + dbm.EXPECT().InsertTemplateVersionTerraformValuesByJobID(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceSystem, policy.ActionCreate) + })) + s.Run("SoftDeleteTemplateByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().UpdateTemplateDeletedByID(gomock.Any(), gomock.AssignableToTypeOf(database.UpdateTemplateDeletedByIDParams{})).Return(nil).AnyTimes() check.Args(t1.ID).Asserts(t1, policy.ActionDelete) })) - s.Run("UpdateTemplateACLByID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) - check.Args(database.UpdateTemplateACLByIDParams{ - ID: t1.ID, - }).Asserts(t1, policy.ActionCreate) - })) - s.Run("UpdateTemplateAccessControlByID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) - check.Args(database.UpdateTemplateAccessControlByIDParams{ - ID: t1.ID, - }).Asserts(t1, policy.ActionUpdate) - })) - s.Run("UpdateTemplateScheduleByID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) - check.Args(database.UpdateTemplateScheduleByIDParams{ - ID: t1.ID, - }).Asserts(t1, policy.ActionUpdate) - })) - s.Run("UpdateTemplateVersionAITaskByJobID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - o := dbgen.Organization(s.T(), db, database.Organization{}) - u := dbgen.User(s.T(), db, database.User{}) - _ = dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{OrganizationID: o.ID, UserID: u.ID}) - t := dbgen.Template(s.T(), db, database.Template{OrganizationID: o.ID, CreatedBy: u.ID}) - job := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{OrganizationID: o.ID}) - _ = dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - OrganizationID: o.ID, - CreatedBy: u.ID, - JobID: job.ID, - TemplateID: uuid.NullUUID{UUID: t.ID, Valid: true}, - }) - check.Args(database.UpdateTemplateVersionAITaskByJobIDParams{ - JobID: job.ID, - HasAITask: sql.NullBool{Bool: true, Valid: true}, - }).Asserts(t, policy.ActionUpdate) - })) - s.Run("UpdateTemplateWorkspacesLastUsedAt", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) - check.Args(database.UpdateTemplateWorkspacesLastUsedAtParams{ - TemplateID: t1.ID, - }).Asserts(t1, policy.ActionUpdate) - })) - s.Run("UpdateWorkspacesDormantDeletingAtByTemplateID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) - check.Args(database.UpdateWorkspacesDormantDeletingAtByTemplateIDParams{ - TemplateID: t1.ID, - }).Asserts(t1, policy.ActionUpdate) - })) - s.Run("UpdateWorkspacesTTLByTemplateID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) - check.Args(database.UpdateWorkspacesTTLByTemplateIDParams{ - TemplateID: t1.ID, - }).Asserts(t1, policy.ActionUpdate) - })) - s.Run("UpdateTemplateActiveVersionByID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{ - ActiveVersionID: uuid.New(), - }) - tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - ID: t1.ActiveVersionID, - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - }) - check.Args(database.UpdateTemplateActiveVersionByIDParams{ - ID: t1.ID, - ActiveVersionID: tv.ID, - }).Asserts(t1, policy.ActionUpdate).Returns() - })) - s.Run("UpdateTemplateDeletedByID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) - check.Args(database.UpdateTemplateDeletedByIDParams{ - ID: t1.ID, - Deleted: true, - }).Asserts(t1, policy.ActionDelete).Returns() - })) - s.Run("UpdateTemplateMetaByID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) - check.Args(database.UpdateTemplateMetaByIDParams{ - ID: t1.ID, - MaxPortSharingLevel: "owner", - }).Asserts(t1, policy.ActionUpdate) - })) - s.Run("UpdateTemplateVersionByID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - t1 := dbgen.Template(s.T(), db, database.Template{}) - tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - }) - check.Args(database.UpdateTemplateVersionByIDParams{ - ID: tv.ID, - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - Name: tv.Name, - UpdatedAt: tv.UpdatedAt, - }).Asserts(t1, policy.ActionUpdate) - })) - s.Run("UpdateTemplateVersionDescriptionByJobID", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - jobID := uuid.New() - t1 := dbgen.Template(s.T(), db, database.Template{}) - _ = dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - JobID: jobID, - }) - check.Args(database.UpdateTemplateVersionDescriptionByJobIDParams{ - JobID: jobID, - Readme: "foo", - }).Asserts(t1, policy.ActionUpdate).Returns() - })) - s.Run("UpdateTemplateVersionExternalAuthProvidersByJobID", s.Subtest(func(db database.Store, check *expects) { - jobID := uuid.New() - u := dbgen.User(s.T(), db, database.User{}) - o := dbgen.Organization(s.T(), db, database.Organization{}) - t1 := dbgen.Template(s.T(), db, database.Template{ - OrganizationID: o.ID, - CreatedBy: u.ID, - }) - _ = dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ - TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - CreatedBy: u.ID, - OrganizationID: o.ID, - JobID: jobID, - }) - check.Args(database.UpdateTemplateVersionExternalAuthProvidersByJobIDParams{ - JobID: jobID, - ExternalAuthProviders: json.RawMessage("{}"), - }).Asserts(t1, policy.ActionUpdate).Returns() - })) - s.Run("GetTemplateInsights", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateInsightsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) - })) - s.Run("GetUserLatencyInsights", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetUserLatencyInsightsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) - })) - s.Run("GetUserActivityInsights", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetUserActivityInsightsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights). - ErrorsWithInMemDB(sql.ErrNoRows). - Returns([]database.GetUserActivityInsightsRow{}) - })) - s.Run("GetTemplateParameterInsights", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateParameterInsightsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) - })) - s.Run("GetTemplateInsightsByInterval", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateInsightsByIntervalParams{ - IntervalDays: 7, - StartTime: dbtime.Now().Add(-time.Hour * 24 * 7), - EndTime: dbtime.Now(), - }).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) - })) - s.Run("GetTemplateInsightsByTemplate", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateInsightsByTemplateParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) - })) - s.Run("GetTemplateAppInsights", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateAppInsightsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) - })) - s.Run("GetTemplateAppInsightsByTemplate", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateAppInsightsByTemplateParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) - })) - s.Run("GetTemplateUsageStats", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateUsageStatsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights). - ErrorsWithInMemDB(sql.ErrNoRows). - Returns([]database.TemplateUsageStat{}) - })) - s.Run("UpsertTemplateUsageStats", s.Subtest(func(db database.Store, check *expects) { + s.Run("UpdateTemplateACLByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + arg := database.UpdateTemplateACLByIDParams{ID: t1.ID} + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().UpdateTemplateACLByID(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(t1, policy.ActionCreate) + })) + s.Run("UpdateTemplateAccessControlByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + arg := database.UpdateTemplateAccessControlByIDParams{ID: t1.ID} + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().UpdateTemplateAccessControlByID(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(t1, policy.ActionUpdate) + })) + s.Run("UpdateTemplateScheduleByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + arg := database.UpdateTemplateScheduleByIDParams{ID: t1.ID} + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().UpdateTemplateScheduleByID(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(t1, policy.ActionUpdate) + })) + s.Run("UpdateTemplateVersionFlagsByJobID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t := testutil.Fake(s.T(), faker, database.Template{}) + tv := testutil.Fake(s.T(), faker, database.TemplateVersion{TemplateID: uuid.NullUUID{UUID: t.ID, Valid: true}}) + arg := database.UpdateTemplateVersionFlagsByJobIDParams{JobID: tv.JobID, HasAITask: sql.NullBool{Bool: true, Valid: true}, HasExternalAgent: sql.NullBool{Bool: true, Valid: true}} + dbm.EXPECT().GetTemplateVersionByJobID(gomock.Any(), tv.JobID).Return(tv, nil).AnyTimes() + dbm.EXPECT().GetTemplateByID(gomock.Any(), t.ID).Return(t, nil).AnyTimes() + dbm.EXPECT().UpdateTemplateVersionFlagsByJobID(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(t, policy.ActionUpdate) + })) + s.Run("UpdateTemplateWorkspacesLastUsedAt", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + arg := database.UpdateTemplateWorkspacesLastUsedAtParams{TemplateID: t1.ID} + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().UpdateTemplateWorkspacesLastUsedAt(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(t1, policy.ActionUpdate) + })) + s.Run("UpdateWorkspacesDormantDeletingAtByTemplateID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + arg := database.UpdateWorkspacesDormantDeletingAtByTemplateIDParams{TemplateID: t1.ID} + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().UpdateWorkspacesDormantDeletingAtByTemplateID(gomock.Any(), arg).Return([]database.WorkspaceTable{}, nil).AnyTimes() + check.Args(arg).Asserts(t1, policy.ActionUpdate) + })) + s.Run("UpdateWorkspacesTTLByTemplateID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + arg := database.UpdateWorkspacesTTLByTemplateIDParams{TemplateID: t1.ID} + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().UpdateWorkspacesTTLByTemplateID(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(t1, policy.ActionUpdate) + })) + s.Run("UpdateTemplateActiveVersionByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{ActiveVersionID: uuid.New()}) + tv := testutil.Fake(s.T(), faker, database.TemplateVersion{ID: t1.ActiveVersionID, TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}}) + arg := database.UpdateTemplateActiveVersionByIDParams{ID: t1.ID, ActiveVersionID: tv.ID} + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().UpdateTemplateActiveVersionByID(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(t1, policy.ActionUpdate).Returns() + })) + s.Run("UpdateTemplateDeletedByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + arg := database.UpdateTemplateDeletedByIDParams{ID: t1.ID, Deleted: true} + // The method delegates to SoftDeleteTemplateByID, which fetches then updates. + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().UpdateTemplateDeletedByID(gomock.Any(), gomock.AssignableToTypeOf(database.UpdateTemplateDeletedByIDParams{})).Return(nil).AnyTimes() + check.Args(arg).Asserts(t1, policy.ActionDelete).Returns() + })) + s.Run("UpdateTemplateMetaByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + arg := database.UpdateTemplateMetaByIDParams{ID: t1.ID, MaxPortSharingLevel: "owner", CorsBehavior: database.CorsBehaviorSimple} + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().UpdateTemplateMetaByID(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(t1, policy.ActionUpdate) + })) + s.Run("UpdateTemplateVersionByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + tv := testutil.Fake(s.T(), faker, database.TemplateVersion{TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}}) + arg := database.UpdateTemplateVersionByIDParams{ID: tv.ID, TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, Name: tv.Name, UpdatedAt: tv.UpdatedAt} + dbm.EXPECT().GetTemplateVersionByID(gomock.Any(), tv.ID).Return(tv, nil).AnyTimes() + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().UpdateTemplateVersionByID(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(t1, policy.ActionUpdate) + })) + s.Run("UpdateTemplateVersionDescriptionByJobID", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + tv := database.TemplateVersion{ID: uuid.New(), JobID: uuid.New(), TemplateID: uuid.NullUUID{UUID: uuid.New(), Valid: true}} + t1 := database.Template{ID: tv.TemplateID.UUID} + arg := database.UpdateTemplateVersionDescriptionByJobIDParams{JobID: tv.JobID, Readme: "foo"} + dbm.EXPECT().GetTemplateVersionByJobID(gomock.Any(), tv.JobID).Return(tv, nil).AnyTimes() + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().UpdateTemplateVersionDescriptionByJobID(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(t1, policy.ActionUpdate).Returns() + })) + s.Run("UpdateTemplateVersionExternalAuthProvidersByJobID", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + tv := database.TemplateVersion{ID: uuid.New(), JobID: uuid.New(), TemplateID: uuid.NullUUID{UUID: uuid.New(), Valid: true}} + t1 := database.Template{ID: tv.TemplateID.UUID} + arg := database.UpdateTemplateVersionExternalAuthProvidersByJobIDParams{JobID: tv.JobID, ExternalAuthProviders: json.RawMessage("{}")} + dbm.EXPECT().GetTemplateVersionByJobID(gomock.Any(), tv.JobID).Return(tv, nil).AnyTimes() + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + dbm.EXPECT().UpdateTemplateVersionExternalAuthProvidersByJobID(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(t1, policy.ActionUpdate).Returns() + })) + s.Run("GetTemplateInsights", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.GetTemplateInsightsParams{} + dbm.EXPECT().GetTemplateInsights(gomock.Any(), arg).Return(database.GetTemplateInsightsRow{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) + })) + s.Run("GetUserLatencyInsights", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.GetUserLatencyInsightsParams{} + dbm.EXPECT().GetUserLatencyInsights(gomock.Any(), arg).Return([]database.GetUserLatencyInsightsRow{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) + })) + s.Run("GetUserActivityInsights", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.GetUserActivityInsightsParams{} + dbm.EXPECT().GetUserActivityInsights(gomock.Any(), arg).Return([]database.GetUserActivityInsightsRow{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights).Returns([]database.GetUserActivityInsightsRow{}) + })) + s.Run("GetTemplateParameterInsights", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.GetTemplateParameterInsightsParams{} + dbm.EXPECT().GetTemplateParameterInsights(gomock.Any(), arg).Return([]database.GetTemplateParameterInsightsRow{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) + })) + s.Run("GetTemplateInsightsByInterval", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.GetTemplateInsightsByIntervalParams{IntervalDays: 7, StartTime: dbtime.Now().Add(-time.Hour * 24 * 7), EndTime: dbtime.Now()} + dbm.EXPECT().GetTemplateInsightsByInterval(gomock.Any(), arg).Return([]database.GetTemplateInsightsByIntervalRow{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) + })) + s.Run("GetTemplateInsightsByTemplate", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.GetTemplateInsightsByTemplateParams{} + dbm.EXPECT().GetTemplateInsightsByTemplate(gomock.Any(), arg).Return([]database.GetTemplateInsightsByTemplateRow{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) + })) + s.Run("GetTemplateAppInsights", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.GetTemplateAppInsightsParams{} + dbm.EXPECT().GetTemplateAppInsights(gomock.Any(), arg).Return([]database.GetTemplateAppInsightsRow{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) + })) + s.Run("GetTemplateAppInsightsByTemplate", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.GetTemplateAppInsightsByTemplateParams{} + dbm.EXPECT().GetTemplateAppInsightsByTemplate(gomock.Any(), arg).Return([]database.GetTemplateAppInsightsByTemplateRow{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) + })) + s.Run("GetTemplateUsageStats", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.GetTemplateUsageStatsParams{} + dbm.EXPECT().GetTemplateUsageStats(gomock.Any(), arg).Return([]database.TemplateUsageStat{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights).Returns([]database.TemplateUsageStat{}) + })) + s.Run("UpsertTemplateUsageStats", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().UpsertTemplateUsageStats(gomock.Any()).Return(nil).AnyTimes() check.Asserts(rbac.ResourceSystem, policy.ActionUpdate) })) } func (s *MethodTestSuite) TestUser() { - s.Run("GetAuthorizedUsers", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - dbgen.User(s.T(), db, database.User{}) + s.Run("GetAuthorizedUsers", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.GetUsersParams{} + dbm.EXPECT().GetAuthorizedUsers(gomock.Any(), arg, gomock.Any()).Return([]database.GetUsersRow{}, nil).AnyTimes() // No asserts because SQLFilter. - check.Args(database.GetUsersParams{}, emptyPreparedAuthorized{}). - Asserts() - })) - s.Run("DeleteAPIKeysByUserID", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - check.Args(u.ID).Asserts(rbac.ResourceApiKey.WithOwner(u.ID.String()), policy.ActionDelete).Returns() - })) - s.Run("GetQuotaAllowanceForUser", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - check.Args(database.GetQuotaAllowanceForUserParams{ - UserID: u.ID, - OrganizationID: uuid.New(), - }).Asserts(u, policy.ActionRead).Returns(int64(0)) - })) - s.Run("GetQuotaConsumedForUser", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - check.Args(database.GetQuotaConsumedForUserParams{ - OwnerID: u.ID, - OrganizationID: uuid.New(), - }).Asserts(u, policy.ActionRead).Returns(int64(0)) - })) - s.Run("GetUserByEmailOrUsername", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - check.Args(database.GetUserByEmailOrUsernameParams{ - Username: u.Username, - Email: u.Email, - }).Asserts(u, policy.ActionRead).Returns(u) - })) - s.Run("GetUserByID", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) + check.Args(arg, emptyPreparedAuthorized{}).Asserts() + })) + s.Run("DeleteAPIKeysByUserID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + key := testutil.Fake(s.T(), faker, database.APIKey{}) + dbm.EXPECT().DeleteAPIKeysByUserID(gomock.Any(), key.UserID).Return(nil).AnyTimes() + check.Args(key.UserID).Asserts(rbac.ResourceApiKey.WithOwner(key.UserID.String()), policy.ActionDelete).Returns() + })) + s.Run("GetQuotaAllowanceForUser", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + arg := database.GetQuotaAllowanceForUserParams{UserID: u.ID, OrganizationID: uuid.New()} + dbm.EXPECT().GetQuotaAllowanceForUser(gomock.Any(), arg).Return(int64(0), nil).AnyTimes() + check.Args(arg).Asserts(u, policy.ActionRead).Returns(int64(0)) + })) + s.Run("GetQuotaConsumedForUser", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + arg := database.GetQuotaConsumedForUserParams{OwnerID: u.ID, OrganizationID: uuid.New()} + dbm.EXPECT().GetQuotaConsumedForUser(gomock.Any(), arg).Return(int64(0), nil).AnyTimes() + check.Args(arg).Asserts(u, policy.ActionRead).Returns(int64(0)) + })) + s.Run("GetUserByEmailOrUsername", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + arg := database.GetUserByEmailOrUsernameParams{Email: u.Email} + dbm.EXPECT().GetUserByEmailOrUsername(gomock.Any(), arg).Return(u, nil).AnyTimes() + check.Args(arg).Asserts(u, policy.ActionRead).Returns(u) + })) + s.Run("GetUserByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + dbm.EXPECT().GetUserByID(gomock.Any(), u.ID).Return(u, nil).AnyTimes() check.Args(u.ID).Asserts(u, policy.ActionRead).Returns(u) })) - s.Run("GetUsersByIDs", s.Subtest(func(db database.Store, check *expects) { - a := dbgen.User(s.T(), db, database.User{CreatedAt: dbtime.Now().Add(-time.Hour)}) - b := dbgen.User(s.T(), db, database.User{CreatedAt: dbtime.Now()}) - check.Args([]uuid.UUID{a.ID, b.ID}). - Asserts(a, policy.ActionRead, b, policy.ActionRead). - Returns(slice.New(a, b)) - })) - s.Run("GetUsers", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - dbgen.User(s.T(), db, database.User{Username: "GetUsers-a-user"}) - dbgen.User(s.T(), db, database.User{Username: "GetUsers-b-user"}) - check.Args(database.GetUsersParams{}). - // Asserts are done in a SQL filter - Asserts() - })) - s.Run("InsertUser", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.InsertUserParams{ - ID: uuid.New(), - LoginType: database.LoginTypePassword, - RBACRoles: []string{}, - }).Asserts(rbac.ResourceAssignRole, policy.ActionAssign, rbac.ResourceUser, policy.ActionCreate) - })) - s.Run("InsertUserLink", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - check.Args(database.InsertUserLinkParams{ - UserID: u.ID, - LoginType: database.LoginTypeOIDC, - }).Asserts(u, policy.ActionUpdate) - })) - s.Run("UpdateUserDeletedByID", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) + s.Run("GetUsersByIDs", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + a := testutil.Fake(s.T(), faker, database.User{CreatedAt: dbtime.Now().Add(-time.Hour)}) + b := testutil.Fake(s.T(), faker, database.User{CreatedAt: dbtime.Now()}) + ids := []uuid.UUID{a.ID, b.ID} + dbm.EXPECT().GetUsersByIDs(gomock.Any(), ids).Return([]database.User{a, b}, nil).AnyTimes() + check.Args(ids).Asserts(a, policy.ActionRead, b, policy.ActionRead).Returns(slice.New(a, b)) + })) + s.Run("GetUsers", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.GetUsersParams{} + dbm.EXPECT().GetAuthorizedUsers(gomock.Any(), arg, gomock.Any()).Return([]database.GetUsersRow{}, nil).AnyTimes() + // Asserts are done in a SQL filter + check.Args(arg).Asserts() + })) + s.Run("InsertUser", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.InsertUserParams{ID: uuid.New(), LoginType: database.LoginTypePassword, RBACRoles: []string{}} + dbm.EXPECT().InsertUser(gomock.Any(), arg).Return(database.User{ID: arg.ID, LoginType: arg.LoginType}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceAssignRole, policy.ActionAssign, rbac.ResourceUser, policy.ActionCreate) + })) + s.Run("InsertUserLink", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + arg := database.InsertUserLinkParams{UserID: u.ID, LoginType: database.LoginTypeOIDC} + dbm.EXPECT().InsertUserLink(gomock.Any(), arg).Return(database.UserLink{}, nil).AnyTimes() + check.Args(arg).Asserts(u, policy.ActionUpdate) + })) + s.Run("UpdateUserDeletedByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + dbm.EXPECT().GetUserByID(gomock.Any(), u.ID).Return(u, nil).AnyTimes() + dbm.EXPECT().UpdateUserDeletedByID(gomock.Any(), u.ID).Return(nil).AnyTimes() check.Args(u.ID).Asserts(u, policy.ActionDelete).Returns() })) - s.Run("UpdateUserGithubComUserID", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - check.Args(database.UpdateUserGithubComUserIDParams{ - ID: u.ID, - }).Asserts(u, policy.ActionUpdatePersonal) - })) - s.Run("UpdateUserHashedPassword", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - check.Args(database.UpdateUserHashedPasswordParams{ - ID: u.ID, - }).Asserts(u, policy.ActionUpdatePersonal).Returns() - })) - s.Run("UpdateUserHashedOneTimePasscode", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - check.Args(database.UpdateUserHashedOneTimePasscodeParams{ - ID: u.ID, - HashedOneTimePasscode: []byte{}, - OneTimePasscodeExpiresAt: sql.NullTime{Time: u.CreatedAt, Valid: true}, - }).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns() - })) - s.Run("UpdateUserQuietHoursSchedule", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - check.Args(database.UpdateUserQuietHoursScheduleParams{ - ID: u.ID, - }).Asserts(u, policy.ActionUpdatePersonal) - })) - s.Run("UpdateUserLastSeenAt", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - check.Args(database.UpdateUserLastSeenAtParams{ - ID: u.ID, - UpdatedAt: u.UpdatedAt, - LastSeenAt: u.LastSeenAt, - }).Asserts(u, policy.ActionUpdate).Returns(u) - })) - s.Run("UpdateUserProfile", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - check.Args(database.UpdateUserProfileParams{ - ID: u.ID, - Email: u.Email, - Username: u.Username, - Name: u.Name, - UpdatedAt: u.UpdatedAt, - }).Asserts(u, policy.ActionUpdatePersonal).Returns(u) - })) - s.Run("GetUserWorkspaceBuildParameters", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - check.Args( - database.GetUserWorkspaceBuildParametersParams{ - OwnerID: u.ID, - TemplateID: uuid.UUID{}, - }, - ).Asserts(u, policy.ActionReadPersonal).Returns( - []database.GetUserWorkspaceBuildParametersRow{}, - ) - })) - s.Run("GetUserThemePreference", s.Subtest(func(db database.Store, check *expects) { - ctx := context.Background() - u := dbgen.User(s.T(), db, database.User{}) - db.UpdateUserThemePreference(ctx, database.UpdateUserThemePreferenceParams{ - UserID: u.ID, - ThemePreference: "light", - }) + s.Run("UpdateUserGithubComUserID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + arg := database.UpdateUserGithubComUserIDParams{ID: u.ID} + dbm.EXPECT().GetUserByID(gomock.Any(), u.ID).Return(u, nil).AnyTimes() + dbm.EXPECT().UpdateUserGithubComUserID(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(u, policy.ActionUpdatePersonal) + })) + s.Run("UpdateUserHashedPassword", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + arg := database.UpdateUserHashedPasswordParams{ID: u.ID} + dbm.EXPECT().GetUserByID(gomock.Any(), u.ID).Return(u, nil).AnyTimes() + dbm.EXPECT().UpdateUserHashedPassword(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(u, policy.ActionUpdatePersonal).Returns() + })) + s.Run("UpdateUserHashedOneTimePasscode", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + arg := database.UpdateUserHashedOneTimePasscodeParams{ID: u.ID} + dbm.EXPECT().UpdateUserHashedOneTimePasscode(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns() + })) + s.Run("UpdateUserQuietHoursSchedule", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + arg := database.UpdateUserQuietHoursScheduleParams{ID: u.ID} + dbm.EXPECT().GetUserByID(gomock.Any(), u.ID).Return(u, nil).AnyTimes() + dbm.EXPECT().UpdateUserQuietHoursSchedule(gomock.Any(), arg).Return(database.User{}, nil).AnyTimes() + check.Args(arg).Asserts(u, policy.ActionUpdatePersonal) + })) + s.Run("UpdateUserLastSeenAt", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + arg := database.UpdateUserLastSeenAtParams{ID: u.ID, UpdatedAt: u.UpdatedAt, LastSeenAt: u.LastSeenAt} + dbm.EXPECT().GetUserByID(gomock.Any(), u.ID).Return(u, nil).AnyTimes() + dbm.EXPECT().UpdateUserLastSeenAt(gomock.Any(), arg).Return(u, nil).AnyTimes() + check.Args(arg).Asserts(u, policy.ActionUpdate).Returns(u) + })) + s.Run("UpdateUserProfile", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + arg := database.UpdateUserProfileParams{ID: u.ID, Email: u.Email, Username: u.Username, Name: u.Name, UpdatedAt: u.UpdatedAt} + dbm.EXPECT().GetUserByID(gomock.Any(), u.ID).Return(u, nil).AnyTimes() + dbm.EXPECT().UpdateUserProfile(gomock.Any(), arg).Return(u, nil).AnyTimes() + check.Args(arg).Asserts(u, policy.ActionUpdatePersonal).Returns(u) + })) + s.Run("GetUserWorkspaceBuildParameters", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + arg := database.GetUserWorkspaceBuildParametersParams{OwnerID: u.ID, TemplateID: uuid.Nil} + dbm.EXPECT().GetUserByID(gomock.Any(), u.ID).Return(u, nil).AnyTimes() + dbm.EXPECT().GetUserWorkspaceBuildParameters(gomock.Any(), arg).Return([]database.GetUserWorkspaceBuildParametersRow{}, nil).AnyTimes() + check.Args(arg).Asserts(u, policy.ActionReadPersonal).Returns([]database.GetUserWorkspaceBuildParametersRow{}) + })) + s.Run("GetUserThemePreference", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + dbm.EXPECT().GetUserByID(gomock.Any(), u.ID).Return(u, nil).AnyTimes() + dbm.EXPECT().GetUserThemePreference(gomock.Any(), u.ID).Return("light", nil).AnyTimes() check.Args(u.ID).Asserts(u, policy.ActionReadPersonal).Returns("light") })) - s.Run("UpdateUserThemePreference", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - uc := database.UserConfig{ - UserID: u.ID, - Key: "theme_preference", - Value: "dark", - } - check.Args(database.UpdateUserThemePreferenceParams{ - UserID: u.ID, - ThemePreference: uc.Value, - }).Asserts(u, policy.ActionUpdatePersonal).Returns(uc) - })) - s.Run("GetUserTerminalFont", s.Subtest(func(db database.Store, check *expects) { - ctx := context.Background() - u := dbgen.User(s.T(), db, database.User{}) - db.UpdateUserTerminalFont(ctx, database.UpdateUserTerminalFontParams{ - UserID: u.ID, - TerminalFont: "ibm-plex-mono", - }) + s.Run("UpdateUserThemePreference", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + uc := database.UserConfig{UserID: u.ID, Key: "theme_preference", Value: "dark"} + arg := database.UpdateUserThemePreferenceParams{UserID: u.ID, ThemePreference: uc.Value} + dbm.EXPECT().GetUserByID(gomock.Any(), u.ID).Return(u, nil).AnyTimes() + dbm.EXPECT().UpdateUserThemePreference(gomock.Any(), arg).Return(uc, nil).AnyTimes() + check.Args(arg).Asserts(u, policy.ActionUpdatePersonal).Returns(uc) + })) + s.Run("GetUserTerminalFont", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + dbm.EXPECT().GetUserByID(gomock.Any(), u.ID).Return(u, nil).AnyTimes() + dbm.EXPECT().GetUserTerminalFont(gomock.Any(), u.ID).Return("ibm-plex-mono", nil).AnyTimes() check.Args(u.ID).Asserts(u, policy.ActionReadPersonal).Returns("ibm-plex-mono") })) - s.Run("UpdateUserTerminalFont", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - uc := database.UserConfig{ - UserID: u.ID, - Key: "terminal_font", - Value: "ibm-plex-mono", - } - check.Args(database.UpdateUserTerminalFontParams{ - UserID: u.ID, - TerminalFont: uc.Value, - }).Asserts(u, policy.ActionUpdatePersonal).Returns(uc) - })) - s.Run("UpdateUserStatus", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - check.Args(database.UpdateUserStatusParams{ - ID: u.ID, - Status: u.Status, - UpdatedAt: u.UpdatedAt, - }).Asserts(u, policy.ActionUpdate).Returns(u) - })) - s.Run("DeleteGitSSHKey", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - key := dbgen.GitSSHKey(s.T(), db, database.GitSSHKey{}) + s.Run("UpdateUserTerminalFont", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + uc := database.UserConfig{UserID: u.ID, Key: "terminal_font", Value: "ibm-plex-mono"} + arg := database.UpdateUserTerminalFontParams{UserID: u.ID, TerminalFont: uc.Value} + dbm.EXPECT().GetUserByID(gomock.Any(), u.ID).Return(u, nil).AnyTimes() + dbm.EXPECT().UpdateUserTerminalFont(gomock.Any(), arg).Return(uc, nil).AnyTimes() + check.Args(arg).Asserts(u, policy.ActionUpdatePersonal).Returns(uc) + })) + s.Run("UpdateUserStatus", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + arg := database.UpdateUserStatusParams{ID: u.ID, Status: u.Status, UpdatedAt: u.UpdatedAt} + dbm.EXPECT().GetUserByID(gomock.Any(), u.ID).Return(u, nil).AnyTimes() + dbm.EXPECT().UpdateUserStatus(gomock.Any(), arg).Return(u, nil).AnyTimes() + check.Args(arg).Asserts(u, policy.ActionUpdate).Returns(u) + })) + s.Run("DeleteGitSSHKey", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + key := testutil.Fake(s.T(), faker, database.GitSSHKey{}) + dbm.EXPECT().GetGitSSHKey(gomock.Any(), key.UserID).Return(key, nil).AnyTimes() + dbm.EXPECT().DeleteGitSSHKey(gomock.Any(), key.UserID).Return(nil).AnyTimes() check.Args(key.UserID).Asserts(rbac.ResourceUserObject(key.UserID), policy.ActionUpdatePersonal).Returns() })) - s.Run("GetGitSSHKey", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - key := dbgen.GitSSHKey(s.T(), db, database.GitSSHKey{}) + s.Run("GetGitSSHKey", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + key := testutil.Fake(s.T(), faker, database.GitSSHKey{}) + dbm.EXPECT().GetGitSSHKey(gomock.Any(), key.UserID).Return(key, nil).AnyTimes() check.Args(key.UserID).Asserts(rbac.ResourceUserObject(key.UserID), policy.ActionReadPersonal).Returns(key) })) - s.Run("InsertGitSSHKey", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - check.Args(database.InsertGitSSHKeyParams{ - UserID: u.ID, - }).Asserts(u, policy.ActionUpdatePersonal) - })) - s.Run("UpdateGitSSHKey", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - key := dbgen.GitSSHKey(s.T(), db, database.GitSSHKey{}) - check.Args(database.UpdateGitSSHKeyParams{ - UserID: key.UserID, - UpdatedAt: key.UpdatedAt, - }).Asserts(rbac.ResourceUserObject(key.UserID), policy.ActionUpdatePersonal).Returns(key) - })) - s.Run("GetExternalAuthLink", s.Subtest(func(db database.Store, check *expects) { - link := dbgen.ExternalAuthLink(s.T(), db, database.ExternalAuthLink{}) - check.Args(database.GetExternalAuthLinkParams{ - ProviderID: link.ProviderID, - UserID: link.UserID, - }).Asserts(rbac.ResourceUserObject(link.UserID), policy.ActionReadPersonal).Returns(link) - })) - s.Run("InsertExternalAuthLink", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - check.Args(database.InsertExternalAuthLinkParams{ - ProviderID: uuid.NewString(), - UserID: u.ID, - }).Asserts(u, policy.ActionUpdatePersonal) - })) - s.Run("UpdateExternalAuthLinkRefreshToken", s.Subtest(func(db database.Store, check *expects) { - link := dbgen.ExternalAuthLink(s.T(), db, database.ExternalAuthLink{}) - check.Args(database.UpdateExternalAuthLinkRefreshTokenParams{ - OAuthRefreshToken: "", - OAuthRefreshTokenKeyID: "", - ProviderID: link.ProviderID, - UserID: link.UserID, - UpdatedAt: link.UpdatedAt, - }).Asserts(rbac.ResourceUserObject(link.UserID), policy.ActionUpdatePersonal) - })) - s.Run("UpdateExternalAuthLink", s.Subtest(func(db database.Store, check *expects) { - link := dbgen.ExternalAuthLink(s.T(), db, database.ExternalAuthLink{}) - check.Args(database.UpdateExternalAuthLinkParams{ - ProviderID: link.ProviderID, - UserID: link.UserID, - OAuthAccessToken: link.OAuthAccessToken, - OAuthRefreshToken: link.OAuthRefreshToken, - OAuthExpiry: link.OAuthExpiry, - UpdatedAt: link.UpdatedAt, - }).Asserts(rbac.ResourceUserObject(link.UserID), policy.ActionUpdatePersonal).Returns(link) - })) - s.Run("UpdateUserLink", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - link := dbgen.UserLink(s.T(), db, database.UserLink{}) - check.Args(database.UpdateUserLinkParams{ - OAuthAccessToken: link.OAuthAccessToken, - OAuthRefreshToken: link.OAuthRefreshToken, - OAuthExpiry: link.OAuthExpiry, - UserID: link.UserID, - LoginType: link.LoginType, - Claims: database.UserLinkClaims{}, - }).Asserts(rbac.ResourceUserObject(link.UserID), policy.ActionUpdatePersonal).Returns(link) - })) - s.Run("UpdateUserRoles", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{RBACRoles: []string{codersdk.RoleTemplateAdmin}}) + s.Run("InsertGitSSHKey", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + arg := database.InsertGitSSHKeyParams{UserID: u.ID} + dbm.EXPECT().InsertGitSSHKey(gomock.Any(), arg).Return(database.GitSSHKey{UserID: u.ID}, nil).AnyTimes() + check.Args(arg).Asserts(u, policy.ActionUpdatePersonal) + })) + s.Run("UpdateGitSSHKey", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + key := testutil.Fake(s.T(), faker, database.GitSSHKey{}) + arg := database.UpdateGitSSHKeyParams{UserID: key.UserID, UpdatedAt: key.UpdatedAt} + dbm.EXPECT().GetGitSSHKey(gomock.Any(), key.UserID).Return(key, nil).AnyTimes() + dbm.EXPECT().UpdateGitSSHKey(gomock.Any(), arg).Return(key, nil).AnyTimes() + check.Args(arg).Asserts(key, policy.ActionUpdatePersonal).Returns(key) + })) + s.Run("GetExternalAuthLink", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + link := testutil.Fake(s.T(), faker, database.ExternalAuthLink{}) + arg := database.GetExternalAuthLinkParams{ProviderID: link.ProviderID, UserID: link.UserID} + dbm.EXPECT().GetExternalAuthLink(gomock.Any(), arg).Return(link, nil).AnyTimes() + check.Args(arg).Asserts(link, policy.ActionReadPersonal).Returns(link) + })) + s.Run("InsertExternalAuthLink", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + arg := database.InsertExternalAuthLinkParams{ProviderID: uuid.NewString(), UserID: u.ID} + dbm.EXPECT().InsertExternalAuthLink(gomock.Any(), arg).Return(database.ExternalAuthLink{}, nil).AnyTimes() + check.Args(arg).Asserts(u, policy.ActionUpdatePersonal) + })) + s.Run("UpdateExternalAuthLinkRefreshToken", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + link := testutil.Fake(s.T(), faker, database.ExternalAuthLink{}) + arg := database.UpdateExternalAuthLinkRefreshTokenParams{OAuthRefreshToken: "", OAuthRefreshTokenKeyID: "", ProviderID: link.ProviderID, UserID: link.UserID, UpdatedAt: link.UpdatedAt} + dbm.EXPECT().GetExternalAuthLink(gomock.Any(), database.GetExternalAuthLinkParams{ProviderID: link.ProviderID, UserID: link.UserID}).Return(link, nil).AnyTimes() + dbm.EXPECT().UpdateExternalAuthLinkRefreshToken(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(link, policy.ActionUpdatePersonal) + })) + s.Run("UpdateExternalAuthLink", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + link := testutil.Fake(s.T(), faker, database.ExternalAuthLink{}) + arg := database.UpdateExternalAuthLinkParams{ProviderID: link.ProviderID, UserID: link.UserID, OAuthAccessToken: link.OAuthAccessToken, OAuthRefreshToken: link.OAuthRefreshToken, OAuthExpiry: link.OAuthExpiry, UpdatedAt: link.UpdatedAt} + dbm.EXPECT().GetExternalAuthLink(gomock.Any(), database.GetExternalAuthLinkParams{ProviderID: link.ProviderID, UserID: link.UserID}).Return(link, nil).AnyTimes() + dbm.EXPECT().UpdateExternalAuthLink(gomock.Any(), arg).Return(link, nil).AnyTimes() + check.Args(arg).Asserts(link, policy.ActionUpdatePersonal).Returns(link) + })) + s.Run("UpdateUserLink", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + link := testutil.Fake(s.T(), faker, database.UserLink{}) + arg := database.UpdateUserLinkParams{OAuthAccessToken: link.OAuthAccessToken, OAuthRefreshToken: link.OAuthRefreshToken, OAuthExpiry: link.OAuthExpiry, UserID: link.UserID, LoginType: link.LoginType, Claims: database.UserLinkClaims{}} + dbm.EXPECT().GetUserLinkByUserIDLoginType(gomock.Any(), database.GetUserLinkByUserIDLoginTypeParams{UserID: link.UserID, LoginType: link.LoginType}).Return(link, nil).AnyTimes() + dbm.EXPECT().UpdateUserLink(gomock.Any(), arg).Return(link, nil).AnyTimes() + check.Args(arg).Asserts(link, policy.ActionUpdatePersonal).Returns(link) + })) + s.Run("UpdateUserRoles", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{RBACRoles: []string{codersdk.RoleTemplateAdmin}}) o := u o.RBACRoles = []string{codersdk.RoleUserAdmin} - check.Args(database.UpdateUserRolesParams{ - GrantedRoles: []string{codersdk.RoleUserAdmin}, - ID: u.ID, - }).Asserts( + arg := database.UpdateUserRolesParams{GrantedRoles: []string{codersdk.RoleUserAdmin}, ID: u.ID} + dbm.EXPECT().GetUserByID(gomock.Any(), u.ID).Return(u, nil).AnyTimes() + dbm.EXPECT().UpdateUserRoles(gomock.Any(), arg).Return(o, nil).AnyTimes() + check.Args(arg).Asserts( u, policy.ActionRead, rbac.ResourceAssignRole, policy.ActionAssign, rbac.ResourceAssignRole, policy.ActionUnassign, ).Returns(o) })) - s.Run("AllUserIDs", s.Subtest(func(db database.Store, check *expects) { - a := dbgen.User(s.T(), db, database.User{}) - b := dbgen.User(s.T(), db, database.User{}) + s.Run("AllUserIDs", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + a := testutil.Fake(s.T(), faker, database.User{}) + b := testutil.Fake(s.T(), faker, database.User{}) + dbm.EXPECT().AllUserIDs(gomock.Any(), false).Return([]uuid.UUID{a.ID, b.ID}, nil).AnyTimes() check.Args(false).Asserts(rbac.ResourceSystem, policy.ActionRead).Returns(slice.New(a.ID, b.ID)) })) - s.Run("CustomRoles", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.CustomRolesParams{}).Asserts(rbac.ResourceAssignRole, policy.ActionRead).Returns([]database.CustomRole{}) + s.Run("CustomRoles", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.CustomRolesParams{} + dbm.EXPECT().CustomRoles(gomock.Any(), arg).Return([]database.CustomRole{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceAssignRole, policy.ActionRead).Returns([]database.CustomRole{}) })) - s.Run("Organization/DeleteCustomRole", s.Subtest(func(db database.Store, check *expects) { - customRole := dbgen.CustomRole(s.T(), db, database.CustomRole{ - OrganizationID: uuid.NullUUID{ - UUID: uuid.New(), - Valid: true, - }, - }) - check.Args(database.DeleteCustomRoleParams{ - Name: customRole.Name, - OrganizationID: customRole.OrganizationID, - }).Asserts( - rbac.ResourceAssignOrgRole.InOrg(customRole.OrganizationID.UUID), policy.ActionDelete) + s.Run("Organization/DeleteCustomRole", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + orgID := uuid.New() + arg := database.DeleteCustomRoleParams{Name: "role", OrganizationID: uuid.NullUUID{UUID: orgID, Valid: true}} + dbm.EXPECT().DeleteCustomRole(gomock.Any(), arg).Return(nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceAssignOrgRole.InOrg(orgID), policy.ActionDelete) })) - s.Run("Site/DeleteCustomRole", s.Subtest(func(db database.Store, check *expects) { - customRole := dbgen.CustomRole(s.T(), db, database.CustomRole{ - OrganizationID: uuid.NullUUID{ - UUID: uuid.Nil, - Valid: false, - }, - }) - check.Args(database.DeleteCustomRoleParams{ - Name: customRole.Name, - }).Asserts( - // fails immediately, missing organization id - ).Errors(dbauthz.NotAuthorizedError{Err: xerrors.New("custom roles must belong to an organization")}) + s.Run("Site/DeleteCustomRole", s.Mocked(func(_ *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.DeleteCustomRoleParams{Name: "role"} + check.Args(arg).Asserts().Errors(dbauthz.NotAuthorizedError{Err: xerrors.New("custom roles must belong to an organization")}) })) - s.Run("Blank/UpdateCustomRole", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - customRole := dbgen.CustomRole(s.T(), db, database.CustomRole{ - OrganizationID: uuid.NullUUID{UUID: uuid.New(), Valid: true}, - }) - // Blank is no perms in the role - check.Args(database.UpdateCustomRoleParams{ - Name: customRole.Name, - DisplayName: "Test Name", - OrganizationID: customRole.OrganizationID, - SitePermissions: nil, - OrgPermissions: nil, - UserPermissions: nil, - }).Asserts(rbac.ResourceAssignOrgRole.InOrg(customRole.OrganizationID.UUID), policy.ActionUpdate) - })) - s.Run("SitePermissions/UpdateCustomRole", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.UpdateCustomRoleParams{ + s.Run("Blank/UpdateCustomRole", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + orgID := uuid.New() + arg := database.UpdateCustomRoleParams{Name: "name", DisplayName: "Test Name", OrganizationID: uuid.NullUUID{UUID: orgID, Valid: true}} + dbm.EXPECT().UpdateCustomRole(gomock.Any(), arg).Return(database.CustomRole{}, nil).AnyTimes() + // Blank perms -> no escalation asserts beyond org role update + check.Args(arg).Asserts(rbac.ResourceAssignOrgRole.InOrg(orgID), policy.ActionUpdate) + })) + s.Run("SitePermissions/UpdateCustomRole", s.Mocked(func(_ *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.UpdateCustomRoleParams{ Name: "", OrganizationID: uuid.NullUUID{UUID: uuid.Nil, Valid: false}, DisplayName: "Test Name", @@ -1989,50 +1748,35 @@ func (s *MethodTestSuite) TestUser() { UserPermissions: db2sdk.List(codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{ codersdk.ResourceWorkspace: {codersdk.ActionRead}, }), convertSDKPerm), - }).Asserts( - // fails immediately, missing organization id - ).Errors(dbauthz.NotAuthorizedError{Err: xerrors.New("custom roles must belong to an organization")}) + } + check.Args(arg).Asserts().Errors(dbauthz.NotAuthorizedError{Err: xerrors.New("custom roles must belong to an organization")}) })) - s.Run("OrgPermissions/UpdateCustomRole", s.Subtest(func(db database.Store, check *expects) { + s.Run("OrgPermissions/UpdateCustomRole", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { orgID := uuid.New() - customRole := dbgen.CustomRole(s.T(), db, database.CustomRole{ - OrganizationID: uuid.NullUUID{ - UUID: orgID, - Valid: true, - }, - }) - - check.Args(database.UpdateCustomRoleParams{ - Name: customRole.Name, - DisplayName: "Test Name", - OrganizationID: customRole.OrganizationID, - SitePermissions: nil, + arg := database.UpdateCustomRoleParams{ + Name: "name", + DisplayName: "Test Name", + OrganizationID: uuid.NullUUID{UUID: orgID, Valid: true}, OrgPermissions: db2sdk.List(codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{ codersdk.ResourceTemplate: {codersdk.ActionCreate, codersdk.ActionRead}, }), convertSDKPerm), - UserPermissions: nil, - }).Asserts( - // First check + } + dbm.EXPECT().UpdateCustomRole(gomock.Any(), arg).Return(database.CustomRole{}, nil).AnyTimes() + check.Args(arg).Asserts( rbac.ResourceAssignOrgRole.InOrg(orgID), policy.ActionUpdate, // Escalation checks rbac.ResourceTemplate.InOrg(orgID), policy.ActionCreate, rbac.ResourceTemplate.InOrg(orgID), policy.ActionRead, ) })) - s.Run("Blank/InsertCustomRole", s.Subtest(func(db database.Store, check *expects) { - // Blank is no perms in the role + s.Run("Blank/InsertCustomRole", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { orgID := uuid.New() - check.Args(database.InsertCustomRoleParams{ - Name: "test", - DisplayName: "Test Name", - OrganizationID: uuid.NullUUID{UUID: orgID, Valid: true}, - SitePermissions: nil, - OrgPermissions: nil, - UserPermissions: nil, - }).Asserts(rbac.ResourceAssignOrgRole.InOrg(orgID), policy.ActionCreate) - })) - s.Run("SitePermissions/InsertCustomRole", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.InsertCustomRoleParams{ + arg := database.InsertCustomRoleParams{Name: "test", DisplayName: "Test Name", OrganizationID: uuid.NullUUID{UUID: orgID, Valid: true}} + dbm.EXPECT().InsertCustomRole(gomock.Any(), arg).Return(database.CustomRole{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceAssignOrgRole.InOrg(orgID), policy.ActionCreate) + })) + s.Run("SitePermissions/InsertCustomRole", s.Mocked(func(_ *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.InsertCustomRoleParams{ Name: "test", DisplayName: "Test Name", SitePermissions: db2sdk.List(codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{ @@ -2042,38 +1786,37 @@ func (s *MethodTestSuite) TestUser() { UserPermissions: db2sdk.List(codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{ codersdk.ResourceWorkspace: {codersdk.ActionRead}, }), convertSDKPerm), - }).Asserts( - // fails immediately, missing organization id - ).Errors(dbauthz.NotAuthorizedError{Err: xerrors.New("custom roles must belong to an organization")}) + } + check.Args(arg).Asserts().Errors(dbauthz.NotAuthorizedError{Err: xerrors.New("custom roles must belong to an organization")}) })) - s.Run("OrgPermissions/InsertCustomRole", s.Subtest(func(db database.Store, check *expects) { + s.Run("OrgPermissions/InsertCustomRole", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { orgID := uuid.New() - check.Args(database.InsertCustomRoleParams{ - Name: "test", - DisplayName: "Test Name", - OrganizationID: uuid.NullUUID{ - UUID: orgID, - Valid: true, - }, - SitePermissions: nil, + arg := database.InsertCustomRoleParams{ + Name: "test", + DisplayName: "Test Name", + OrganizationID: uuid.NullUUID{UUID: orgID, Valid: true}, OrgPermissions: db2sdk.List(codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{ codersdk.ResourceTemplate: {codersdk.ActionCreate, codersdk.ActionRead}, }), convertSDKPerm), - UserPermissions: nil, - }).Asserts( - // First check + } + dbm.EXPECT().InsertCustomRole(gomock.Any(), arg).Return(database.CustomRole{}, nil).AnyTimes() + check.Args(arg).Asserts( rbac.ResourceAssignOrgRole.InOrg(orgID), policy.ActionCreate, // Escalation checks rbac.ResourceTemplate.InOrg(orgID), policy.ActionCreate, rbac.ResourceTemplate.InOrg(orgID), policy.ActionRead, ) })) - s.Run("GetUserStatusCounts", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetUserStatusCountsParams{ - StartTime: time.Now().Add(-time.Hour * 24 * 30), - EndTime: time.Now(), - Interval: int32((time.Hour * 24).Seconds()), - }).Asserts(rbac.ResourceUser, policy.ActionRead) + s.Run("GetUserStatusCounts", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.GetUserStatusCountsParams{StartTime: time.Now().Add(-time.Hour * 24 * 30), EndTime: time.Now(), Interval: int32((time.Hour * 24).Seconds())} + dbm.EXPECT().GetUserStatusCounts(gomock.Any(), arg).Return([]database.GetUserStatusCountsRow{}, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceUser, policy.ActionRead) + })) + s.Run("ValidateUserIDs", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + ids := []uuid.UUID{u.ID} + dbm.EXPECT().ValidateUserIDs(gomock.Any(), ids).Return(database.ValidateUserIDsRow{}, nil).AnyTimes() + check.Args(ids).Asserts(rbac.ResourceSystem, policy.ActionRead) })) } @@ -2144,6 +1887,22 @@ func (s *MethodTestSuite) TestWorkspace() { // no asserts here because SQLFilter check.Args([]uuid.UUID{}, emptyPreparedAuthorized{}).Asserts() })) + s.Run("UpdateWorkspaceACLByID", s.Subtest(func(db database.Store, check *expects) { + u := dbgen.User(s.T(), db, database.User{}) + o := dbgen.Organization(s.T(), db, database.Organization{}) + tpl := dbgen.Template(s.T(), db, database.Template{ + OrganizationID: o.ID, + CreatedBy: u.ID, + }) + ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{ + OwnerID: u.ID, + OrganizationID: o.ID, + TemplateID: tpl.ID, + }) + check.Args(database.UpdateWorkspaceACLByIDParams{ + ID: ws.ID, + }).Asserts(ws, policy.ActionCreate) + })) s.Run("GetLatestWorkspaceBuildByWorkspaceID", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) o := dbgen.Organization(s.T(), db, database.Organization{}) @@ -3200,38 +2959,44 @@ func (s *MethodTestSuite) TestWorkspace() { Deadline: b.Deadline, }).Asserts(w, policy.ActionUpdate) })) - s.Run("UpdateWorkspaceBuildAITaskByID", s.Subtest(func(db database.Store, check *expects) { - u := dbgen.User(s.T(), db, database.User{}) - o := dbgen.Organization(s.T(), db, database.Organization{}) - tpl := dbgen.Template(s.T(), db, database.Template{ + s.Run("UpdateWorkspaceBuildFlagsByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + u := testutil.Fake(s.T(), faker, database.User{}) + o := testutil.Fake(s.T(), faker, database.Organization{}) + tpl := testutil.Fake(s.T(), faker, database.Template{ OrganizationID: o.ID, CreatedBy: u.ID, }) - tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ + tv := testutil.Fake(s.T(), faker, database.TemplateVersion{ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true}, OrganizationID: o.ID, CreatedBy: u.ID, }) - w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{ + w := testutil.Fake(s.T(), faker, database.Workspace{ TemplateID: tpl.ID, OrganizationID: o.ID, OwnerID: u.ID, }) - j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ + j := testutil.Fake(s.T(), faker, database.ProvisionerJob{ Type: database.ProvisionerJobTypeWorkspaceBuild, }) - b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{ + b := testutil.Fake(s.T(), faker, database.WorkspaceBuild{ JobID: j.ID, WorkspaceID: w.ID, TemplateVersionID: tv.ID, }) - res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: b.JobID}) - agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID}) - app := dbgen.WorkspaceApp(s.T(), db, database.WorkspaceApp{AgentID: agt.ID}) - check.Args(database.UpdateWorkspaceBuildAITaskByIDParams{ - HasAITask: sql.NullBool{Bool: true, Valid: true}, - SidebarAppID: uuid.NullUUID{UUID: app.ID, Valid: true}, - ID: b.ID, + res := testutil.Fake(s.T(), faker, database.WorkspaceResource{JobID: b.JobID}) + agt := testutil.Fake(s.T(), faker, database.WorkspaceAgent{ResourceID: res.ID}) + app := testutil.Fake(s.T(), faker, database.WorkspaceApp{AgentID: agt.ID}) + + dbm.EXPECT().GetWorkspaceByID(gomock.Any(), w.ID).Return(w, nil).AnyTimes() + dbm.EXPECT().GetWorkspaceBuildByID(gomock.Any(), b.ID).Return(b, nil).AnyTimes() + dbm.EXPECT().UpdateWorkspaceBuildFlagsByID(gomock.Any(), gomock.Any()).Return(nil).AnyTimes() + check.Args(database.UpdateWorkspaceBuildFlagsByIDParams{ + ID: b.ID, + HasAITask: sql.NullBool{Bool: true, Valid: true}, + HasExternalAgent: sql.NullBool{Bool: true, Valid: true}, + SidebarAppID: uuid.NullUUID{UUID: app.ID, Valid: true}, + UpdatedAt: b.UpdatedAt, }).Asserts(w, policy.ActionUpdate) })) s.Run("SoftDeleteWorkspaceByID", s.Subtest(func(db database.Store, check *expects) { @@ -3572,73 +3337,50 @@ func (s *MethodTestSuite) TestWorkspacePortSharing() { } func (s *MethodTestSuite) TestProvisionerKeys() { - s.Run("InsertProvisionerKey", s.Subtest(func(db database.Store, check *expects) { - org := dbgen.Organization(s.T(), db, database.Organization{}) - pk := database.ProvisionerKey{ - ID: uuid.New(), - CreatedAt: dbtestutil.NowInDefaultTimezone(), - OrganizationID: org.ID, - Name: strings.ToLower(coderdtest.RandomName(s.T())), - HashedSecret: []byte(coderdtest.RandomName(s.T())), - } - //nolint:gosimple // casting is not a simplification - check.Args(database.InsertProvisionerKeyParams{ - ID: pk.ID, - CreatedAt: pk.CreatedAt, - OrganizationID: pk.OrganizationID, - Name: pk.Name, - HashedSecret: pk.HashedSecret, - }).Asserts(pk, policy.ActionCreate).Returns(pk) - })) - s.Run("GetProvisionerKeyByID", s.Subtest(func(db database.Store, check *expects) { - org := dbgen.Organization(s.T(), db, database.Organization{}) - pk := dbgen.ProvisionerKey(s.T(), db, database.ProvisionerKey{OrganizationID: org.ID}) + s.Run("InsertProvisionerKey", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + org := testutil.Fake(s.T(), faker, database.Organization{}) + pk := testutil.Fake(s.T(), faker, database.ProvisionerKey{OrganizationID: org.ID}) + arg := database.InsertProvisionerKeyParams{ID: pk.ID, CreatedAt: pk.CreatedAt, OrganizationID: pk.OrganizationID, Name: pk.Name, HashedSecret: pk.HashedSecret} + dbm.EXPECT().InsertProvisionerKey(gomock.Any(), arg).Return(pk, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceProvisionerDaemon.InOrg(org.ID).WithID(pk.ID), policy.ActionCreate).Returns(pk) + })) + s.Run("GetProvisionerKeyByID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + org := testutil.Fake(s.T(), faker, database.Organization{}) + pk := testutil.Fake(s.T(), faker, database.ProvisionerKey{OrganizationID: org.ID}) + dbm.EXPECT().GetProvisionerKeyByID(gomock.Any(), pk.ID).Return(pk, nil).AnyTimes() check.Args(pk.ID).Asserts(pk, policy.ActionRead).Returns(pk) })) - s.Run("GetProvisionerKeyByHashedSecret", s.Subtest(func(db database.Store, check *expects) { - org := dbgen.Organization(s.T(), db, database.Organization{}) - pk := dbgen.ProvisionerKey(s.T(), db, database.ProvisionerKey{OrganizationID: org.ID, HashedSecret: []byte("foo")}) + s.Run("GetProvisionerKeyByHashedSecret", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + org := testutil.Fake(s.T(), faker, database.Organization{}) + pk := testutil.Fake(s.T(), faker, database.ProvisionerKey{OrganizationID: org.ID, HashedSecret: []byte("foo")}) + dbm.EXPECT().GetProvisionerKeyByHashedSecret(gomock.Any(), []byte("foo")).Return(pk, nil).AnyTimes() check.Args([]byte("foo")).Asserts(pk, policy.ActionRead).Returns(pk) })) - s.Run("GetProvisionerKeyByName", s.Subtest(func(db database.Store, check *expects) { - org := dbgen.Organization(s.T(), db, database.Organization{}) - pk := dbgen.ProvisionerKey(s.T(), db, database.ProvisionerKey{OrganizationID: org.ID}) - check.Args(database.GetProvisionerKeyByNameParams{ - OrganizationID: org.ID, - Name: pk.Name, - }).Asserts(pk, policy.ActionRead).Returns(pk) - })) - s.Run("ListProvisionerKeysByOrganization", s.Subtest(func(db database.Store, check *expects) { - org := dbgen.Organization(s.T(), db, database.Organization{}) - pk := dbgen.ProvisionerKey(s.T(), db, database.ProvisionerKey{OrganizationID: org.ID}) - pks := []database.ProvisionerKey{ - { - ID: pk.ID, - CreatedAt: pk.CreatedAt, - OrganizationID: pk.OrganizationID, - Name: pk.Name, - HashedSecret: pk.HashedSecret, - }, - } - check.Args(org.ID).Asserts(pk, policy.ActionRead).Returns(pks) - })) - s.Run("ListProvisionerKeysByOrganizationExcludeReserved", s.Subtest(func(db database.Store, check *expects) { - org := dbgen.Organization(s.T(), db, database.Organization{}) - pk := dbgen.ProvisionerKey(s.T(), db, database.ProvisionerKey{OrganizationID: org.ID}) - pks := []database.ProvisionerKey{ - { - ID: pk.ID, - CreatedAt: pk.CreatedAt, - OrganizationID: pk.OrganizationID, - Name: pk.Name, - HashedSecret: pk.HashedSecret, - }, - } - check.Args(org.ID).Asserts(pk, policy.ActionRead).Returns(pks) - })) - s.Run("DeleteProvisionerKey", s.Subtest(func(db database.Store, check *expects) { - org := dbgen.Organization(s.T(), db, database.Organization{}) - pk := dbgen.ProvisionerKey(s.T(), db, database.ProvisionerKey{OrganizationID: org.ID}) + s.Run("GetProvisionerKeyByName", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + org := testutil.Fake(s.T(), faker, database.Organization{}) + pk := testutil.Fake(s.T(), faker, database.ProvisionerKey{OrganizationID: org.ID}) + arg := database.GetProvisionerKeyByNameParams{OrganizationID: org.ID, Name: pk.Name} + dbm.EXPECT().GetProvisionerKeyByName(gomock.Any(), arg).Return(pk, nil).AnyTimes() + check.Args(arg).Asserts(pk, policy.ActionRead).Returns(pk) + })) + s.Run("ListProvisionerKeysByOrganization", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + org := testutil.Fake(s.T(), faker, database.Organization{}) + a := testutil.Fake(s.T(), faker, database.ProvisionerKey{OrganizationID: org.ID}) + b := testutil.Fake(s.T(), faker, database.ProvisionerKey{OrganizationID: org.ID}) + dbm.EXPECT().ListProvisionerKeysByOrganization(gomock.Any(), org.ID).Return([]database.ProvisionerKey{a, b}, nil).AnyTimes() + check.Args(org.ID).Asserts(a, policy.ActionRead, b, policy.ActionRead).Returns([]database.ProvisionerKey{a, b}) + })) + s.Run("ListProvisionerKeysByOrganizationExcludeReserved", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + org := testutil.Fake(s.T(), faker, database.Organization{}) + pk := testutil.Fake(s.T(), faker, database.ProvisionerKey{OrganizationID: org.ID}) + dbm.EXPECT().ListProvisionerKeysByOrganizationExcludeReserved(gomock.Any(), org.ID).Return([]database.ProvisionerKey{pk}, nil).AnyTimes() + check.Args(org.ID).Asserts(pk, policy.ActionRead).Returns([]database.ProvisionerKey{pk}) + })) + s.Run("DeleteProvisionerKey", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + org := testutil.Fake(s.T(), faker, database.Organization{}) + pk := testutil.Fake(s.T(), faker, database.ProvisionerKey{OrganizationID: org.ID}) + dbm.EXPECT().GetProvisionerKeyByID(gomock.Any(), pk.ID).Return(pk, nil).AnyTimes() + dbm.EXPECT().DeleteProvisionerKey(gomock.Any(), pk.ID).Return(nil).AnyTimes() check.Args(pk.ID).Asserts(pk, policy.ActionDelete).Returns() })) } @@ -3892,21 +3634,20 @@ func (s *MethodTestSuite) TestTailnetFunctions() { } func (s *MethodTestSuite) TestDBCrypt() { - s.Run("GetDBCryptKeys", s.Subtest(func(db database.Store, check *expects) { + s.Run("GetDBCryptKeys", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().GetDBCryptKeys(gomock.Any()).Return([]database.DBCryptKey{}, nil).AnyTimes() check.Args(). Asserts(rbac.ResourceSystem, policy.ActionRead). Returns([]database.DBCryptKey{}) })) - s.Run("InsertDBCryptKey", s.Subtest(func(db database.Store, check *expects) { + s.Run("InsertDBCryptKey", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().InsertDBCryptKey(gomock.Any(), database.InsertDBCryptKeyParams{}).Return(nil).AnyTimes() check.Args(database.InsertDBCryptKeyParams{}). Asserts(rbac.ResourceSystem, policy.ActionCreate). Returns() })) - s.Run("RevokeDBCryptKey", s.Subtest(func(db database.Store, check *expects) { - err := db.InsertDBCryptKey(context.Background(), database.InsertDBCryptKeyParams{ - ActiveKeyDigest: "revoke me", - }) - s.NoError(err) + s.Run("RevokeDBCryptKey", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().RevokeDBCryptKey(gomock.Any(), "revoke me").Return(nil).AnyTimes() check.Args("revoke me"). Asserts(rbac.ResourceSystem, policy.ActionUpdate). Returns() @@ -3914,56 +3655,44 @@ func (s *MethodTestSuite) TestDBCrypt() { } func (s *MethodTestSuite) TestCryptoKeys() { - s.Run("GetCryptoKeys", s.Subtest(func(db database.Store, check *expects) { + s.Run("GetCryptoKeys", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + dbm.EXPECT().GetCryptoKeys(gomock.Any()).Return([]database.CryptoKey{}, nil).AnyTimes() check.Args(). Asserts(rbac.ResourceCryptoKey, policy.ActionRead) })) - s.Run("InsertCryptoKey", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.InsertCryptoKeyParams{ - Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, - }). + s.Run("InsertCryptoKey", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + arg := database.InsertCryptoKeyParams{Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey} + dbm.EXPECT().InsertCryptoKey(gomock.Any(), arg).Return(database.CryptoKey{}, nil).AnyTimes() + check.Args(arg). Asserts(rbac.ResourceCryptoKey, policy.ActionCreate) })) - s.Run("DeleteCryptoKey", s.Subtest(func(db database.Store, check *expects) { - key := dbgen.CryptoKey(s.T(), db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, - Sequence: 4, - }) - check.Args(database.DeleteCryptoKeyParams{ - Feature: key.Feature, - Sequence: key.Sequence, - }).Asserts(rbac.ResourceCryptoKey, policy.ActionDelete) - })) - s.Run("GetCryptoKeyByFeatureAndSequence", s.Subtest(func(db database.Store, check *expects) { - key := dbgen.CryptoKey(s.T(), db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, - Sequence: 4, - }) - check.Args(database.GetCryptoKeyByFeatureAndSequenceParams{ - Feature: key.Feature, - Sequence: key.Sequence, - }).Asserts(rbac.ResourceCryptoKey, policy.ActionRead).Returns(key) - })) - s.Run("GetLatestCryptoKeyByFeature", s.Subtest(func(db database.Store, check *expects) { - dbgen.CryptoKey(s.T(), db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, - Sequence: 4, - }) - check.Args(database.CryptoKeyFeatureWorkspaceAppsAPIKey).Asserts(rbac.ResourceCryptoKey, policy.ActionRead) - })) - s.Run("UpdateCryptoKeyDeletesAt", s.Subtest(func(db database.Store, check *expects) { - key := dbgen.CryptoKey(s.T(), db, database.CryptoKey{ - Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, - Sequence: 4, - }) - check.Args(database.UpdateCryptoKeyDeletesAtParams{ - Feature: key.Feature, - Sequence: key.Sequence, - DeletesAt: sql.NullTime{Time: time.Now(), Valid: true}, - }).Asserts(rbac.ResourceCryptoKey, policy.ActionUpdate) - })) - s.Run("GetCryptoKeysByFeature", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.CryptoKeyFeatureWorkspaceAppsAPIKey). + s.Run("DeleteCryptoKey", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + key := testutil.Fake(s.T(), faker, database.CryptoKey{Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: 4}) + arg := database.DeleteCryptoKeyParams{Feature: key.Feature, Sequence: key.Sequence} + dbm.EXPECT().DeleteCryptoKey(gomock.Any(), arg).Return(key, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceCryptoKey, policy.ActionDelete) + })) + s.Run("GetCryptoKeyByFeatureAndSequence", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + key := testutil.Fake(s.T(), faker, database.CryptoKey{Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: 4}) + arg := database.GetCryptoKeyByFeatureAndSequenceParams{Feature: key.Feature, Sequence: key.Sequence} + dbm.EXPECT().GetCryptoKeyByFeatureAndSequence(gomock.Any(), arg).Return(key, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceCryptoKey, policy.ActionRead).Returns(key) + })) + s.Run("GetLatestCryptoKeyByFeature", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + feature := database.CryptoKeyFeatureWorkspaceAppsAPIKey + dbm.EXPECT().GetLatestCryptoKeyByFeature(gomock.Any(), feature).Return(database.CryptoKey{}, nil).AnyTimes() + check.Args(feature).Asserts(rbac.ResourceCryptoKey, policy.ActionRead) + })) + s.Run("UpdateCryptoKeyDeletesAt", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + key := testutil.Fake(s.T(), faker, database.CryptoKey{Feature: database.CryptoKeyFeatureWorkspaceAppsAPIKey, Sequence: 4}) + arg := database.UpdateCryptoKeyDeletesAtParams{Feature: key.Feature, Sequence: key.Sequence, DeletesAt: sql.NullTime{Time: time.Now(), Valid: true}} + dbm.EXPECT().UpdateCryptoKeyDeletesAt(gomock.Any(), arg).Return(key, nil).AnyTimes() + check.Args(arg).Asserts(rbac.ResourceCryptoKey, policy.ActionUpdate) + })) + s.Run("GetCryptoKeysByFeature", s.Mocked(func(dbm *dbmock.MockStore, _ *gofakeit.Faker, check *expects) { + feature := database.CryptoKeyFeatureWorkspaceAppsAPIKey + dbm.EXPECT().GetCryptoKeysByFeature(gomock.Any(), feature).Return([]database.CryptoKey{}, nil).AnyTimes() + check.Args(feature). Asserts(rbac.ResourceCryptoKey, policy.ActionRead) })) } @@ -4006,18 +3735,9 @@ func (s *MethodTestSuite) TestSystemFunctions() { LoginType: l.LoginType, }).Asserts(rbac.ResourceSystem, policy.ActionRead).Returns(l) })) - s.Run("GetLatestWorkspaceBuilds", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{}) - dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{}) - check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead) - })) s.Run("GetActiveUserCount", s.Subtest(func(db database.Store, check *expects) { check.Args(false).Asserts(rbac.ResourceSystem, policy.ActionRead).Returns(int64(0)) })) - s.Run("GetUnexpiredLicenses", s.Subtest(func(db database.Store, check *expects) { - check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead) - })) s.Run("GetAuthorizationUserRoles", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) check.Args(u.ID).Asserts(rbac.ResourceSystem, policy.ActionRead) @@ -4339,6 +4059,20 @@ func (s *MethodTestSuite) TestSystemFunctions() { UpdatedAt: time.Now(), }).Asserts(rbac.ResourceProvisionerJobs, policy.ActionUpdate) })) + s.Run("UpdateProvisionerJobLogsLength", s.Subtest(func(db database.Store, check *expects) { + j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{}) + check.Args(database.UpdateProvisionerJobLogsLengthParams{ + ID: j.ID, + LogsLength: 100, + }).Asserts(rbac.ResourceProvisionerJobs, policy.ActionUpdate) + })) + s.Run("UpdateProvisionerJobLogsOverflowed", s.Subtest(func(db database.Store, check *expects) { + j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{}) + check.Args(database.UpdateProvisionerJobLogsOverflowedParams{ + ID: j.ID, + LogsOverflowed: true, + }).Asserts(rbac.ResourceProvisionerJobs, policy.ActionUpdate) + })) s.Run("InsertProvisionerJob", s.Subtest(func(db database.Store, check *expects) { dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) check.Args(database.InsertProvisionerJobParams{ @@ -4794,9 +4528,6 @@ func (s *MethodTestSuite) TestSystemFunctions() { }) check.Args(j.ID).Asserts(v.RBACObject(tpl), policy.ActionRead).Returns(j) })) - s.Run("HasTemplateVersionsWithAITask", s.Subtest(func(db database.Store, check *expects) { - check.Args().Asserts() - })) } func (s *MethodTestSuite) TestNotifications() { @@ -5199,6 +4930,22 @@ func (s *MethodTestSuite) TestPrebuilds() { template, policy.ActionUse, ).Errors(sql.ErrNoRows) })) + s.Run("FindMatchingPresetID", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + t1 := testutil.Fake(s.T(), faker, database.Template{}) + tv := testutil.Fake(s.T(), faker, database.TemplateVersion{TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}}) + dbm.EXPECT().FindMatchingPresetID(gomock.Any(), database.FindMatchingPresetIDParams{ + TemplateVersionID: tv.ID, + ParameterNames: []string{"test"}, + ParameterValues: []string{"test"}, + }).Return(uuid.Nil, nil).AnyTimes() + dbm.EXPECT().GetTemplateVersionByID(gomock.Any(), tv.ID).Return(tv, nil).AnyTimes() + dbm.EXPECT().GetTemplateByID(gomock.Any(), t1.ID).Return(t1, nil).AnyTimes() + check.Args(database.FindMatchingPresetIDParams{ + TemplateVersionID: tv.ID, + ParameterNames: []string{"test"}, + ParameterValues: []string{"test"}, + }).Asserts(tv.RBACObject(t1), policy.ActionRead).Returns(uuid.Nil) + })) s.Run("GetPrebuildMetrics", s.Subtest(func(_ database.Store, check *expects) { check.Args(). Asserts(rbac.ResourceWorkspace.All(), policy.ActionRead) @@ -5845,3 +5592,88 @@ func (s *MethodTestSuite) TestAuthorizePrebuiltWorkspace() { }).Asserts(w, policy.ActionUpdate, w.AsPrebuild(), policy.ActionUpdate) })) } + +func (s *MethodTestSuite) TestUserSecrets() { + s.Run("GetUserSecretByUserIDAndName", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + user := testutil.Fake(s.T(), faker, database.User{}) + secret := testutil.Fake(s.T(), faker, database.UserSecret{UserID: user.ID}) + arg := database.GetUserSecretByUserIDAndNameParams{UserID: user.ID, Name: secret.Name} + dbm.EXPECT().GetUserSecretByUserIDAndName(gomock.Any(), arg).Return(secret, nil).AnyTimes() + check.Args(arg). + Asserts(rbac.ResourceUserSecret.WithOwner(user.ID.String()), policy.ActionRead). + Returns(secret) + })) + s.Run("GetUserSecret", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + secret := testutil.Fake(s.T(), faker, database.UserSecret{}) + dbm.EXPECT().GetUserSecret(gomock.Any(), secret.ID).Return(secret, nil).AnyTimes() + check.Args(secret.ID). + Asserts(secret, policy.ActionRead). + Returns(secret) + })) + s.Run("ListUserSecrets", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + user := testutil.Fake(s.T(), faker, database.User{}) + secret := testutil.Fake(s.T(), faker, database.UserSecret{UserID: user.ID}) + dbm.EXPECT().ListUserSecrets(gomock.Any(), user.ID).Return([]database.UserSecret{secret}, nil).AnyTimes() + check.Args(user.ID). + Asserts(rbac.ResourceUserSecret.WithOwner(user.ID.String()), policy.ActionRead). + Returns([]database.UserSecret{secret}) + })) + s.Run("CreateUserSecret", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + user := testutil.Fake(s.T(), faker, database.User{}) + arg := database.CreateUserSecretParams{UserID: user.ID} + ret := testutil.Fake(s.T(), faker, database.UserSecret{UserID: user.ID}) + dbm.EXPECT().CreateUserSecret(gomock.Any(), arg).Return(ret, nil).AnyTimes() + check.Args(arg). + Asserts(rbac.ResourceUserSecret.WithOwner(user.ID.String()), policy.ActionCreate). + Returns(ret) + })) + s.Run("UpdateUserSecret", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + secret := testutil.Fake(s.T(), faker, database.UserSecret{}) + updated := testutil.Fake(s.T(), faker, database.UserSecret{ID: secret.ID}) + arg := database.UpdateUserSecretParams{ID: secret.ID} + dbm.EXPECT().GetUserSecret(gomock.Any(), secret.ID).Return(secret, nil).AnyTimes() + dbm.EXPECT().UpdateUserSecret(gomock.Any(), arg).Return(updated, nil).AnyTimes() + check.Args(arg). + Asserts(secret, policy.ActionUpdate). + Returns(updated) + })) + s.Run("DeleteUserSecret", s.Mocked(func(dbm *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + secret := testutil.Fake(s.T(), faker, database.UserSecret{}) + dbm.EXPECT().GetUserSecret(gomock.Any(), secret.ID).Return(secret, nil).AnyTimes() + dbm.EXPECT().DeleteUserSecret(gomock.Any(), secret.ID).Return(nil).AnyTimes() + check.Args(secret.ID). + Asserts(secret, policy.ActionRead, secret, policy.ActionDelete). + Returns() + })) +} + +func (s *MethodTestSuite) TestUsageEvents() { + s.Run("InsertUsageEvent", s.Mocked(func(db *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + params := database.InsertUsageEventParams{ + ID: "1", + EventType: "dc_managed_agents_v1", + EventData: []byte("{}"), + CreatedAt: dbtime.Now(), + } + db.EXPECT().InsertUsageEvent(gomock.Any(), params).Return(nil) + check.Args(params).Asserts(rbac.ResourceUsageEvent, policy.ActionCreate) + })) + + s.Run("SelectUsageEventsForPublishing", s.Mocked(func(db *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + now := dbtime.Now() + db.EXPECT().SelectUsageEventsForPublishing(gomock.Any(), now).Return([]database.UsageEvent{}, nil) + check.Args(now).Asserts(rbac.ResourceUsageEvent, policy.ActionUpdate) + })) + + s.Run("UpdateUsageEventsPostPublish", s.Mocked(func(db *dbmock.MockStore, faker *gofakeit.Faker, check *expects) { + now := dbtime.Now() + params := database.UpdateUsageEventsPostPublishParams{ + Now: now, + IDs: []string{"1", "2"}, + FailureMessages: []string{"error", "error"}, + SetPublishedAts: []bool{false, false}, + } + db.EXPECT().UpdateUsageEventsPostPublish(gomock.Any(), params).Return(nil) + check.Args(params).Asserts(rbac.ResourceUsageEvent, policy.ActionUpdate) + })) +} diff --git a/coderd/database/dbauthz/setup_test.go b/coderd/database/dbauthz/setup_test.go index 3fc4b06b7f69d..c9a1b2063d691 100644 --- a/coderd/database/dbauthz/setup_test.go +++ b/coderd/database/dbauthz/setup_test.go @@ -10,6 +10,7 @@ import ( "strings" "testing" + "github.com/brianvoe/gofakeit/v7" "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" "github.com/google/uuid" @@ -20,7 +21,6 @@ import ( "golang.org/x/xerrors" "cdr.dev/slog" - "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/database" @@ -28,6 +28,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbmock" "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/rbac/regosql" "github.com/coder/coder/v2/coderd/util/slice" ) @@ -105,11 +106,37 @@ func (s *MethodTestSuite) TearDownSuite() { var testActorID = uuid.New() -// Subtest is a helper function that returns a function that can be passed to +// Mocked runs a subtest with a mocked database. Removing the overhead of a real +// postgres database resulting in much faster tests. +func (s *MethodTestSuite) Mocked(testCaseF func(dmb *dbmock.MockStore, faker *gofakeit.Faker, check *expects)) func() { + t := s.T() + mDB := dbmock.NewMockStore(gomock.NewController(t)) + mDB.EXPECT().Wrappers().Return([]string{}).AnyTimes() + + // Use a constant seed to prevent flakes from random data generation. + faker := gofakeit.New(0) + + // The usual Subtest assumes the test setup will use a real database to populate + // with data. In this mocked case, we want to pass the underlying mocked database + // to the test case instead. + return s.SubtestWithDB(mDB, func(_ database.Store, check *expects) { + testCaseF(mDB, faker, check) + }) +} + +// Subtest starts up a real postgres database for each test case. +// Deprecated: Use 'Mocked' instead for much faster tests. +func (s *MethodTestSuite) Subtest(testCaseF func(db database.Store, check *expects)) func() { + t := s.T() + db, _ := dbtestutil.NewDB(t) + return s.SubtestWithDB(db, testCaseF) +} + +// SubtestWithDB is a helper function that returns a function that can be passed to // s.Run(). This function will run the test case for the method that is being // tested. The check parameter is used to assert the results of the method. // If the caller does not use the `check` parameter, the test will fail. -func (s *MethodTestSuite) Subtest(testCaseF func(db database.Store, check *expects)) func() { +func (s *MethodTestSuite) SubtestWithDB(db database.Store, testCaseF func(db database.Store, check *expects)) func() { return func() { t := s.T() testName := s.T().Name() @@ -117,7 +144,6 @@ func (s *MethodTestSuite) Subtest(testCaseF func(db database.Store, check *expec methodName := names[len(names)-1] s.methodAccounting[methodName]++ - db, _ := dbtestutil.NewDB(t) fakeAuthorizer := &coderdtest.FakeAuthorizer{} rec := &coderdtest.RecordingAuthorizer{ Wrapped: fakeAuthorizer, diff --git a/coderd/database/dbfake/dbfake.go b/coderd/database/dbfake/dbfake.go index 98e98122e74e5..6d99005fb3334 100644 --- a/coderd/database/dbfake/dbfake.go +++ b/coderd/database/dbfake/dbfake.go @@ -12,6 +12,9 @@ import ( "github.com/sqlc-dev/pqtype" "github.com/stretchr/testify/require" + "cdr.dev/slog" + "cdr.dev/slog/sloggers/slogtest" + "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbgen" @@ -43,6 +46,7 @@ type WorkspaceResponse struct { // resources. type WorkspaceBuildBuilder struct { t testing.TB + logger slog.Logger db database.Store ps pubsub.Pubsub ws database.WorkspaceTable @@ -62,7 +66,10 @@ type workspaceBuildDisposition struct { // Omitting the template ID on a workspace will also generate a new template // with a template version. func WorkspaceBuild(t testing.TB, db database.Store, ws database.WorkspaceTable) WorkspaceBuildBuilder { - return WorkspaceBuildBuilder{t: t, db: db, ws: ws} + return WorkspaceBuildBuilder{ + t: t, db: db, ws: ws, + logger: slogtest.Make(t, &slogtest.Options{}).Named("dbfake").Leveled(slog.LevelDebug), + } } func (b WorkspaceBuildBuilder) Pubsub(ps pubsub.Pubsub) WorkspaceBuildBuilder { @@ -131,6 +138,7 @@ func (b WorkspaceBuildBuilder) Do() WorkspaceResponse { AgentToken: b.agentToken, } if b.ws.TemplateID == uuid.Nil { + b.logger.Debug(context.Background(), "creating template and version") resp.TemplateVersionResponse = TemplateVersion(b.t, b.db). Resources(b.resources...). Pubsub(b.ps). @@ -145,6 +153,7 @@ func (b WorkspaceBuildBuilder) Do() WorkspaceResponse { // If no template version is set assume the active version. if b.seed.TemplateVersionID == uuid.Nil { + b.logger.Debug(context.Background(), "assuming active template version") template, err := b.db.GetTemplateByID(ownerCtx, b.ws.TemplateID) require.NoError(b.t, err) require.NotNil(b.t, template.ActiveVersionID, "active version ID unexpectedly nil") @@ -156,6 +165,9 @@ func (b WorkspaceBuildBuilder) Do() WorkspaceResponse { // nolint: revive b.ws = dbgen.Workspace(b.t, b.db, b.ws) resp.Workspace = b.ws + b.logger.Debug(context.Background(), "created workspace", + slog.F("name", resp.Workspace.Name), + slog.F("workspace_id", resp.Workspace.ID)) } b.seed.WorkspaceID = b.ws.ID b.seed.InitiatorID = takeFirst(b.seed.InitiatorID, b.ws.OwnerID) @@ -179,12 +191,15 @@ func (b WorkspaceBuildBuilder) Do() WorkspaceResponse { Input: payload, Tags: map[string]string{}, TraceMetadata: pqtype.NullRawMessage{}, + LogsOverflowed: false, }) require.NoError(b.t, err, "insert job") + b.logger.Debug(context.Background(), "inserted provisioner job", slog.F("job_id", job.ID)) if b.dispo.starting { // might need to do this multiple times if we got a template version // import job as well + b.logger.Debug(context.Background(), "looping to acquire provisioner job") for { j, err := b.db.AcquireProvisionerJob(ownerCtx, database.AcquireProvisionerJobParams{ OrganizationID: job.OrganizationID, @@ -201,10 +216,12 @@ func (b WorkspaceBuildBuilder) Do() WorkspaceResponse { }) require.NoError(b.t, err, "acquire starting job") if j.ID == job.ID { + b.logger.Debug(context.Background(), "acquired provisioner job", slog.F("job_id", job.ID)) break } } } else { + b.logger.Debug(context.Background(), "completing the provisioner job") err = b.db.UpdateProvisionerJobWithCompleteByID(ownerCtx, database.UpdateProvisionerJobWithCompleteByIDParams{ ID: job.ID, UpdatedAt: dbtime.Now(), @@ -220,11 +237,16 @@ func (b WorkspaceBuildBuilder) Do() WorkspaceResponse { } resp.Build = dbgen.WorkspaceBuild(b.t, b.db, b.seed) + b.logger.Debug(context.Background(), "created workspace build", + slog.F("build_id", resp.Build.ID), + slog.F("workspace_id", resp.Workspace.ID), + slog.F("build_number", resp.Build.BuildNumber)) for i := range b.params { b.params[i].WorkspaceBuildID = resp.Build.ID } - _ = dbgen.WorkspaceBuildParameters(b.t, b.db, b.params) + params := dbgen.WorkspaceBuildParameters(b.t, b.db, b.params) + b.logger.Debug(context.Background(), "created workspace build parameters", slog.F("count", len(params))) if b.ws.Deleted { err = b.db.UpdateWorkspaceDeletedByID(ownerCtx, database.UpdateWorkspaceDeletedByIDParams{ @@ -232,6 +254,7 @@ func (b WorkspaceBuildBuilder) Do() WorkspaceResponse { Deleted: true, }) require.NoError(b.t, err) + b.logger.Debug(context.Background(), "deleted workspace", slog.F("workspace_id", resp.Workspace.ID)) } if b.ps != nil { @@ -242,6 +265,9 @@ func (b WorkspaceBuildBuilder) Do() WorkspaceResponse { require.NoError(b.t, err) err = b.ps.Publish(wspubsub.WorkspaceEventChannel(resp.Workspace.OwnerID), msg) require.NoError(b.t, err) + b.logger.Debug(context.Background(), "published workspace event", + slog.F("owner_id", resp.Workspace.ID), + slog.F("owner_id", resp.Workspace.OwnerID)) } agents, err := b.db.GetWorkspaceAgentsByWorkspaceAndBuildNumber(ownerCtx, database.GetWorkspaceAgentsByWorkspaceAndBuildNumberParams{ @@ -259,7 +285,12 @@ func (b WorkspaceBuildBuilder) Do() WorkspaceResponse { err = b.db.DeleteWorkspaceSubAgentByID(ownerCtx, subAgent.ID) require.NoError(b.t, err, "delete workspace agent subagent antagonist") - b.t.Logf("inserted deleted subagent antagonist %s (%v) for workspace agent %s (%v)", subAgent.Name, subAgent.ID, agent.Name, agent.ID) + b.logger.Debug(context.Background(), "inserted deleted subagent antagonist", + slog.F("subagent_name", subAgent.Name), + slog.F("subagent_id", subAgent.ID), + slog.F("agent_name", agent.Name), + slog.F("agent_id", agent.ID), + ) } } @@ -268,6 +299,7 @@ func (b WorkspaceBuildBuilder) Do() WorkspaceResponse { type ProvisionerJobResourcesBuilder struct { t testing.TB + logger slog.Logger db database.Store jobID uuid.UUID transition database.WorkspaceTransition @@ -280,6 +312,7 @@ func ProvisionerJobResources( ) ProvisionerJobResourcesBuilder { return ProvisionerJobResourcesBuilder{ t: t, + logger: slogtest.Make(t, &slogtest.Options{}).Named("dbfake").Leveled(slog.LevelDebug).With(slog.F("job_id", jobID)), db: db, jobID: jobID, transition: transition, @@ -291,13 +324,17 @@ func (b ProvisionerJobResourcesBuilder) Do() { b.t.Helper() transition := b.transition if transition == "" { - // Default to start! + b.logger.Debug(context.Background(), "setting default transition to start") transition = database.WorkspaceTransitionStart } for _, resource := range b.resources { //nolint:gocritic // This is only used by tests. err := provisionerdserver.InsertWorkspaceResource(ownerCtx, b.db, b.jobID, transition, resource, &telemetry.Snapshot{}) require.NoError(b.t, err) + b.logger.Debug(context.Background(), "created workspace resource", + slog.F("resource_name", resource.Name), + slog.F("agent_count", len(resource.Agents)), + ) } } @@ -308,6 +345,7 @@ type TemplateVersionResponse struct { type TemplateVersionBuilder struct { t testing.TB + logger slog.Logger db database.Store seed database.TemplateVersion fileID uuid.UUID @@ -325,6 +363,7 @@ type TemplateVersionBuilder struct { func TemplateVersion(t testing.TB, db database.Store) TemplateVersionBuilder { return TemplateVersionBuilder{ t: t, + logger: slogtest.Make(t, &slogtest.Options{}).Named("dbfake").Leveled(slog.LevelDebug), db: db, promote: true, autoCreateTemplate: true, @@ -395,9 +434,16 @@ func (t TemplateVersionBuilder) Do() TemplateVersionResponse { Valid: true, UUID: resp.Template.ID, } + t.logger.Debug(context.Background(), "created template", + slog.F("organization_id", resp.Template.OrganizationID), + slog.F("template_id", resp.Template.CreatedBy), + ) } version := dbgen.TemplateVersion(t.t, t.db, t.seed) + t.logger.Debug(context.Background(), "created template version", + slog.F("template_version_id", version.ID), + ) if t.promote { err := t.db.UpdateTemplateActiveVersionByID(ownerCtx, database.UpdateTemplateActiveVersionByIDParams{ ID: t.seed.TemplateID.UUID, @@ -405,10 +451,13 @@ func (t TemplateVersionBuilder) Do() TemplateVersionResponse { UpdatedAt: dbtime.Now(), }) require.NoError(t.t, err) + t.logger.Debug(context.Background(), "promoted template version", + slog.F("template_version_id", t.seed.ID), + ) } for _, preset := range t.presets { - dbgen.Preset(t.t, t.db, database.InsertPresetParams{ + prst := dbgen.Preset(t.t, t.db, database.InsertPresetParams{ ID: preset.ID, TemplateVersionID: version.ID, Name: preset.Name, @@ -417,15 +466,22 @@ func (t TemplateVersionBuilder) Do() TemplateVersionResponse { InvalidateAfterSecs: preset.InvalidateAfterSecs, SchedulingTimezone: preset.SchedulingTimezone, IsDefault: false, + Description: preset.Description, + Icon: preset.Icon, }) + t.logger.Debug(context.Background(), "added preset", + slog.F("preset_id", prst.ID), + slog.F("preset_name", prst.Name), + ) } for _, presetParam := range t.presetParams { - dbgen.PresetParameter(t.t, t.db, database.InsertPresetParametersParams{ + prm := dbgen.PresetParameter(t.t, t.db, database.InsertPresetParametersParams{ TemplateVersionPresetID: presetParam.TemplateVersionPresetID, Names: []string{presetParam.Name}, Values: []string{presetParam.Value}, }) + t.logger.Debug(context.Background(), "added preset parameter", slog.F("param_name", prm[0].Name)) } payload, err := json.Marshal(provisionerdserver.TemplateVersionImportJob{ @@ -445,6 +501,7 @@ func (t TemplateVersionBuilder) Do() TemplateVersionResponse { }, FileID: t.fileID, }) + t.logger.Debug(context.Background(), "added template version import job", slog.F("job_id", job.ID)) t.seed.JobID = job.ID diff --git a/coderd/database/dbgen/dbgen.go b/coderd/database/dbgen/dbgen.go index d5693afe98826..fbf886f860d4c 100644 --- a/coderd/database/dbgen/dbgen.go +++ b/coderd/database/dbgen/dbgen.go @@ -147,7 +147,8 @@ func Template(t testing.TB, db database.Store, seed database.Template) database. DisplayName: takeFirst(seed.DisplayName, testutil.GetRandomName(t)), AllowUserCancelWorkspaceJobs: seed.AllowUserCancelWorkspaceJobs, MaxPortSharingLevel: takeFirst(seed.MaxPortSharingLevel, database.AppSharingLevelOwner), - UseClassicParameterFlow: takeFirst(seed.UseClassicParameterFlow, true), + UseClassicParameterFlow: takeFirst(seed.UseClassicParameterFlow, false), + CorsBehavior: takeFirst(seed.CorsBehavior, database.CorsBehaviorSimple), }) require.NoError(t, err, "insert template") @@ -436,6 +437,7 @@ func WorkspaceBuild(t testing.TB, db database.Store, orig database.WorkspaceBuil jobID := takeFirst(orig.JobID, uuid.New()) hasAITask := takeFirst(orig.HasAITask, sql.NullBool{}) sidebarAppID := takeFirst(orig.AITaskSidebarAppID, uuid.NullUUID{}) + hasExternalAgent := takeFirst(orig.HasExternalAgent, sql.NullBool{}) var build database.WorkspaceBuild err := db.InTx(func(db database.Store) error { err := db.InsertWorkspaceBuild(genCtx, database.InsertWorkspaceBuildParams{ @@ -469,12 +471,13 @@ func WorkspaceBuild(t testing.TB, db database.Store, orig database.WorkspaceBuil require.NoError(t, err) } - if hasAITask.Valid { - require.NoError(t, db.UpdateWorkspaceBuildAITaskByID(genCtx, database.UpdateWorkspaceBuildAITaskByIDParams{ - HasAITask: hasAITask, - SidebarAppID: sidebarAppID, - UpdatedAt: dbtime.Now(), - ID: buildID, + if hasAITask.Valid || hasExternalAgent.Valid { + require.NoError(t, db.UpdateWorkspaceBuildFlagsByID(genCtx, database.UpdateWorkspaceBuildFlagsByIDParams{ + ID: buildID, + HasAITask: hasAITask, + HasExternalAgent: hasExternalAgent, + SidebarAppID: sidebarAppID, + UpdatedAt: dbtime.Now(), })) } @@ -774,6 +777,7 @@ func ProvisionerJob(t testing.TB, db database.Store, ps pubsub.Pubsub, orig data Input: takeFirstSlice(orig.Input, []byte("{}")), Tags: tags, TraceMetadata: pqtype.NullRawMessage{}, + LogsOverflowed: false, }) require.NoError(t, err, "insert job") if ps != nil { @@ -1026,6 +1030,7 @@ func ExternalAuthLink(t testing.TB, db database.Store, orig database.ExternalAut func TemplateVersion(t testing.TB, db database.Store, orig database.TemplateVersion) database.TemplateVersion { var version database.TemplateVersion hasAITask := takeFirst(orig.HasAITask, sql.NullBool{}) + hasExternalAgent := takeFirst(orig.HasExternalAgent, sql.NullBool{}) jobID := takeFirst(orig.JobID, uuid.New()) err := db.InTx(func(db database.Store) error { versionID := takeFirst(orig.ID, uuid.New()) @@ -1046,11 +1051,12 @@ func TemplateVersion(t testing.TB, db database.Store, orig database.TemplateVers return err } - if hasAITask.Valid { - require.NoError(t, db.UpdateTemplateVersionAITaskByJobID(genCtx, database.UpdateTemplateVersionAITaskByJobIDParams{ - JobID: jobID, - HasAITask: hasAITask, - UpdatedAt: dbtime.Now(), + if hasAITask.Valid || hasExternalAgent.Valid { + require.NoError(t, db.UpdateTemplateVersionFlagsByJobID(genCtx, database.UpdateTemplateVersionFlagsByJobIDParams{ + JobID: jobID, + HasAITask: hasAITask, + HasExternalAgent: hasExternalAgent, + UpdatedAt: dbtime.Now(), })) } @@ -1392,6 +1398,8 @@ func Preset(t testing.TB, db database.Store, seed database.InsertPresetParams) d InvalidateAfterSecs: seed.InvalidateAfterSecs, SchedulingTimezone: seed.SchedulingTimezone, IsDefault: seed.IsDefault, + Description: seed.Description, + Icon: seed.Icon, }) require.NoError(t, err, "insert preset") return preset @@ -1418,11 +1426,39 @@ func PresetParameter(t testing.TB, db database.Store, seed database.InsertPreset return parameters } -func ClaimPrebuild(t testing.TB, db database.Store, newUserID uuid.UUID, newName string, presetID uuid.UUID) database.ClaimPrebuiltWorkspaceRow { +func UserSecret(t testing.TB, db database.Store, seed database.UserSecret) database.UserSecret { + userSecret, err := db.CreateUserSecret(genCtx, database.CreateUserSecretParams{ + ID: takeFirst(seed.ID, uuid.New()), + UserID: takeFirst(seed.UserID, uuid.New()), + Name: takeFirst(seed.Name, "secret-name"), + Description: takeFirst(seed.Description, "secret description"), + Value: takeFirst(seed.Value, "secret value"), + EnvName: takeFirst(seed.EnvName, "SECRET_ENV_NAME"), + FilePath: takeFirst(seed.FilePath, "~/secret/file/path"), + }) + require.NoError(t, err, "failed to insert user secret") + return userSecret +} + +func ClaimPrebuild( + t testing.TB, + db database.Store, + now time.Time, + newUserID uuid.UUID, + newName string, + presetID uuid.UUID, + autostartSchedule sql.NullString, + nextStartAt sql.NullTime, + ttl sql.NullInt64, +) database.ClaimPrebuiltWorkspaceRow { claimedWorkspace, err := db.ClaimPrebuiltWorkspace(genCtx, database.ClaimPrebuiltWorkspaceParams{ - NewUserID: newUserID, - NewName: newName, - PresetID: presetID, + NewUserID: newUserID, + NewName: newName, + Now: now, + PresetID: presetID, + AutostartSchedule: autostartSchedule, + NextStartAt: nextStartAt, + WorkspaceTtl: ttl, }) require.NoError(t, err, "claim prebuilt workspace") diff --git a/coderd/database/dbgen/dbgen_test.go b/coderd/database/dbgen/dbgen_test.go index 7653176da8079..872704fa1dce0 100644 --- a/coderd/database/dbgen/dbgen_test.go +++ b/coderd/database/dbgen/dbgen_test.go @@ -168,6 +168,8 @@ func TestGenerator(t *testing.T) { DeletingAt: w.DeletingAt, AutomaticUpdates: w.AutomaticUpdates, Favorite: w.Favorite, + GroupACL: database.WorkspaceACL{}, + UserACL: database.WorkspaceACL{}, } require.Equal(t, exp, table) }) diff --git a/coderd/database/dbmetrics/querymetrics.go b/coderd/database/dbmetrics/querymetrics.go index 811d945ac7da9..11d21eab3b593 100644 --- a/coderd/database/dbmetrics/querymetrics.go +++ b/coderd/database/dbmetrics/querymetrics.go @@ -215,6 +215,13 @@ func (m queryMetricsStore) CountUnreadInboxNotificationsByUserID(ctx context.Con return r0, r1 } +func (m queryMetricsStore) CreateUserSecret(ctx context.Context, arg database.CreateUserSecretParams) (database.UserSecret, error) { + start := time.Now() + r0, r1 := m.s.CreateUserSecret(ctx, arg) + m.queryLatencies.WithLabelValues("CreateUserSecret").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) CustomRoles(ctx context.Context, arg database.CustomRolesParams) ([]database.CustomRole, error) { start := time.Now() r0, r1 := m.s.CustomRoles(ctx, arg) @@ -460,6 +467,13 @@ func (m queryMetricsStore) DeleteTailnetTunnel(ctx context.Context, arg database return r0, r1 } +func (m queryMetricsStore) DeleteUserSecret(ctx context.Context, id uuid.UUID) error { + start := time.Now() + r0 := m.s.DeleteUserSecret(ctx, id) + m.queryLatencies.WithLabelValues("DeleteUserSecret").Observe(time.Since(start).Seconds()) + return r0 +} + func (m queryMetricsStore) DeleteWebpushSubscriptionByUserIDAndEndpoint(ctx context.Context, arg database.DeleteWebpushSubscriptionByUserIDAndEndpointParams) error { start := time.Now() r0 := m.s.DeleteWebpushSubscriptionByUserIDAndEndpoint(ctx, arg) @@ -551,6 +565,13 @@ func (m queryMetricsStore) FetchVolumesResourceMonitorsUpdatedAfter(ctx context. return r0, r1 } +func (m queryMetricsStore) FindMatchingPresetID(ctx context.Context, arg database.FindMatchingPresetIDParams) (uuid.UUID, error) { + start := time.Now() + r0, r1 := m.s.FindMatchingPresetID(ctx, arg) + m.queryLatencies.WithLabelValues("FindMatchingPresetID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) GetAPIKeyByID(ctx context.Context, id string) (database.APIKey, error) { start := time.Now() apiKey, err := m.s.GetAPIKeyByID(ctx, id) @@ -929,13 +950,6 @@ func (m queryMetricsStore) GetLatestWorkspaceBuildByWorkspaceID(ctx context.Cont return build, err } -func (m queryMetricsStore) GetLatestWorkspaceBuilds(ctx context.Context) ([]database.WorkspaceBuild, error) { - start := time.Now() - builds, err := m.s.GetLatestWorkspaceBuilds(ctx) - m.queryLatencies.WithLabelValues("GetLatestWorkspaceBuilds").Observe(time.Since(start).Seconds()) - return builds, err -} - func (m queryMetricsStore) GetLatestWorkspaceBuildsByWorkspaceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceBuild, error) { start := time.Now() builds, err := m.s.GetLatestWorkspaceBuildsByWorkspaceIDs(ctx, ids) @@ -1524,6 +1538,13 @@ func (m queryMetricsStore) GetTemplateVersionByTemplateIDAndName(ctx context.Con return version, err } +func (m queryMetricsStore) GetTemplateVersionHasAITask(ctx context.Context, id uuid.UUID) (bool, error) { + start := time.Now() + r0, r1 := m.s.GetTemplateVersionHasAITask(ctx, id) + m.queryLatencies.WithLabelValues("GetTemplateVersionHasAITask").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) GetTemplateVersionParameters(ctx context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionParameter, error) { start := time.Now() parameters, err := m.s.GetTemplateVersionParameters(ctx, templateVersionID) @@ -1657,6 +1678,20 @@ func (m queryMetricsStore) GetUserNotificationPreferences(ctx context.Context, u return r0, r1 } +func (m queryMetricsStore) GetUserSecret(ctx context.Context, id uuid.UUID) (database.UserSecret, error) { + start := time.Now() + r0, r1 := m.s.GetUserSecret(ctx, id) + m.queryLatencies.WithLabelValues("GetUserSecret").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) GetUserSecretByUserIDAndName(ctx context.Context, arg database.GetUserSecretByUserIDAndNameParams) (database.UserSecret, error) { + start := time.Now() + r0, r1 := m.s.GetUserSecretByUserIDAndName(ctx, arg) + m.queryLatencies.WithLabelValues("GetUserSecretByUserIDAndName").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) GetUserStatusCounts(ctx context.Context, arg database.GetUserStatusCountsParams) ([]database.GetUserStatusCountsRow, error) { start := time.Now() r0, r1 := m.s.GetUserStatusCounts(ctx, arg) @@ -2098,13 +2133,6 @@ func (m queryMetricsStore) GetWorkspacesEligibleForTransition(ctx context.Contex return workspaces, err } -func (m queryMetricsStore) HasTemplateVersionsWithAITask(ctx context.Context) (bool, error) { - start := time.Now() - r0, r1 := m.s.HasTemplateVersionsWithAITask(ctx) - m.queryLatencies.WithLabelValues("HasTemplateVersionsWithAITask").Observe(time.Since(start).Seconds()) - return r0, r1 -} - func (m queryMetricsStore) InsertAPIKey(ctx context.Context, arg database.InsertAPIKeyParams) (database.APIKey, error) { start := time.Now() key, err := m.s.InsertAPIKey(ctx, arg) @@ -2371,6 +2399,13 @@ func (m queryMetricsStore) InsertTemplateVersionWorkspaceTag(ctx context.Context return r0, r1 } +func (m queryMetricsStore) InsertUsageEvent(ctx context.Context, arg database.InsertUsageEventParams) error { + start := time.Now() + r0 := m.s.InsertUsageEvent(ctx, arg) + m.queryLatencies.WithLabelValues("InsertUsageEvent").Observe(time.Since(start).Seconds()) + return r0 +} + func (m queryMetricsStore) InsertUser(ctx context.Context, arg database.InsertUserParams) (database.User, error) { start := time.Now() user, err := m.s.InsertUser(ctx, arg) @@ -2546,6 +2581,13 @@ func (m queryMetricsStore) ListProvisionerKeysByOrganizationExcludeReserved(ctx return r0, r1 } +func (m queryMetricsStore) ListUserSecrets(ctx context.Context, userID uuid.UUID) ([]database.UserSecret, error) { + start := time.Now() + r0, r1 := m.s.ListUserSecrets(ctx, userID) + m.queryLatencies.WithLabelValues("ListUserSecrets").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) ListWorkspaceAgentPortShares(ctx context.Context, workspaceID uuid.UUID) ([]database.WorkspaceAgentPortShare, error) { start := time.Now() r0, r1 := m.s.ListWorkspaceAgentPortShares(ctx, workspaceID) @@ -2623,6 +2665,13 @@ func (m queryMetricsStore) RevokeDBCryptKey(ctx context.Context, activeKeyDigest return r0 } +func (m queryMetricsStore) SelectUsageEventsForPublishing(ctx context.Context, arg time.Time) ([]database.UsageEvent, error) { + start := time.Now() + r0, r1 := m.s.SelectUsageEventsForPublishing(ctx, arg) + m.queryLatencies.WithLabelValues("SelectUsageEventsForPublishing").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) TryAcquireLock(ctx context.Context, pgTryAdvisoryXactLock int64) (bool, error) { start := time.Now() ok, err := m.s.TryAcquireLock(ctx, pgTryAdvisoryXactLock) @@ -2784,6 +2833,20 @@ func (m queryMetricsStore) UpdateProvisionerJobByID(ctx context.Context, arg dat return err } +func (m queryMetricsStore) UpdateProvisionerJobLogsLength(ctx context.Context, arg database.UpdateProvisionerJobLogsLengthParams) error { + start := time.Now() + r0 := m.s.UpdateProvisionerJobLogsLength(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateProvisionerJobLogsLength").Observe(time.Since(start).Seconds()) + return r0 +} + +func (m queryMetricsStore) UpdateProvisionerJobLogsOverflowed(ctx context.Context, arg database.UpdateProvisionerJobLogsOverflowedParams) error { + start := time.Now() + r0 := m.s.UpdateProvisionerJobLogsOverflowed(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateProvisionerJobLogsOverflowed").Observe(time.Since(start).Seconds()) + return r0 +} + func (m queryMetricsStore) UpdateProvisionerJobWithCancelByID(ctx context.Context, arg database.UpdateProvisionerJobWithCancelByIDParams) error { start := time.Now() err := m.s.UpdateProvisionerJobWithCancelByID(ctx, arg) @@ -2861,13 +2924,6 @@ func (m queryMetricsStore) UpdateTemplateScheduleByID(ctx context.Context, arg d return err } -func (m queryMetricsStore) UpdateTemplateVersionAITaskByJobID(ctx context.Context, arg database.UpdateTemplateVersionAITaskByJobIDParams) error { - start := time.Now() - r0 := m.s.UpdateTemplateVersionAITaskByJobID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateTemplateVersionAITaskByJobID").Observe(time.Since(start).Seconds()) - return r0 -} - func (m queryMetricsStore) UpdateTemplateVersionByID(ctx context.Context, arg database.UpdateTemplateVersionByIDParams) error { start := time.Now() err := m.s.UpdateTemplateVersionByID(ctx, arg) @@ -2889,6 +2945,13 @@ func (m queryMetricsStore) UpdateTemplateVersionExternalAuthProvidersByJobID(ctx return err } +func (m queryMetricsStore) UpdateTemplateVersionFlagsByJobID(ctx context.Context, arg database.UpdateTemplateVersionFlagsByJobIDParams) error { + start := time.Now() + r0 := m.s.UpdateTemplateVersionFlagsByJobID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateTemplateVersionFlagsByJobID").Observe(time.Since(start).Seconds()) + return r0 +} + func (m queryMetricsStore) UpdateTemplateWorkspacesLastUsedAt(ctx context.Context, arg database.UpdateTemplateWorkspacesLastUsedAtParams) error { start := time.Now() r0 := m.s.UpdateTemplateWorkspacesLastUsedAt(ctx, arg) @@ -2896,6 +2959,13 @@ func (m queryMetricsStore) UpdateTemplateWorkspacesLastUsedAt(ctx context.Contex return r0 } +func (m queryMetricsStore) UpdateUsageEventsPostPublish(ctx context.Context, arg database.UpdateUsageEventsPostPublishParams) error { + start := time.Now() + r0 := m.s.UpdateUsageEventsPostPublish(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUsageEventsPostPublish").Observe(time.Since(start).Seconds()) + return r0 +} + func (m queryMetricsStore) UpdateUserDeletedByID(ctx context.Context, id uuid.UUID) error { start := time.Now() r0 := m.s.UpdateUserDeletedByID(ctx, id) @@ -2980,6 +3050,13 @@ func (m queryMetricsStore) UpdateUserRoles(ctx context.Context, arg database.Upd return user, err } +func (m queryMetricsStore) UpdateUserSecret(ctx context.Context, arg database.UpdateUserSecretParams) (database.UserSecret, error) { + start := time.Now() + r0, r1 := m.s.UpdateUserSecret(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateUserSecret").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) UpdateUserStatus(ctx context.Context, arg database.UpdateUserStatusParams) (database.User, error) { start := time.Now() user, err := m.s.UpdateUserStatus(ctx, arg) @@ -3015,6 +3092,13 @@ func (m queryMetricsStore) UpdateWorkspace(ctx context.Context, arg database.Upd return workspace, err } +func (m queryMetricsStore) UpdateWorkspaceACLByID(ctx context.Context, arg database.UpdateWorkspaceACLByIDParams) error { + start := time.Now() + r0 := m.s.UpdateWorkspaceACLByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceACLByID").Observe(time.Since(start).Seconds()) + return r0 +} + func (m queryMetricsStore) UpdateWorkspaceAgentConnectionByID(ctx context.Context, arg database.UpdateWorkspaceAgentConnectionByIDParams) error { start := time.Now() err := m.s.UpdateWorkspaceAgentConnectionByID(ctx, arg) @@ -3071,13 +3155,6 @@ func (m queryMetricsStore) UpdateWorkspaceAutostart(ctx context.Context, arg dat return err } -func (m queryMetricsStore) UpdateWorkspaceBuildAITaskByID(ctx context.Context, arg database.UpdateWorkspaceBuildAITaskByIDParams) error { - start := time.Now() - r0 := m.s.UpdateWorkspaceBuildAITaskByID(ctx, arg) - m.queryLatencies.WithLabelValues("UpdateWorkspaceBuildAITaskByID").Observe(time.Since(start).Seconds()) - return r0 -} - func (m queryMetricsStore) UpdateWorkspaceBuildCostByID(ctx context.Context, arg database.UpdateWorkspaceBuildCostByIDParams) error { start := time.Now() err := m.s.UpdateWorkspaceBuildCostByID(ctx, arg) @@ -3092,6 +3169,13 @@ func (m queryMetricsStore) UpdateWorkspaceBuildDeadlineByID(ctx context.Context, return r0 } +func (m queryMetricsStore) UpdateWorkspaceBuildFlagsByID(ctx context.Context, arg database.UpdateWorkspaceBuildFlagsByIDParams) error { + start := time.Now() + r0 := m.s.UpdateWorkspaceBuildFlagsByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateWorkspaceBuildFlagsByID").Observe(time.Since(start).Seconds()) + return r0 +} + func (m queryMetricsStore) UpdateWorkspaceBuildProvisionerStateByID(ctx context.Context, arg database.UpdateWorkspaceBuildProvisionerStateByIDParams) error { start := time.Now() r0 := m.s.UpdateWorkspaceBuildProvisionerStateByID(ctx, arg) @@ -3358,6 +3442,20 @@ func (m queryMetricsStore) UpsertWorkspaceAppAuditSession(ctx context.Context, a return r0, r1 } +func (m queryMetricsStore) ValidateGroupIDs(ctx context.Context, groupIds []uuid.UUID) (database.ValidateGroupIDsRow, error) { + start := time.Now() + r0, r1 := m.s.ValidateGroupIDs(ctx, groupIds) + m.queryLatencies.WithLabelValues("ValidateGroupIDs").Observe(time.Since(start).Seconds()) + return r0, r1 +} + +func (m queryMetricsStore) ValidateUserIDs(ctx context.Context, userIds []uuid.UUID) (database.ValidateUserIDsRow, error) { + start := time.Now() + r0, r1 := m.s.ValidateUserIDs(ctx, userIds) + m.queryLatencies.WithLabelValues("ValidateUserIDs").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) GetAuthorizedTemplates(ctx context.Context, arg database.GetTemplatesWithFilterParams, prepared rbac.PreparedAuthorized) ([]database.Template, error) { start := time.Now() templates, err := m.s.GetAuthorizedTemplates(ctx, arg, prepared) diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go index b20c3d06209b5..67244cf2b01e9 100644 --- a/coderd/database/dbmock/dbmock.go +++ b/coderd/database/dbmock/dbmock.go @@ -338,6 +338,21 @@ func (mr *MockStoreMockRecorder) CountUnreadInboxNotificationsByUserID(ctx, user return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CountUnreadInboxNotificationsByUserID", reflect.TypeOf((*MockStore)(nil).CountUnreadInboxNotificationsByUserID), ctx, userID) } +// CreateUserSecret mocks base method. +func (m *MockStore) CreateUserSecret(ctx context.Context, arg database.CreateUserSecretParams) (database.UserSecret, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateUserSecret", ctx, arg) + ret0, _ := ret[0].(database.UserSecret) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateUserSecret indicates an expected call of CreateUserSecret. +func (mr *MockStoreMockRecorder) CreateUserSecret(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateUserSecret", reflect.TypeOf((*MockStore)(nil).CreateUserSecret), ctx, arg) +} + // CustomRoles mocks base method. func (m *MockStore) CustomRoles(ctx context.Context, arg database.CustomRolesParams) ([]database.CustomRole, error) { m.ctrl.T.Helper() @@ -835,6 +850,20 @@ func (mr *MockStoreMockRecorder) DeleteTailnetTunnel(ctx, arg any) *gomock.Call return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteTailnetTunnel", reflect.TypeOf((*MockStore)(nil).DeleteTailnetTunnel), ctx, arg) } +// DeleteUserSecret mocks base method. +func (m *MockStore) DeleteUserSecret(ctx context.Context, id uuid.UUID) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteUserSecret", ctx, id) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteUserSecret indicates an expected call of DeleteUserSecret. +func (mr *MockStoreMockRecorder) DeleteUserSecret(ctx, id any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteUserSecret", reflect.TypeOf((*MockStore)(nil).DeleteUserSecret), ctx, id) +} + // DeleteWebpushSubscriptionByUserIDAndEndpoint mocks base method. func (m *MockStore) DeleteWebpushSubscriptionByUserIDAndEndpoint(ctx context.Context, arg database.DeleteWebpushSubscriptionByUserIDAndEndpointParams) error { m.ctrl.T.Helper() @@ -1022,6 +1051,21 @@ func (mr *MockStoreMockRecorder) FetchVolumesResourceMonitorsUpdatedAfter(ctx, u return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FetchVolumesResourceMonitorsUpdatedAfter", reflect.TypeOf((*MockStore)(nil).FetchVolumesResourceMonitorsUpdatedAfter), ctx, updatedAt) } +// FindMatchingPresetID mocks base method. +func (m *MockStore) FindMatchingPresetID(ctx context.Context, arg database.FindMatchingPresetIDParams) (uuid.UUID, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "FindMatchingPresetID", ctx, arg) + ret0, _ := ret[0].(uuid.UUID) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// FindMatchingPresetID indicates an expected call of FindMatchingPresetID. +func (mr *MockStoreMockRecorder) FindMatchingPresetID(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "FindMatchingPresetID", reflect.TypeOf((*MockStore)(nil).FindMatchingPresetID), ctx, arg) +} + // GetAPIKeyByID mocks base method. func (m *MockStore) GetAPIKeyByID(ctx context.Context, id string) (database.APIKey, error) { m.ctrl.T.Helper() @@ -1937,21 +1981,6 @@ func (mr *MockStoreMockRecorder) GetLatestWorkspaceBuildByWorkspaceID(ctx, works return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLatestWorkspaceBuildByWorkspaceID", reflect.TypeOf((*MockStore)(nil).GetLatestWorkspaceBuildByWorkspaceID), ctx, workspaceID) } -// GetLatestWorkspaceBuilds mocks base method. -func (m *MockStore) GetLatestWorkspaceBuilds(ctx context.Context) ([]database.WorkspaceBuild, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLatestWorkspaceBuilds", ctx) - ret0, _ := ret[0].([]database.WorkspaceBuild) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLatestWorkspaceBuilds indicates an expected call of GetLatestWorkspaceBuilds. -func (mr *MockStoreMockRecorder) GetLatestWorkspaceBuilds(ctx any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLatestWorkspaceBuilds", reflect.TypeOf((*MockStore)(nil).GetLatestWorkspaceBuilds), ctx) -} - // GetLatestWorkspaceBuildsByWorkspaceIDs mocks base method. func (m *MockStore) GetLatestWorkspaceBuildsByWorkspaceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceBuild, error) { m.ctrl.T.Helper() @@ -3242,6 +3271,21 @@ func (mr *MockStoreMockRecorder) GetTemplateVersionByTemplateIDAndName(ctx, arg return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTemplateVersionByTemplateIDAndName", reflect.TypeOf((*MockStore)(nil).GetTemplateVersionByTemplateIDAndName), ctx, arg) } +// GetTemplateVersionHasAITask mocks base method. +func (m *MockStore) GetTemplateVersionHasAITask(ctx context.Context, id uuid.UUID) (bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTemplateVersionHasAITask", ctx, id) + ret0, _ := ret[0].(bool) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetTemplateVersionHasAITask indicates an expected call of GetTemplateVersionHasAITask. +func (mr *MockStoreMockRecorder) GetTemplateVersionHasAITask(ctx, id any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTemplateVersionHasAITask", reflect.TypeOf((*MockStore)(nil).GetTemplateVersionHasAITask), ctx, id) +} + // GetTemplateVersionParameters mocks base method. func (m *MockStore) GetTemplateVersionParameters(ctx context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionParameter, error) { m.ctrl.T.Helper() @@ -3527,6 +3571,36 @@ func (mr *MockStoreMockRecorder) GetUserNotificationPreferences(ctx, userID any) return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetUserNotificationPreferences", reflect.TypeOf((*MockStore)(nil).GetUserNotificationPreferences), ctx, userID) } +// GetUserSecret mocks base method. +func (m *MockStore) GetUserSecret(ctx context.Context, id uuid.UUID) (database.UserSecret, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetUserSecret", ctx, id) + ret0, _ := ret[0].(database.UserSecret) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetUserSecret indicates an expected call of GetUserSecret. +func (mr *MockStoreMockRecorder) GetUserSecret(ctx, id any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetUserSecret", reflect.TypeOf((*MockStore)(nil).GetUserSecret), ctx, id) +} + +// GetUserSecretByUserIDAndName mocks base method. +func (m *MockStore) GetUserSecretByUserIDAndName(ctx context.Context, arg database.GetUserSecretByUserIDAndNameParams) (database.UserSecret, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetUserSecretByUserIDAndName", ctx, arg) + ret0, _ := ret[0].(database.UserSecret) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetUserSecretByUserIDAndName indicates an expected call of GetUserSecretByUserIDAndName. +func (mr *MockStoreMockRecorder) GetUserSecretByUserIDAndName(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetUserSecretByUserIDAndName", reflect.TypeOf((*MockStore)(nil).GetUserSecretByUserIDAndName), ctx, arg) +} + // GetUserStatusCounts mocks base method. func (m *MockStore) GetUserStatusCounts(ctx context.Context, arg database.GetUserStatusCountsParams) ([]database.GetUserStatusCountsRow, error) { m.ctrl.T.Helper() @@ -4472,21 +4546,6 @@ func (mr *MockStoreMockRecorder) GetWorkspacesEligibleForTransition(ctx, now any return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetWorkspacesEligibleForTransition", reflect.TypeOf((*MockStore)(nil).GetWorkspacesEligibleForTransition), ctx, now) } -// HasTemplateVersionsWithAITask mocks base method. -func (m *MockStore) HasTemplateVersionsWithAITask(ctx context.Context) (bool, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "HasTemplateVersionsWithAITask", ctx) - ret0, _ := ret[0].(bool) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// HasTemplateVersionsWithAITask indicates an expected call of HasTemplateVersionsWithAITask. -func (mr *MockStoreMockRecorder) HasTemplateVersionsWithAITask(ctx any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "HasTemplateVersionsWithAITask", reflect.TypeOf((*MockStore)(nil).HasTemplateVersionsWithAITask), ctx) -} - // InTx mocks base method. func (m *MockStore) InTx(arg0 func(database.Store) error, arg1 *database.TxOptions) error { m.ctrl.T.Helper() @@ -5063,6 +5122,20 @@ func (mr *MockStoreMockRecorder) InsertTemplateVersionWorkspaceTag(ctx, arg any) return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertTemplateVersionWorkspaceTag", reflect.TypeOf((*MockStore)(nil).InsertTemplateVersionWorkspaceTag), ctx, arg) } +// InsertUsageEvent mocks base method. +func (m *MockStore) InsertUsageEvent(ctx context.Context, arg database.InsertUsageEventParams) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "InsertUsageEvent", ctx, arg) + ret0, _ := ret[0].(error) + return ret0 +} + +// InsertUsageEvent indicates an expected call of InsertUsageEvent. +func (mr *MockStoreMockRecorder) InsertUsageEvent(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertUsageEvent", reflect.TypeOf((*MockStore)(nil).InsertUsageEvent), ctx, arg) +} + // InsertUser mocks base method. func (m *MockStore) InsertUser(ctx context.Context, arg database.InsertUserParams) (database.User, error) { m.ctrl.T.Helper() @@ -5432,6 +5505,21 @@ func (mr *MockStoreMockRecorder) ListProvisionerKeysByOrganizationExcludeReserve return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListProvisionerKeysByOrganizationExcludeReserved", reflect.TypeOf((*MockStore)(nil).ListProvisionerKeysByOrganizationExcludeReserved), ctx, organizationID) } +// ListUserSecrets mocks base method. +func (m *MockStore) ListUserSecrets(ctx context.Context, userID uuid.UUID) ([]database.UserSecret, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListUserSecrets", ctx, userID) + ret0, _ := ret[0].([]database.UserSecret) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListUserSecrets indicates an expected call of ListUserSecrets. +func (mr *MockStoreMockRecorder) ListUserSecrets(ctx, userID any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListUserSecrets", reflect.TypeOf((*MockStore)(nil).ListUserSecrets), ctx, userID) +} + // ListWorkspaceAgentPortShares mocks base method. func (m *MockStore) ListWorkspaceAgentPortShares(ctx context.Context, workspaceID uuid.UUID) ([]database.WorkspaceAgentPortShare, error) { m.ctrl.T.Helper() @@ -5623,6 +5711,21 @@ func (mr *MockStoreMockRecorder) RevokeDBCryptKey(ctx, activeKeyDigest any) *gom return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RevokeDBCryptKey", reflect.TypeOf((*MockStore)(nil).RevokeDBCryptKey), ctx, activeKeyDigest) } +// SelectUsageEventsForPublishing mocks base method. +func (m *MockStore) SelectUsageEventsForPublishing(ctx context.Context, now time.Time) ([]database.UsageEvent, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SelectUsageEventsForPublishing", ctx, now) + ret0, _ := ret[0].([]database.UsageEvent) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// SelectUsageEventsForPublishing indicates an expected call of SelectUsageEventsForPublishing. +func (mr *MockStoreMockRecorder) SelectUsageEventsForPublishing(ctx, now any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SelectUsageEventsForPublishing", reflect.TypeOf((*MockStore)(nil).SelectUsageEventsForPublishing), ctx, now) +} + // TryAcquireLock mocks base method. func (m *MockStore) TryAcquireLock(ctx context.Context, pgTryAdvisoryXactLock int64) (bool, error) { m.ctrl.T.Helper() @@ -5958,6 +6061,34 @@ func (mr *MockStoreMockRecorder) UpdateProvisionerJobByID(ctx, arg any) *gomock. return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateProvisionerJobByID", reflect.TypeOf((*MockStore)(nil).UpdateProvisionerJobByID), ctx, arg) } +// UpdateProvisionerJobLogsLength mocks base method. +func (m *MockStore) UpdateProvisionerJobLogsLength(ctx context.Context, arg database.UpdateProvisionerJobLogsLengthParams) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateProvisionerJobLogsLength", ctx, arg) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpdateProvisionerJobLogsLength indicates an expected call of UpdateProvisionerJobLogsLength. +func (mr *MockStoreMockRecorder) UpdateProvisionerJobLogsLength(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateProvisionerJobLogsLength", reflect.TypeOf((*MockStore)(nil).UpdateProvisionerJobLogsLength), ctx, arg) +} + +// UpdateProvisionerJobLogsOverflowed mocks base method. +func (m *MockStore) UpdateProvisionerJobLogsOverflowed(ctx context.Context, arg database.UpdateProvisionerJobLogsOverflowedParams) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateProvisionerJobLogsOverflowed", ctx, arg) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpdateProvisionerJobLogsOverflowed indicates an expected call of UpdateProvisionerJobLogsOverflowed. +func (mr *MockStoreMockRecorder) UpdateProvisionerJobLogsOverflowed(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateProvisionerJobLogsOverflowed", reflect.TypeOf((*MockStore)(nil).UpdateProvisionerJobLogsOverflowed), ctx, arg) +} + // UpdateProvisionerJobWithCancelByID mocks base method. func (m *MockStore) UpdateProvisionerJobWithCancelByID(ctx context.Context, arg database.UpdateProvisionerJobWithCancelByIDParams) error { m.ctrl.T.Helper() @@ -6113,20 +6244,6 @@ func (mr *MockStoreMockRecorder) UpdateTemplateScheduleByID(ctx, arg any) *gomoc return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateTemplateScheduleByID", reflect.TypeOf((*MockStore)(nil).UpdateTemplateScheduleByID), ctx, arg) } -// UpdateTemplateVersionAITaskByJobID mocks base method. -func (m *MockStore) UpdateTemplateVersionAITaskByJobID(ctx context.Context, arg database.UpdateTemplateVersionAITaskByJobIDParams) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "UpdateTemplateVersionAITaskByJobID", ctx, arg) - ret0, _ := ret[0].(error) - return ret0 -} - -// UpdateTemplateVersionAITaskByJobID indicates an expected call of UpdateTemplateVersionAITaskByJobID. -func (mr *MockStoreMockRecorder) UpdateTemplateVersionAITaskByJobID(ctx, arg any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateTemplateVersionAITaskByJobID", reflect.TypeOf((*MockStore)(nil).UpdateTemplateVersionAITaskByJobID), ctx, arg) -} - // UpdateTemplateVersionByID mocks base method. func (m *MockStore) UpdateTemplateVersionByID(ctx context.Context, arg database.UpdateTemplateVersionByIDParams) error { m.ctrl.T.Helper() @@ -6169,6 +6286,20 @@ func (mr *MockStoreMockRecorder) UpdateTemplateVersionExternalAuthProvidersByJob return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateTemplateVersionExternalAuthProvidersByJobID", reflect.TypeOf((*MockStore)(nil).UpdateTemplateVersionExternalAuthProvidersByJobID), ctx, arg) } +// UpdateTemplateVersionFlagsByJobID mocks base method. +func (m *MockStore) UpdateTemplateVersionFlagsByJobID(ctx context.Context, arg database.UpdateTemplateVersionFlagsByJobIDParams) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateTemplateVersionFlagsByJobID", ctx, arg) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpdateTemplateVersionFlagsByJobID indicates an expected call of UpdateTemplateVersionFlagsByJobID. +func (mr *MockStoreMockRecorder) UpdateTemplateVersionFlagsByJobID(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateTemplateVersionFlagsByJobID", reflect.TypeOf((*MockStore)(nil).UpdateTemplateVersionFlagsByJobID), ctx, arg) +} + // UpdateTemplateWorkspacesLastUsedAt mocks base method. func (m *MockStore) UpdateTemplateWorkspacesLastUsedAt(ctx context.Context, arg database.UpdateTemplateWorkspacesLastUsedAtParams) error { m.ctrl.T.Helper() @@ -6183,6 +6314,20 @@ func (mr *MockStoreMockRecorder) UpdateTemplateWorkspacesLastUsedAt(ctx, arg any return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateTemplateWorkspacesLastUsedAt", reflect.TypeOf((*MockStore)(nil).UpdateTemplateWorkspacesLastUsedAt), ctx, arg) } +// UpdateUsageEventsPostPublish mocks base method. +func (m *MockStore) UpdateUsageEventsPostPublish(ctx context.Context, arg database.UpdateUsageEventsPostPublishParams) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateUsageEventsPostPublish", ctx, arg) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpdateUsageEventsPostPublish indicates an expected call of UpdateUsageEventsPostPublish. +func (mr *MockStoreMockRecorder) UpdateUsageEventsPostPublish(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateUsageEventsPostPublish", reflect.TypeOf((*MockStore)(nil).UpdateUsageEventsPostPublish), ctx, arg) +} + // UpdateUserDeletedByID mocks base method. func (m *MockStore) UpdateUserDeletedByID(ctx context.Context, id uuid.UUID) error { m.ctrl.T.Helper() @@ -6359,6 +6504,21 @@ func (mr *MockStoreMockRecorder) UpdateUserRoles(ctx, arg any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateUserRoles", reflect.TypeOf((*MockStore)(nil).UpdateUserRoles), ctx, arg) } +// UpdateUserSecret mocks base method. +func (m *MockStore) UpdateUserSecret(ctx context.Context, arg database.UpdateUserSecretParams) (database.UserSecret, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateUserSecret", ctx, arg) + ret0, _ := ret[0].(database.UserSecret) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpdateUserSecret indicates an expected call of UpdateUserSecret. +func (mr *MockStoreMockRecorder) UpdateUserSecret(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateUserSecret", reflect.TypeOf((*MockStore)(nil).UpdateUserSecret), ctx, arg) +} + // UpdateUserStatus mocks base method. func (m *MockStore) UpdateUserStatus(ctx context.Context, arg database.UpdateUserStatusParams) (database.User, error) { m.ctrl.T.Helper() @@ -6433,6 +6593,20 @@ func (mr *MockStoreMockRecorder) UpdateWorkspace(ctx, arg any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateWorkspace", reflect.TypeOf((*MockStore)(nil).UpdateWorkspace), ctx, arg) } +// UpdateWorkspaceACLByID mocks base method. +func (m *MockStore) UpdateWorkspaceACLByID(ctx context.Context, arg database.UpdateWorkspaceACLByIDParams) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateWorkspaceACLByID", ctx, arg) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpdateWorkspaceACLByID indicates an expected call of UpdateWorkspaceACLByID. +func (mr *MockStoreMockRecorder) UpdateWorkspaceACLByID(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateWorkspaceACLByID", reflect.TypeOf((*MockStore)(nil).UpdateWorkspaceACLByID), ctx, arg) +} + // UpdateWorkspaceAgentConnectionByID mocks base method. func (m *MockStore) UpdateWorkspaceAgentConnectionByID(ctx context.Context, arg database.UpdateWorkspaceAgentConnectionByIDParams) error { m.ctrl.T.Helper() @@ -6545,20 +6719,6 @@ func (mr *MockStoreMockRecorder) UpdateWorkspaceAutostart(ctx, arg any) *gomock. return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateWorkspaceAutostart", reflect.TypeOf((*MockStore)(nil).UpdateWorkspaceAutostart), ctx, arg) } -// UpdateWorkspaceBuildAITaskByID mocks base method. -func (m *MockStore) UpdateWorkspaceBuildAITaskByID(ctx context.Context, arg database.UpdateWorkspaceBuildAITaskByIDParams) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "UpdateWorkspaceBuildAITaskByID", ctx, arg) - ret0, _ := ret[0].(error) - return ret0 -} - -// UpdateWorkspaceBuildAITaskByID indicates an expected call of UpdateWorkspaceBuildAITaskByID. -func (mr *MockStoreMockRecorder) UpdateWorkspaceBuildAITaskByID(ctx, arg any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateWorkspaceBuildAITaskByID", reflect.TypeOf((*MockStore)(nil).UpdateWorkspaceBuildAITaskByID), ctx, arg) -} - // UpdateWorkspaceBuildCostByID mocks base method. func (m *MockStore) UpdateWorkspaceBuildCostByID(ctx context.Context, arg database.UpdateWorkspaceBuildCostByIDParams) error { m.ctrl.T.Helper() @@ -6587,6 +6747,20 @@ func (mr *MockStoreMockRecorder) UpdateWorkspaceBuildDeadlineByID(ctx, arg any) return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateWorkspaceBuildDeadlineByID", reflect.TypeOf((*MockStore)(nil).UpdateWorkspaceBuildDeadlineByID), ctx, arg) } +// UpdateWorkspaceBuildFlagsByID mocks base method. +func (m *MockStore) UpdateWorkspaceBuildFlagsByID(ctx context.Context, arg database.UpdateWorkspaceBuildFlagsByIDParams) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateWorkspaceBuildFlagsByID", ctx, arg) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpdateWorkspaceBuildFlagsByID indicates an expected call of UpdateWorkspaceBuildFlagsByID. +func (mr *MockStoreMockRecorder) UpdateWorkspaceBuildFlagsByID(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateWorkspaceBuildFlagsByID", reflect.TypeOf((*MockStore)(nil).UpdateWorkspaceBuildFlagsByID), ctx, arg) +} + // UpdateWorkspaceBuildProvisionerStateByID mocks base method. func (m *MockStore) UpdateWorkspaceBuildProvisionerStateByID(ctx context.Context, arg database.UpdateWorkspaceBuildProvisionerStateByIDParams) error { m.ctrl.T.Helper() @@ -7132,6 +7306,36 @@ func (mr *MockStoreMockRecorder) UpsertWorkspaceAppAuditSession(ctx, arg any) *g return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertWorkspaceAppAuditSession", reflect.TypeOf((*MockStore)(nil).UpsertWorkspaceAppAuditSession), ctx, arg) } +// ValidateGroupIDs mocks base method. +func (m *MockStore) ValidateGroupIDs(ctx context.Context, groupIds []uuid.UUID) (database.ValidateGroupIDsRow, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ValidateGroupIDs", ctx, groupIds) + ret0, _ := ret[0].(database.ValidateGroupIDsRow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ValidateGroupIDs indicates an expected call of ValidateGroupIDs. +func (mr *MockStoreMockRecorder) ValidateGroupIDs(ctx, groupIds any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidateGroupIDs", reflect.TypeOf((*MockStore)(nil).ValidateGroupIDs), ctx, groupIds) +} + +// ValidateUserIDs mocks base method. +func (m *MockStore) ValidateUserIDs(ctx context.Context, userIds []uuid.UUID) (database.ValidateUserIDsRow, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ValidateUserIDs", ctx, userIds) + ret0, _ := ret[0].(database.ValidateUserIDsRow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ValidateUserIDs indicates an expected call of ValidateUserIDs. +func (mr *MockStoreMockRecorder) ValidateUserIDs(ctx, userIds any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidateUserIDs", reflect.TypeOf((*MockStore)(nil).ValidateUserIDs), ctx, userIds) +} + // Wrappers mocks base method. func (m *MockStore) Wrappers() []string { m.ctrl.T.Helper() diff --git a/coderd/database/dbpurge/dbpurge.go b/coderd/database/dbpurge/dbpurge.go index 135d7f40b05dd..5afa9b4ba2975 100644 --- a/coderd/database/dbpurge/dbpurge.go +++ b/coderd/database/dbpurge/dbpurge.go @@ -12,6 +12,7 @@ import ( "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbtime" + "github.com/coder/coder/v2/coderd/pproflabel" "github.com/coder/quartz" ) @@ -38,7 +39,7 @@ func New(ctx context.Context, logger slog.Logger, db database.Store, clk quartz. // Start the ticker with the initial delay. ticker := clk.NewTicker(delay) - doTick := func(start time.Time) { + doTick := func(ctx context.Context, start time.Time) { defer ticker.Reset(delay) // Start a transaction to grab advisory lock, we don't want to run // multiple purges at the same time (multiple replicas). @@ -85,21 +86,21 @@ func New(ctx context.Context, logger slog.Logger, db database.Store, clk quartz. } } - go func() { + pproflabel.Go(ctx, pproflabel.Service(pproflabel.ServiceDBPurge), func(ctx context.Context) { defer close(closed) defer ticker.Stop() // Force an initial tick. - doTick(dbtime.Time(clk.Now()).UTC()) + doTick(ctx, dbtime.Time(clk.Now()).UTC()) for { select { case <-ctx.Done(): return case tick := <-ticker.C: ticker.Stop() - doTick(dbtime.Time(tick).UTC()) + doTick(ctx, dbtime.Time(tick).UTC()) } } - }() + }) return &instance{ cancel: cancelFunc, closed: closed, diff --git a/coderd/database/dbpurge/dbpurge_test.go b/coderd/database/dbpurge/dbpurge_test.go index 1d57a87e68f48..b3be0f82631c0 100644 --- a/coderd/database/dbpurge/dbpurge_test.go +++ b/coderd/database/dbpurge/dbpurge_test.go @@ -15,12 +15,14 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "go.uber.org/goleak" + "go.uber.org/mock/gomock" "cdr.dev/slog" "cdr.dev/slog/sloggers/slogtest" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbgen" + "github.com/coder/coder/v2/coderd/database/dbmock" "github.com/coder/coder/v2/coderd/database/dbpurge" "github.com/coder/coder/v2/coderd/database/dbrollup" "github.com/coder/coder/v2/coderd/database/dbtestutil" @@ -46,8 +48,9 @@ func TestPurge(t *testing.T) { // We want to make sure dbpurge is actually started so that this test is meaningful. clk := quartz.NewMock(t) done := awaitDoTick(ctx, t, clk) - db, _ := dbtestutil.NewDB(t) - purger := dbpurge.New(context.Background(), testutil.Logger(t), db, clk) + mDB := dbmock.NewMockStore(gomock.NewController(t)) + mDB.EXPECT().InTx(gomock.Any(), database.DefaultTXOptions().WithID("db_purge")).Return(nil).Times(2) + purger := dbpurge.New(context.Background(), testutil.Logger(t), mDB, clk) <-done // wait for doTick() to run. require.NoError(t, purger.Close()) } diff --git a/coderd/database/dbtestutil/db.go b/coderd/database/dbtestutil/db.go index f67e3206b09d1..4c7d7dcbf230e 100644 --- a/coderd/database/dbtestutil/db.go +++ b/coderd/database/dbtestutil/db.go @@ -206,7 +206,7 @@ func DumpOnFailure(t testing.TB, connectionURL string) { outPath := filepath.Join(cwd, snakeCaseName+"."+timeSuffix+".test.sql") dump, err := PGDump(connectionURL) if err != nil { - t.Errorf("dump on failure: failed to run pg_dump") + t.Errorf("dump on failure: failed to run pg_dump: %s", err.Error()) return } if err := os.WriteFile(outPath, normalizeDump(dump), 0o600); err != nil { diff --git a/coderd/database/dbtestutil/postgres.go b/coderd/database/dbtestutil/postgres.go index e5aa4b14de83b..1ab80569dedb1 100644 --- a/coderd/database/dbtestutil/postgres.go +++ b/coderd/database/dbtestutil/postgres.go @@ -138,6 +138,7 @@ func initDefaultConnection(t TBSubset) error { type OpenOptions struct { DBFrom *string + LogDSN bool } type OpenOption func(*OpenOptions) @@ -150,9 +151,18 @@ func WithDBFrom(dbFrom string) OpenOption { } } +// WithLogDSN sets whether the DSN should be logged during testing. +// This provides an ergonomic way to connect to test databases during debugging. +func WithLogDSN(logDSN bool) OpenOption { + return func(o *OpenOptions) { + o.LogDSN = logDSN + } +} + // TBSubset is a subset of the testing.TB interface. // It allows to use dbtestutil.Open outside of tests. type TBSubset interface { + Name() string Cleanup(func()) Helper() Logf(format string, args ...any) @@ -227,6 +237,11 @@ func Open(t TBSubset, opts ...OpenOption) (string, error) { Port: port, DBName: dbName, }.DSN() + + // Optionally log the DSN to help connect to the test database. + if openOptions.LogDSN { + _, _ = fmt.Fprintf(os.Stderr, "Connect to the database for %s using: psql '%s'\n", t.Name(), dsn) + } return dsn, nil } diff --git a/coderd/database/dump.sql b/coderd/database/dump.sql index eb07a5735088f..066fe0b1b8847 100644 --- a/coderd/database/dump.sql +++ b/coderd/database/dump.sql @@ -73,6 +73,11 @@ CREATE TYPE connection_type AS ENUM ( 'port_forwarding' ); +CREATE TYPE cors_behavior AS ENUM ( + 'simple', + 'passthru' +); + CREATE TYPE crypto_key_feature AS ENUM ( 'workspace_apps_token', 'workspace_apps_api_key', @@ -937,13 +942,16 @@ CREATE TABLE external_auth_links ( oauth_expiry timestamp with time zone NOT NULL, oauth_access_token_key_id text, oauth_refresh_token_key_id text, - oauth_extra jsonb + oauth_extra jsonb, + oauth_refresh_failure_reason text DEFAULT ''::text NOT NULL ); COMMENT ON COLUMN external_auth_links.oauth_access_token_key_id IS 'The ID of the key used to encrypt the OAuth access token. If this is NULL, the access token is not encrypted'; COMMENT ON COLUMN external_auth_links.oauth_refresh_token_key_id IS 'The ID of the key used to encrypt the OAuth refresh token. If this is NULL, the refresh token is not encrypted'; +COMMENT ON COLUMN external_auth_links.oauth_refresh_failure_reason IS 'This error means the refresh token is invalid. Cached so we can avoid calling the external provider again for the same error.'; + CREATE TABLE files ( hash character varying(64) NOT NULL, created_at timestamp with time zone NOT NULL, @@ -1007,7 +1015,8 @@ CREATE TABLE users ( hashed_one_time_passcode bytea, one_time_passcode_expires_at timestamp with time zone, is_system boolean DEFAULT false NOT NULL, - CONSTRAINT one_time_passcode_set CHECK ((((hashed_one_time_passcode IS NULL) AND (one_time_passcode_expires_at IS NULL)) OR ((hashed_one_time_passcode IS NOT NULL) AND (one_time_passcode_expires_at IS NOT NULL)))) + CONSTRAINT one_time_passcode_set CHECK ((((hashed_one_time_passcode IS NULL) AND (one_time_passcode_expires_at IS NULL)) OR ((hashed_one_time_passcode IS NOT NULL) AND (one_time_passcode_expires_at IS NOT NULL)))), + CONSTRAINT users_username_min_length CHECK ((length(username) >= 1)) ); COMMENT ON COLUMN users.quiet_hours_schedule IS 'Daily (!) cron schedule (with optional CRON_TZ) signifying the start of the user''s quiet hours. If empty, the default quiet hours on the instance is used instead.'; @@ -1414,11 +1423,18 @@ CASE WHEN (started_at IS NULL) THEN 'pending'::provisioner_job_status ELSE 'running'::provisioner_job_status END -END) STORED NOT NULL +END) STORED NOT NULL, + logs_length integer DEFAULT 0 NOT NULL, + logs_overflowed boolean DEFAULT false NOT NULL, + CONSTRAINT max_provisioner_logs_length CHECK ((logs_length <= 1048576)) ); COMMENT ON COLUMN provisioner_jobs.job_status IS 'Computed column to track the status of the job.'; +COMMENT ON COLUMN provisioner_jobs.logs_length IS 'Total length of provisioner logs'; + +COMMENT ON COLUMN provisioner_jobs.logs_overflowed IS 'Whether the provisioner logs overflowed in length'; + CREATE TABLE provisioner_keys ( id uuid NOT NULL, created_at timestamp with time zone NOT NULL, @@ -1618,9 +1634,15 @@ CREATE TABLE template_version_presets ( invalidate_after_secs integer DEFAULT 0, prebuild_status prebuild_status DEFAULT 'healthy'::prebuild_status NOT NULL, scheduling_timezone text DEFAULT ''::text NOT NULL, - is_default boolean DEFAULT false NOT NULL + is_default boolean DEFAULT false NOT NULL, + description character varying(128) DEFAULT ''::character varying NOT NULL, + icon character varying(256) DEFAULT ''::character varying NOT NULL ); +COMMENT ON COLUMN template_version_presets.description IS 'Short text describing the preset (max 128 characters).'; + +COMMENT ON COLUMN template_version_presets.icon IS 'URL or path to an icon representing the preset (max 256 characters).'; + CREATE TABLE template_version_terraform_values ( template_version_id uuid NOT NULL, updated_at timestamp with time zone DEFAULT now() NOT NULL, @@ -1670,7 +1692,8 @@ CREATE TABLE template_versions ( message character varying(1048576) DEFAULT ''::character varying NOT NULL, archived boolean DEFAULT false NOT NULL, source_example_id text, - has_ai_task boolean + has_ai_task boolean, + has_external_agent boolean ); COMMENT ON COLUMN template_versions.external_auth_providers IS 'IDs of External auth providers for a specific template version'; @@ -1701,6 +1724,7 @@ CREATE VIEW template_version_with_user AS template_versions.archived, template_versions.source_example_id, template_versions.has_ai_task, + template_versions.has_external_agent, COALESCE(visible_users.avatar_url, ''::text) AS created_by_avatar_url, COALESCE(visible_users.username, ''::text) AS created_by_username, COALESCE(visible_users.name, ''::text) AS created_by_name @@ -1744,7 +1768,8 @@ CREATE TABLE templates ( deprecated text DEFAULT ''::text NOT NULL, activity_bump bigint DEFAULT '3600000000000'::bigint NOT NULL, max_port_sharing_level app_sharing_level DEFAULT 'owner'::app_sharing_level NOT NULL, - use_classic_parameter_flow boolean DEFAULT true NOT NULL + use_classic_parameter_flow boolean DEFAULT false NOT NULL, + cors_behavior cors_behavior DEFAULT 'simple'::cors_behavior NOT NULL ); COMMENT ON COLUMN templates.default_ttl IS 'The default duration for autostop for workspaces created from this template.'; @@ -1797,6 +1822,7 @@ CREATE VIEW template_with_names AS templates.activity_bump, templates.max_port_sharing_level, templates.use_classic_parameter_flow, + templates.cors_behavior, COALESCE(visible_users.avatar_url, ''::text) AS created_by_avatar_url, COALESCE(visible_users.username, ''::text) AS created_by_username, COALESCE(visible_users.name, ''::text) AS created_by_name, @@ -1809,6 +1835,31 @@ CREATE VIEW template_with_names AS COMMENT ON VIEW template_with_names IS 'Joins in the display name information such as username, avatar, and organization name.'; +CREATE TABLE usage_events ( + id text NOT NULL, + event_type text NOT NULL, + event_data jsonb NOT NULL, + created_at timestamp with time zone NOT NULL, + publish_started_at timestamp with time zone, + published_at timestamp with time zone, + failure_message text, + CONSTRAINT usage_event_type_check CHECK ((event_type = 'dc_managed_agents_v1'::text)) +); + +COMMENT ON TABLE usage_events IS 'usage_events contains usage data that is collected from the product and potentially shipped to the usage collector service.'; + +COMMENT ON COLUMN usage_events.id IS 'For "discrete" event types, this is a random UUID. For "heartbeat" event types, this is a combination of the event type and a truncated timestamp.'; + +COMMENT ON COLUMN usage_events.event_type IS 'The usage event type with version. "dc" means "discrete" (e.g. a single event, for counters), "hb" means "heartbeat" (e.g. a recurring event that contains a total count of usage generated from the database, for gauges).'; + +COMMENT ON COLUMN usage_events.event_data IS 'Event payload. Determined by the matching usage struct for this event type.'; + +COMMENT ON COLUMN usage_events.publish_started_at IS 'Set to a timestamp while the event is being published by a Coder replica to the usage collector service. Used to avoid duplicate publishes by multiple replicas. Timestamps older than 1 hour are considered expired.'; + +COMMENT ON COLUMN usage_events.published_at IS 'Set to a timestamp when the event is successfully (or permanently unsuccessfully) published to the usage collector service. If set, the event should never be attempted to be published again.'; + +COMMENT ON COLUMN usage_events.failure_message IS 'Set to an error message when the event is temporarily or permanently unsuccessfully published to the usage collector service.'; + CREATE TABLE user_configs ( user_id uuid NOT NULL, key character varying(256) NOT NULL, @@ -1841,6 +1892,18 @@ COMMENT ON COLUMN user_links.oauth_refresh_token_key_id IS 'The ID of the key us COMMENT ON COLUMN user_links.claims IS 'Claims from the IDP for the linked user. Includes both id_token and userinfo claims. '; +CREATE TABLE user_secrets ( + id uuid DEFAULT gen_random_uuid() NOT NULL, + user_id uuid NOT NULL, + name text NOT NULL, + description text NOT NULL, + value text NOT NULL, + env_name text DEFAULT ''::text NOT NULL, + file_path text DEFAULT ''::text NOT NULL, + created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL +); + CREATE TABLE user_status_changes ( id uuid DEFAULT gen_random_uuid() NOT NULL, user_id uuid NOT NULL, @@ -2204,7 +2267,9 @@ CREATE TABLE workspace_builds ( template_version_preset_id uuid, has_ai_task boolean, ai_task_sidebar_app_id uuid, - CONSTRAINT workspace_builds_ai_task_sidebar_app_id_required CHECK (((((has_ai_task IS NULL) OR (has_ai_task = false)) AND (ai_task_sidebar_app_id IS NULL)) OR ((has_ai_task = true) AND (ai_task_sidebar_app_id IS NOT NULL)))) + has_external_agent boolean, + CONSTRAINT workspace_builds_ai_task_sidebar_app_id_required CHECK (((((has_ai_task IS NULL) OR (has_ai_task = false)) AND (ai_task_sidebar_app_id IS NULL)) OR ((has_ai_task = true) AND (ai_task_sidebar_app_id IS NOT NULL)))), + CONSTRAINT workspace_builds_deadline_below_max_deadline CHECK ((((deadline <> '0001-01-01 00:00:00+00'::timestamp with time zone) AND (deadline <= max_deadline)) OR (max_deadline = '0001-01-01 00:00:00+00'::timestamp with time zone))) ); CREATE VIEW workspace_build_with_user AS @@ -2225,6 +2290,7 @@ CREATE VIEW workspace_build_with_user AS workspace_builds.template_version_preset_id, workspace_builds.has_ai_task, workspace_builds.ai_task_sidebar_app_id, + workspace_builds.has_external_agent, COALESCE(visible_users.avatar_url, ''::text) AS initiator_by_avatar_url, COALESCE(visible_users.username, ''::text) AS initiator_by_username, COALESCE(visible_users.name, ''::text) AS initiator_by_name @@ -2249,7 +2315,9 @@ CREATE TABLE workspaces ( deleting_at timestamp with time zone, automatic_updates automatic_updates DEFAULT 'never'::automatic_updates NOT NULL, favorite boolean DEFAULT false NOT NULL, - next_start_at timestamp with time zone + next_start_at timestamp with time zone, + group_acl jsonb DEFAULT '{}'::jsonb NOT NULL, + user_acl jsonb DEFAULT '{}'::jsonb NOT NULL ); COMMENT ON COLUMN workspaces.favorite IS 'Favorite is true if the workspace owner has favorited the workspace.'; @@ -2428,6 +2496,8 @@ CREATE VIEW workspaces_expanded AS workspaces.automatic_updates, workspaces.favorite, workspaces.next_start_at, + workspaces.group_acl, + workspaces.user_acl, visible_users.avatar_url AS owner_avatar_url, visible_users.username AS owner_username, visible_users.name AS owner_name, @@ -2641,6 +2711,9 @@ ALTER TABLE ONLY template_versions ALTER TABLE ONLY templates ADD CONSTRAINT templates_pkey PRIMARY KEY (id); +ALTER TABLE ONLY usage_events + ADD CONSTRAINT usage_events_pkey PRIMARY KEY (id); + ALTER TABLE ONLY user_configs ADD CONSTRAINT user_configs_pkey PRIMARY KEY (user_id, key); @@ -2650,6 +2723,9 @@ ALTER TABLE ONLY user_deleted ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_pkey PRIMARY KEY (user_id, login_type); +ALTER TABLE ONLY user_secrets + ADD CONSTRAINT user_secrets_pkey PRIMARY KEY (id); + ALTER TABLE ONLY user_status_changes ADD CONSTRAINT user_status_changes_pkey PRIMARY KEY (id); @@ -2806,6 +2882,8 @@ CREATE INDEX idx_template_versions_has_ai_task ON template_versions USING btree CREATE UNIQUE INDEX idx_unique_preset_name ON template_version_presets USING btree (name, template_version_id); +CREATE INDEX idx_usage_events_select_for_publishing ON usage_events USING btree (published_at, publish_started_at, created_at); + CREATE INDEX idx_user_deleted_deleted_at ON user_deleted USING btree (deleted_at); CREATE INDEX idx_user_status_changes_changed_at ON user_status_changes USING btree (changed_at); @@ -2838,6 +2916,12 @@ CREATE UNIQUE INDEX templates_organization_id_name_idx ON templates USING btree CREATE UNIQUE INDEX user_links_linked_id_login_type_idx ON user_links USING btree (linked_id, login_type) WHERE (linked_id <> ''::text); +CREATE UNIQUE INDEX user_secrets_user_env_name_idx ON user_secrets USING btree (user_id, env_name) WHERE (env_name <> ''::text); + +CREATE UNIQUE INDEX user_secrets_user_file_path_idx ON user_secrets USING btree (user_id, file_path) WHERE (file_path <> ''::text); + +CREATE UNIQUE INDEX user_secrets_user_name_idx ON user_secrets USING btree (user_id, name); + CREATE UNIQUE INDEX users_email_lower_idx ON users USING btree (lower(email)) WHERE (deleted = false); CREATE UNIQUE INDEX users_username_lower_idx ON users USING btree (lower(username)) WHERE (deleted = false); @@ -3143,6 +3227,9 @@ ALTER TABLE ONLY user_links ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; +ALTER TABLE ONLY user_secrets + ADD CONSTRAINT user_secrets_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; + ALTER TABLE ONLY user_status_changes ADD CONSTRAINT user_status_changes_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id); diff --git a/coderd/database/errors.go b/coderd/database/errors.go index 66c702de24445..9d0c3fee7e865 100644 --- a/coderd/database/errors.go +++ b/coderd/database/errors.go @@ -59,6 +59,28 @@ func IsForeignKeyViolation(err error, foreignKeyConstraints ...ForeignKeyConstra return false } +// IsCheckViolation checks if the error is due to a check violation. If one or +// more specific check constraints are given as arguments, the error must be +// caused by one of them. If no constraints are given, this function returns +// true for any check violation. +func IsCheckViolation(err error, checkConstraints ...CheckConstraint) bool { + var pqErr *pq.Error + if errors.As(err, &pqErr) { + if pqErr.Code.Name() == "check_violation" { + if len(checkConstraints) == 0 { + return true + } + for _, cc := range checkConstraints { + if pqErr.Constraint == string(cc) { + return true + } + } + } + } + + return false +} + // IsQueryCanceledError checks if the error is due to a query being canceled. func IsQueryCanceledError(err error) bool { var pqErr *pq.Error @@ -79,3 +101,11 @@ func IsWorkspaceAgentLogsLimitError(err error) bool { return false } + +func IsProvisionerJobLogsLimitError(err error) bool { + var pqErr *pq.Error + if errors.As(err, &pqErr) { + return pqErr.Constraint == "max_provisioner_logs_length" && pqErr.Table == "provisioner_jobs" + } + return false +} diff --git a/coderd/database/foreign_key_constraint.go b/coderd/database/foreign_key_constraint.go index c3aaf7342a97c..33aa8edd69032 100644 --- a/coderd/database/foreign_key_constraint.go +++ b/coderd/database/foreign_key_constraint.go @@ -63,6 +63,7 @@ const ( ForeignKeyUserLinksOauthAccessTokenKeyID ForeignKeyConstraint = "user_links_oauth_access_token_key_id_fkey" // ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_oauth_access_token_key_id_fkey FOREIGN KEY (oauth_access_token_key_id) REFERENCES dbcrypt_keys(active_key_digest); ForeignKeyUserLinksOauthRefreshTokenKeyID ForeignKeyConstraint = "user_links_oauth_refresh_token_key_id_fkey" // ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_oauth_refresh_token_key_id_fkey FOREIGN KEY (oauth_refresh_token_key_id) REFERENCES dbcrypt_keys(active_key_digest); ForeignKeyUserLinksUserID ForeignKeyConstraint = "user_links_user_id_fkey" // ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; + ForeignKeyUserSecretsUserID ForeignKeyConstraint = "user_secrets_user_id_fkey" // ALTER TABLE ONLY user_secrets ADD CONSTRAINT user_secrets_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; ForeignKeyUserStatusChangesUserID ForeignKeyConstraint = "user_status_changes_user_id_fkey" // ALTER TABLE ONLY user_status_changes ADD CONSTRAINT user_status_changes_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id); ForeignKeyWebpushSubscriptionsUserID ForeignKeyConstraint = "webpush_subscriptions_user_id_fkey" // ALTER TABLE ONLY webpush_subscriptions ADD CONSTRAINT webpush_subscriptions_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; ForeignKeyWorkspaceAgentDevcontainersWorkspaceAgentID ForeignKeyConstraint = "workspace_agent_devcontainers_workspace_agent_id_fkey" // ALTER TABLE ONLY workspace_agent_devcontainers ADD CONSTRAINT workspace_agent_devcontainers_workspace_agent_id_fkey FOREIGN KEY (workspace_agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE; diff --git a/coderd/database/gen/dump/main.go b/coderd/database/gen/dump/main.go index f99b69bdaef93..1d84339eecce9 100644 --- a/coderd/database/gen/dump/main.go +++ b/coderd/database/gen/dump/main.go @@ -19,6 +19,10 @@ type mockTB struct { cleanup []func() } +func (*mockTB) Name() string { + return "mockTB" +} + func (t *mockTB) Cleanup(f func()) { t.cleanup = append(t.cleanup, f) } diff --git a/coderd/database/migrations/000351_add_icon_and_description_template_version_presets.down.sql b/coderd/database/migrations/000351_add_icon_and_description_template_version_presets.down.sql new file mode 100644 index 0000000000000..ce626d3929226 --- /dev/null +++ b/coderd/database/migrations/000351_add_icon_and_description_template_version_presets.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE template_version_presets + DROP COLUMN IF EXISTS description, + DROP COLUMN IF EXISTS icon; diff --git a/coderd/database/migrations/000351_add_icon_and_description_template_version_presets.up.sql b/coderd/database/migrations/000351_add_icon_and_description_template_version_presets.up.sql new file mode 100644 index 0000000000000..dcbb2d3b3834c --- /dev/null +++ b/coderd/database/migrations/000351_add_icon_and_description_template_version_presets.up.sql @@ -0,0 +1,6 @@ +ALTER TABLE template_version_presets + ADD COLUMN IF NOT EXISTS description VARCHAR(128) NOT NULL DEFAULT '', + ADD COLUMN IF NOT EXISTS icon VARCHAR(256) NOT NULL DEFAULT ''; + +COMMENT ON COLUMN template_version_presets.description IS 'Short text describing the preset (max 128 characters).'; +COMMENT ON COLUMN template_version_presets.icon IS 'URL or path to an icon representing the preset (max 256 characters).'; diff --git a/coderd/database/migrations/000352_default_dynamic_templates.down.sql b/coderd/database/migrations/000352_default_dynamic_templates.down.sql new file mode 100644 index 0000000000000..548cd7e2c30b2 --- /dev/null +++ b/coderd/database/migrations/000352_default_dynamic_templates.down.sql @@ -0,0 +1 @@ +ALTER TABLE templates ALTER COLUMN use_classic_parameter_flow SET DEFAULT true; diff --git a/coderd/database/migrations/000352_default_dynamic_templates.up.sql b/coderd/database/migrations/000352_default_dynamic_templates.up.sql new file mode 100644 index 0000000000000..51bcab9f099f8 --- /dev/null +++ b/coderd/database/migrations/000352_default_dynamic_templates.up.sql @@ -0,0 +1 @@ +ALTER TABLE templates ALTER COLUMN use_classic_parameter_flow SET DEFAULT false; diff --git a/coderd/database/migrations/000353_template_level_cors.down.sql b/coderd/database/migrations/000353_template_level_cors.down.sql new file mode 100644 index 0000000000000..370e4bf36d9ed --- /dev/null +++ b/coderd/database/migrations/000353_template_level_cors.down.sql @@ -0,0 +1,46 @@ +DROP VIEW IF EXISTS template_with_names; +CREATE VIEW template_with_names AS + SELECT templates.id, + templates.created_at, + templates.updated_at, + templates.organization_id, + templates.deleted, + templates.name, + templates.provisioner, + templates.active_version_id, + templates.description, + templates.default_ttl, + templates.created_by, + templates.icon, + templates.user_acl, + templates.group_acl, + templates.display_name, + templates.allow_user_cancel_workspace_jobs, + templates.allow_user_autostart, + templates.allow_user_autostop, + templates.failure_ttl, + templates.time_til_dormant, + templates.time_til_dormant_autodelete, + templates.autostop_requirement_days_of_week, + templates.autostop_requirement_weeks, + templates.autostart_block_days_of_week, + templates.require_active_version, + templates.deprecated, + templates.activity_bump, + templates.max_port_sharing_level, + templates.use_classic_parameter_flow, + COALESCE(visible_users.avatar_url, ''::text) AS created_by_avatar_url, + COALESCE(visible_users.username, ''::text) AS created_by_username, + COALESCE(visible_users.name, ''::text) AS created_by_name, + COALESCE(organizations.name, ''::text) AS organization_name, + COALESCE(organizations.display_name, ''::text) AS organization_display_name, + COALESCE(organizations.icon, ''::text) AS organization_icon + FROM ((templates + LEFT JOIN visible_users ON ((templates.created_by = visible_users.id))) + LEFT JOIN organizations ON ((templates.organization_id = organizations.id))); + +COMMENT ON VIEW template_with_names IS 'Joins in the display name information such as username, avatar, and organization name.'; + +ALTER TABLE templates DROP COLUMN cors_behavior; + +DROP TYPE IF EXISTS cors_behavior; diff --git a/coderd/database/migrations/000353_template_level_cors.up.sql b/coderd/database/migrations/000353_template_level_cors.up.sql new file mode 100644 index 0000000000000..ddb5849fcb65a --- /dev/null +++ b/coderd/database/migrations/000353_template_level_cors.up.sql @@ -0,0 +1,52 @@ +CREATE TYPE cors_behavior AS ENUM ( + 'simple', + 'passthru' +); + +ALTER TABLE templates +ADD COLUMN cors_behavior cors_behavior NOT NULL DEFAULT 'simple'::cors_behavior; + +-- Update the template_with_users view by recreating it. +DROP VIEW IF EXISTS template_with_names; +CREATE VIEW template_with_names AS + SELECT templates.id, + templates.created_at, + templates.updated_at, + templates.organization_id, + templates.deleted, + templates.name, + templates.provisioner, + templates.active_version_id, + templates.description, + templates.default_ttl, + templates.created_by, + templates.icon, + templates.user_acl, + templates.group_acl, + templates.display_name, + templates.allow_user_cancel_workspace_jobs, + templates.allow_user_autostart, + templates.allow_user_autostop, + templates.failure_ttl, + templates.time_til_dormant, + templates.time_til_dormant_autodelete, + templates.autostop_requirement_days_of_week, + templates.autostop_requirement_weeks, + templates.autostart_block_days_of_week, + templates.require_active_version, + templates.deprecated, + templates.activity_bump, + templates.max_port_sharing_level, + templates.use_classic_parameter_flow, + templates.cors_behavior, -- <--- adding this column + COALESCE(visible_users.avatar_url, ''::text) AS created_by_avatar_url, + COALESCE(visible_users.username, ''::text) AS created_by_username, + COALESCE(visible_users.name, ''::text) AS created_by_name, + COALESCE(organizations.name, ''::text) AS organization_name, + COALESCE(organizations.display_name, ''::text) AS organization_display_name, + COALESCE(organizations.icon, ''::text) AS organization_icon + FROM ((templates + LEFT JOIN visible_users ON ((templates.created_by = visible_users.id))) + LEFT JOIN organizations ON ((templates.organization_id = organizations.id))); + +COMMENT ON VIEW template_with_names IS 'Joins in the display name information such as username, avatar, and organization name.'; diff --git a/coderd/database/migrations/000354_workspace_acl.down.sql b/coderd/database/migrations/000354_workspace_acl.down.sql new file mode 100644 index 0000000000000..97f0acc6b03c8 --- /dev/null +++ b/coderd/database/migrations/000354_workspace_acl.down.sql @@ -0,0 +1,40 @@ +DROP VIEW workspaces_expanded; + +ALTER TABLE workspaces + DROP COLUMN group_acl, + DROP COLUMN user_acl; + +CREATE VIEW workspaces_expanded AS + SELECT workspaces.id, + workspaces.created_at, + workspaces.updated_at, + workspaces.owner_id, + workspaces.organization_id, + workspaces.template_id, + workspaces.deleted, + workspaces.name, + workspaces.autostart_schedule, + workspaces.ttl, + workspaces.last_used_at, + workspaces.dormant_at, + workspaces.deleting_at, + workspaces.automatic_updates, + workspaces.favorite, + workspaces.next_start_at, + visible_users.avatar_url AS owner_avatar_url, + visible_users.username AS owner_username, + visible_users.name AS owner_name, + organizations.name AS organization_name, + organizations.display_name AS organization_display_name, + organizations.icon AS organization_icon, + organizations.description AS organization_description, + templates.name AS template_name, + templates.display_name AS template_display_name, + templates.icon AS template_icon, + templates.description AS template_description + FROM (((workspaces + JOIN visible_users ON ((workspaces.owner_id = visible_users.id))) + JOIN organizations ON ((workspaces.organization_id = organizations.id))) + JOIN templates ON ((workspaces.template_id = templates.id))); + +COMMENT ON VIEW workspaces_expanded IS 'Joins in the display name information such as username, avatar, and organization name.'; diff --git a/coderd/database/migrations/000354_workspace_acl.up.sql b/coderd/database/migrations/000354_workspace_acl.up.sql new file mode 100644 index 0000000000000..6d6a375679aa5 --- /dev/null +++ b/coderd/database/migrations/000354_workspace_acl.up.sql @@ -0,0 +1,43 @@ +DROP VIEW workspaces_expanded; + +ALTER TABLE workspaces + ADD COLUMN group_acl jsonb not null default '{}'::jsonb, + ADD COLUMN user_acl jsonb not null default '{}'::jsonb; + +-- Recreate the view, now including the new columns +CREATE VIEW workspaces_expanded AS + SELECT workspaces.id, + workspaces.created_at, + workspaces.updated_at, + workspaces.owner_id, + workspaces.organization_id, + workspaces.template_id, + workspaces.deleted, + workspaces.name, + workspaces.autostart_schedule, + workspaces.ttl, + workspaces.last_used_at, + workspaces.dormant_at, + workspaces.deleting_at, + workspaces.automatic_updates, + workspaces.favorite, + workspaces.next_start_at, + workspaces.group_acl, + workspaces.user_acl, + visible_users.avatar_url AS owner_avatar_url, + visible_users.username AS owner_username, + visible_users.name AS owner_name, + organizations.name AS organization_name, + organizations.display_name AS organization_display_name, + organizations.icon AS organization_icon, + organizations.description AS organization_description, + templates.name AS template_name, + templates.display_name AS template_display_name, + templates.icon AS template_icon, + templates.description AS template_description + FROM (((workspaces + JOIN visible_users ON ((workspaces.owner_id = visible_users.id))) + JOIN organizations ON ((workspaces.organization_id = organizations.id))) + JOIN templates ON ((workspaces.template_id = templates.id))); + +COMMENT ON VIEW workspaces_expanded IS 'Joins in the display name information such as username, avatar, and organization name.'; diff --git a/coderd/database/migrations/000355_add_provisioner_logs_overflowed.down.sql b/coderd/database/migrations/000355_add_provisioner_logs_overflowed.down.sql new file mode 100644 index 0000000000000..39f34a2b491ee --- /dev/null +++ b/coderd/database/migrations/000355_add_provisioner_logs_overflowed.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE provisioner_jobs DROP COLUMN logs_length; +ALTER TABLE provisioner_jobs DROP COLUMN logs_overflowed; \ No newline at end of file diff --git a/coderd/database/migrations/000355_add_provisioner_logs_overflowed.up.sql b/coderd/database/migrations/000355_add_provisioner_logs_overflowed.up.sql new file mode 100644 index 0000000000000..80f58cf5c6693 --- /dev/null +++ b/coderd/database/migrations/000355_add_provisioner_logs_overflowed.up.sql @@ -0,0 +1,6 @@ + -- Add logs length tracking and overflow flag, similar to workspace agents + ALTER TABLE provisioner_jobs ADD COLUMN logs_length integer NOT NULL DEFAULT 0 CONSTRAINT max_provisioner_logs_length CHECK (logs_length <= 1048576); + ALTER TABLE provisioner_jobs ADD COLUMN logs_overflowed boolean NOT NULL DEFAULT false; + + COMMENT ON COLUMN provisioner_jobs.logs_length IS 'Total length of provisioner logs'; + COMMENT ON COLUMN provisioner_jobs.logs_overflowed IS 'Whether the provisioner logs overflowed in length'; diff --git a/coderd/database/migrations/000356_enforce_deadline_below_max_deadline.down.sql b/coderd/database/migrations/000356_enforce_deadline_below_max_deadline.down.sql new file mode 100644 index 0000000000000..a9b2b6ff7f459 --- /dev/null +++ b/coderd/database/migrations/000356_enforce_deadline_below_max_deadline.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE workspace_builds + DROP CONSTRAINT workspace_builds_deadline_below_max_deadline; diff --git a/coderd/database/migrations/000356_enforce_deadline_below_max_deadline.up.sql b/coderd/database/migrations/000356_enforce_deadline_below_max_deadline.up.sql new file mode 100644 index 0000000000000..00c36ddd0b5dd --- /dev/null +++ b/coderd/database/migrations/000356_enforce_deadline_below_max_deadline.up.sql @@ -0,0 +1,22 @@ +-- New constraint: (deadline IS NOT zero AND deadline <= max_deadline) UNLESS max_deadline is zero. +-- Unfortunately, "zero" here means `time.Time{}`... + +-- Update previous builds that would fail this new constraint. This matches the +-- intended behaviour of the autostop algorithm. +UPDATE + workspace_builds +SET + deadline = max_deadline +WHERE + (deadline = '0001-01-01 00:00:00+00'::timestamptz OR deadline > max_deadline) + AND max_deadline != '0001-01-01 00:00:00+00'::timestamptz; + +-- Add the new constraint. +ALTER TABLE workspace_builds + ADD CONSTRAINT workspace_builds_deadline_below_max_deadline + CHECK ( + -- (deadline is not zero AND deadline <= max_deadline)... + (deadline != '0001-01-01 00:00:00+00'::timestamptz AND deadline <= max_deadline) + -- UNLESS max_deadline is zero. + OR max_deadline = '0001-01-01 00:00:00+00'::timestamptz + ); diff --git a/coderd/database/migrations/000357_add_user_secrets.down.sql b/coderd/database/migrations/000357_add_user_secrets.down.sql new file mode 100644 index 0000000000000..67bd30002e23a --- /dev/null +++ b/coderd/database/migrations/000357_add_user_secrets.down.sql @@ -0,0 +1,7 @@ +-- Drop the unique indexes first (in reverse order of creation) +DROP INDEX IF EXISTS user_secrets_user_file_path_idx; +DROP INDEX IF EXISTS user_secrets_user_env_name_idx; +DROP INDEX IF EXISTS user_secrets_user_name_idx; + +-- Drop the table +DROP TABLE IF EXISTS user_secrets; diff --git a/coderd/database/migrations/000357_add_user_secrets.up.sql b/coderd/database/migrations/000357_add_user_secrets.up.sql new file mode 100644 index 0000000000000..8a4d398f490eb --- /dev/null +++ b/coderd/database/migrations/000357_add_user_secrets.up.sql @@ -0,0 +1,34 @@ +-- Stores encrypted user secrets (global, available across all organizations) +CREATE TABLE user_secrets ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + user_id UUID NOT NULL REFERENCES users(id) ON DELETE CASCADE, + name TEXT NOT NULL, + description TEXT NOT NULL, + + -- The encrypted secret value (base64-encoded encrypted data) + value TEXT NOT NULL, + + -- Auto-injection settings + -- Environment variable name (e.g., "DATABASE_PASSWORD", "API_KEY") + -- Empty string means don't inject as env var + env_name TEXT NOT NULL DEFAULT '', + + -- File path where secret should be written (e.g., "/home/coder/.ssh/id_rsa") + -- Empty string means don't inject as file + file_path TEXT NOT NULL DEFAULT '', + + -- Timestamps + created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP NOT NULL +); + +-- Unique constraint: user can't have duplicate secret names +CREATE UNIQUE INDEX user_secrets_user_name_idx ON user_secrets(user_id, name); + +-- Unique constraint: user can't have duplicate env names +CREATE UNIQUE INDEX user_secrets_user_env_name_idx ON user_secrets(user_id, env_name) +WHERE env_name != ''; + +-- Unique constraint: user can't have duplicate file paths +CREATE UNIQUE INDEX user_secrets_user_file_path_idx ON user_secrets(user_id, file_path) +WHERE file_path != ''; diff --git a/coderd/database/migrations/000358_failed_ext_auth_error.down.sql b/coderd/database/migrations/000358_failed_ext_auth_error.down.sql new file mode 100644 index 0000000000000..72cad82d36a1e --- /dev/null +++ b/coderd/database/migrations/000358_failed_ext_auth_error.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE external_auth_links + DROP COLUMN oauth_refresh_failure_reason +; diff --git a/coderd/database/migrations/000358_failed_ext_auth_error.up.sql b/coderd/database/migrations/000358_failed_ext_auth_error.up.sql new file mode 100644 index 0000000000000..f2030ecbeeca2 --- /dev/null +++ b/coderd/database/migrations/000358_failed_ext_auth_error.up.sql @@ -0,0 +1,7 @@ +ALTER TABLE external_auth_links + ADD COLUMN oauth_refresh_failure_reason TEXT NOT NULL DEFAULT '' +; + +COMMENT ON COLUMN external_auth_links.oauth_refresh_failure_reason IS + 'This error means the refresh token is invalid. Cached so we can avoid calling the external provider again for the same error.' +; diff --git a/coderd/database/migrations/000359_create_usage_events_table.down.sql b/coderd/database/migrations/000359_create_usage_events_table.down.sql new file mode 100644 index 0000000000000..cb86155db10e8 --- /dev/null +++ b/coderd/database/migrations/000359_create_usage_events_table.down.sql @@ -0,0 +1 @@ +DROP TABLE usage_events; diff --git a/coderd/database/migrations/000359_create_usage_events_table.up.sql b/coderd/database/migrations/000359_create_usage_events_table.up.sql new file mode 100644 index 0000000000000..d03d4ad7414c9 --- /dev/null +++ b/coderd/database/migrations/000359_create_usage_events_table.up.sql @@ -0,0 +1,25 @@ +CREATE TABLE usage_events ( + id TEXT PRIMARY KEY, + -- We use a TEXT column with a CHECK constraint rather than an enum because of + -- the limitations with adding new values to an enum and using them in the + -- same transaction. + event_type TEXT NOT NULL CONSTRAINT usage_event_type_check CHECK (event_type IN ('dc_managed_agents_v1')), + event_data JSONB NOT NULL, + created_at TIMESTAMP WITH TIME ZONE NOT NULL, + publish_started_at TIMESTAMP WITH TIME ZONE DEFAULT NULL, + published_at TIMESTAMP WITH TIME ZONE DEFAULT NULL, + failure_message TEXT DEFAULT NULL +); + +COMMENT ON TABLE usage_events IS 'usage_events contains usage data that is collected from the product and potentially shipped to the usage collector service.'; +COMMENT ON COLUMN usage_events.id IS 'For "discrete" event types, this is a random UUID. For "heartbeat" event types, this is a combination of the event type and a truncated timestamp.'; +COMMENT ON COLUMN usage_events.event_type IS 'The usage event type with version. "dc" means "discrete" (e.g. a single event, for counters), "hb" means "heartbeat" (e.g. a recurring event that contains a total count of usage generated from the database, for gauges).'; +COMMENT ON COLUMN usage_events.event_data IS 'Event payload. Determined by the matching usage struct for this event type.'; +COMMENT ON COLUMN usage_events.publish_started_at IS 'Set to a timestamp while the event is being published by a Coder replica to the usage collector service. Used to avoid duplicate publishes by multiple replicas. Timestamps older than 1 hour are considered expired.'; +COMMENT ON COLUMN usage_events.published_at IS 'Set to a timestamp when the event is successfully (or permanently unsuccessfully) published to the usage collector service. If set, the event should never be attempted to be published again.'; +COMMENT ON COLUMN usage_events.failure_message IS 'Set to an error message when the event is temporarily or permanently unsuccessfully published to the usage collector service.'; + +-- Create an index with all three fields used by the +-- SelectUsageEventsForPublishing query. +CREATE INDEX idx_usage_events_select_for_publishing + ON usage_events (published_at, publish_started_at, created_at); diff --git a/coderd/database/migrations/000360_external_agents.down.sql b/coderd/database/migrations/000360_external_agents.down.sql new file mode 100644 index 0000000000000..a17d0cc7982a6 --- /dev/null +++ b/coderd/database/migrations/000360_external_agents.down.sql @@ -0,0 +1,77 @@ +DROP VIEW template_version_with_user; +DROP VIEW workspace_build_with_user; + +ALTER TABLE template_versions DROP COLUMN has_external_agent; +ALTER TABLE workspace_builds DROP COLUMN has_external_agent; + +-- Recreate `template_version_with_user` as defined in dump.sql +CREATE VIEW template_version_with_user AS +SELECT + template_versions.id, + template_versions.template_id, + template_versions.organization_id, + template_versions.created_at, + template_versions.updated_at, + template_versions.name, + template_versions.readme, + template_versions.job_id, + template_versions.created_by, + template_versions.external_auth_providers, + template_versions.message, + template_versions.archived, + template_versions.source_example_id, + template_versions.has_ai_task, + COALESCE(visible_users.avatar_url, '' :: text) AS created_by_avatar_url, + COALESCE(visible_users.username, '' :: text) AS created_by_username, + COALESCE(visible_users.name, '' :: text) AS created_by_name +FROM + ( + template_versions + LEFT JOIN visible_users ON ( + (template_versions.created_by = visible_users.id) + ) + ); + +COMMENT ON VIEW template_version_with_user IS 'Joins in the username + avatar url of the created by user.'; + +-- Recreate `workspace_build_with_user` as defined in dump.sql +CREATE VIEW workspace_build_with_user AS +SELECT + workspace_builds.id, + workspace_builds.created_at, + workspace_builds.updated_at, + workspace_builds.workspace_id, + workspace_builds.template_version_id, + workspace_builds.build_number, + workspace_builds.transition, + workspace_builds.initiator_id, + workspace_builds.provisioner_state, + workspace_builds.job_id, + workspace_builds.deadline, + workspace_builds.reason, + workspace_builds.daily_cost, + workspace_builds.max_deadline, + workspace_builds.template_version_preset_id, + workspace_builds.has_ai_task, + workspace_builds.ai_task_sidebar_app_id, + COALESCE( + visible_users.avatar_url, + '' :: text + ) AS initiator_by_avatar_url, + COALESCE( + visible_users.username, + '' :: text + ) AS initiator_by_username, + COALESCE(visible_users.name, '' :: text) AS initiator_by_name +FROM + ( + workspace_builds + LEFT JOIN visible_users ON ( + ( + workspace_builds.initiator_id = visible_users.id + ) + ) + ); + +COMMENT ON VIEW workspace_build_with_user IS 'Joins in the username + avatar url of the initiated by user.'; + diff --git a/coderd/database/migrations/000360_external_agents.up.sql b/coderd/database/migrations/000360_external_agents.up.sql new file mode 100644 index 0000000000000..00b7d865dfd30 --- /dev/null +++ b/coderd/database/migrations/000360_external_agents.up.sql @@ -0,0 +1,89 @@ +-- Determines if a coder_external_agent resource is defined in a template version. +ALTER TABLE + template_versions +ADD + COLUMN has_external_agent BOOLEAN; + +DROP VIEW template_version_with_user; + +-- We're adding the external_agents column. +CREATE VIEW template_version_with_user AS +SELECT + template_versions.id, + template_versions.template_id, + template_versions.organization_id, + template_versions.created_at, + template_versions.updated_at, + template_versions.name, + template_versions.readme, + template_versions.job_id, + template_versions.created_by, + template_versions.external_auth_providers, + template_versions.message, + template_versions.archived, + template_versions.source_example_id, + template_versions.has_ai_task, + template_versions.has_external_agent, + COALESCE(visible_users.avatar_url, '' :: text) AS created_by_avatar_url, + COALESCE(visible_users.username, '' :: text) AS created_by_username, + COALESCE(visible_users.name, '' :: text) AS created_by_name +FROM + ( + template_versions + LEFT JOIN visible_users ON ( + (template_versions.created_by = visible_users.id) + ) + ); + +COMMENT ON VIEW template_version_with_user IS 'Joins in the username + avatar url of the created by user.'; + +-- Determines if a coder_external_agent resource was included in a +-- workspace build. +ALTER TABLE + workspace_builds +ADD + COLUMN has_external_agent BOOLEAN; + +DROP VIEW workspace_build_with_user; + +-- We're adding the has_external_agent column. +CREATE VIEW workspace_build_with_user AS +SELECT + workspace_builds.id, + workspace_builds.created_at, + workspace_builds.updated_at, + workspace_builds.workspace_id, + workspace_builds.template_version_id, + workspace_builds.build_number, + workspace_builds.transition, + workspace_builds.initiator_id, + workspace_builds.provisioner_state, + workspace_builds.job_id, + workspace_builds.deadline, + workspace_builds.reason, + workspace_builds.daily_cost, + workspace_builds.max_deadline, + workspace_builds.template_version_preset_id, + workspace_builds.has_ai_task, + workspace_builds.ai_task_sidebar_app_id, + workspace_builds.has_external_agent, + COALESCE( + visible_users.avatar_url, + '' :: text + ) AS initiator_by_avatar_url, + COALESCE( + visible_users.username, + '' :: text + ) AS initiator_by_username, + COALESCE(visible_users.name, '' :: text) AS initiator_by_name +FROM + ( + workspace_builds + LEFT JOIN visible_users ON ( + ( + workspace_builds.initiator_id = visible_users.id + ) + ) + ); + +COMMENT ON VIEW workspace_build_with_user IS 'Joins in the username + avatar url of the initiated by user.'; diff --git a/coderd/database/migrations/000361_username_length_constraint.down.sql b/coderd/database/migrations/000361_username_length_constraint.down.sql new file mode 100644 index 0000000000000..cb3fccad73098 --- /dev/null +++ b/coderd/database/migrations/000361_username_length_constraint.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE users +DROP CONSTRAINT IF EXISTS users_username_min_length; diff --git a/coderd/database/migrations/000361_username_length_constraint.up.sql b/coderd/database/migrations/000361_username_length_constraint.up.sql new file mode 100644 index 0000000000000..526d31c0a7246 --- /dev/null +++ b/coderd/database/migrations/000361_username_length_constraint.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE users +ADD CONSTRAINT users_username_min_length +CHECK (length(username) >= 1); diff --git a/coderd/database/migrations/testdata/fixtures/000357_add_user_secrets.up.sql b/coderd/database/migrations/testdata/fixtures/000357_add_user_secrets.up.sql new file mode 100644 index 0000000000000..a82ceb593b629 --- /dev/null +++ b/coderd/database/migrations/testdata/fixtures/000357_add_user_secrets.up.sql @@ -0,0 +1,18 @@ +INSERT INTO user_secrets ( + id, + user_id, + name, + description, + value, + env_name, + file_path +) +VALUES ( + '4848b19e-b392-4a1b-bc7d-0b7ffb41ef87', + '30095c71-380b-457a-8995-97b8ee6e5307', + 'secret-name', + 'secret description', + 'secret value', + 'SECRET_ENV_NAME', + '~/secret/file/path' +); diff --git a/coderd/database/migrations/testdata/fixtures/000359_create_usage_events_table.up.sql b/coderd/database/migrations/testdata/fixtures/000359_create_usage_events_table.up.sql new file mode 100644 index 0000000000000..aa7c53f5eb94c --- /dev/null +++ b/coderd/database/migrations/testdata/fixtures/000359_create_usage_events_table.up.sql @@ -0,0 +1,60 @@ +INSERT INTO usage_events ( + id, + event_type, + event_data, + created_at, + publish_started_at, + published_at, + failure_message +) +VALUES +-- Unpublished dc_managed_agents_v1 event. +( + 'event1', + 'dc_managed_agents_v1', + '{"count":1}', + '2023-01-01 00:00:00+00', + NULL, + NULL, + NULL +), +-- Successfully published dc_managed_agents_v1 event. +( + 'event2', + 'dc_managed_agents_v1', + '{"count":2}', + '2023-01-01 00:00:00+00', + NULL, + '2023-01-01 00:00:02+00', + NULL +), +-- Publish in progress dc_managed_agents_v1 event. +( + 'event3', + 'dc_managed_agents_v1', + '{"count":3}', + '2023-01-01 00:00:00+00', + '2023-01-01 00:00:01+00', + NULL, + NULL +), +-- Temporarily failed to publish dc_managed_agents_v1 event. +( + 'event4', + 'dc_managed_agents_v1', + '{"count":4}', + '2023-01-01 00:00:00+00', + NULL, + NULL, + 'publish failed temporarily' +), +-- Permanently failed to publish dc_managed_agents_v1 event. +( + 'event5', + 'dc_managed_agents_v1', + '{"count":5}', + '2023-01-01 00:00:00+00', + NULL, + '2023-01-01 00:00:02+00', + 'publish failed permanently' +) diff --git a/coderd/database/modelmethods.go b/coderd/database/modelmethods.go index b49fa113d4b12..e080c7d7e4217 100644 --- a/coderd/database/modelmethods.go +++ b/coderd/database/modelmethods.go @@ -242,6 +242,8 @@ func (w Workspace) WorkspaceTable() WorkspaceTable { AutomaticUpdates: w.AutomaticUpdates, Favorite: w.Favorite, NextStartAt: w.NextStartAt, + GroupACL: w.GroupACL, + UserACL: w.UserACL, } } @@ -274,7 +276,9 @@ func (w WorkspaceTable) RBACObject() rbac.Object { return rbac.ResourceWorkspace.WithID(w.ID). InOrg(w.OrganizationID). - WithOwner(w.OwnerID.String()) + WithOwner(w.OwnerID.String()). + WithGroupACL(w.GroupACL.RBACACL()). + WithACLUserList(w.UserACL.RBACACL()) } func (w WorkspaceTable) DormantRBAC() rbac.Object { @@ -628,3 +632,7 @@ func (m WorkspaceAgentVolumeResourceMonitor) Debounce( return m.DebouncedUntil, false } + +func (s UserSecret) RBACObject() rbac.Object { + return rbac.ResourceUserSecret.WithID(s.ID).WithOwner(s.UserID.String()) +} diff --git a/coderd/database/modelqueries.go b/coderd/database/modelqueries.go index 6bb7483847a2e..69bea8d81adab 100644 --- a/coderd/database/modelqueries.go +++ b/coderd/database/modelqueries.go @@ -82,6 +82,9 @@ func (q *sqlQuerier) GetAuthorizedTemplates(ctx context.Context, arg GetTemplate pq.Array(arg.IDs), arg.Deprecated, arg.HasAITask, + arg.AuthorID, + arg.AuthorUsername, + arg.HasExternalAgent, ) if err != nil { return nil, err @@ -120,6 +123,7 @@ func (q *sqlQuerier) GetAuthorizedTemplates(ctx context.Context, arg GetTemplate &i.ActivityBump, &i.MaxPortSharingLevel, &i.UseClassicParameterFlow, + &i.CorsBehavior, &i.CreatedByAvatarURL, &i.CreatedByUsername, &i.CreatedByName, @@ -268,6 +272,7 @@ func (q *sqlQuerier) GetAuthorizedWorkspaces(ctx context.Context, arg GetWorkspa arg.LastUsedAfter, arg.UsingActive, arg.HasAITask, + arg.HasExternalAgent, arg.RequesterID, arg.Offset, arg.Limit, @@ -297,6 +302,8 @@ func (q *sqlQuerier) GetAuthorizedWorkspaces(ctx context.Context, arg GetWorkspa &i.AutomaticUpdates, &i.Favorite, &i.NextStartAt, + &i.GroupACL, + &i.UserACL, &i.OwnerAvatarUrl, &i.OwnerUsername, &i.OwnerName, @@ -316,6 +323,7 @@ func (q *sqlQuerier) GetAuthorizedWorkspaces(ctx context.Context, arg GetWorkspa &i.LatestBuildTransition, &i.LatestBuildStatus, &i.LatestBuildHasAITask, + &i.LatestBuildHasExternalAgent, &i.Count, ); err != nil { return nil, err diff --git a/coderd/database/models.go b/coderd/database/models.go index e23efe0de0521..effd436f4d18d 100644 --- a/coderd/database/models.go +++ b/coderd/database/models.go @@ -559,6 +559,64 @@ func AllConnectionTypeValues() []ConnectionType { } } +type CorsBehavior string + +const ( + CorsBehaviorSimple CorsBehavior = "simple" + CorsBehaviorPassthru CorsBehavior = "passthru" +) + +func (e *CorsBehavior) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = CorsBehavior(s) + case string: + *e = CorsBehavior(s) + default: + return fmt.Errorf("unsupported scan type for CorsBehavior: %T", src) + } + return nil +} + +type NullCorsBehavior struct { + CorsBehavior CorsBehavior `json:"cors_behavior"` + Valid bool `json:"valid"` // Valid is true if CorsBehavior is not NULL +} + +// Scan implements the Scanner interface. +func (ns *NullCorsBehavior) Scan(value interface{}) error { + if value == nil { + ns.CorsBehavior, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.CorsBehavior.Scan(value) +} + +// Value implements the driver Valuer interface. +func (ns NullCorsBehavior) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.CorsBehavior), nil +} + +func (e CorsBehavior) Valid() bool { + switch e { + case CorsBehaviorSimple, + CorsBehaviorPassthru: + return true + } + return false +} + +func AllCorsBehaviorValues() []CorsBehavior { + return []CorsBehavior{ + CorsBehaviorSimple, + CorsBehaviorPassthru, + } +} + type CryptoKeyFeature string const ( @@ -3007,6 +3065,8 @@ type ExternalAuthLink struct { // The ID of the key used to encrypt the OAuth refresh token. If this is NULL, the refresh token is not encrypted OAuthRefreshTokenKeyID sql.NullString `db:"oauth_refresh_token_key_id" json:"oauth_refresh_token_key_id"` OAuthExtra pqtype.NullRawMessage `db:"oauth_extra" json:"oauth_extra"` + // This error means the refresh token is invalid. Cached so we can avoid calling the external provider again for the same error. + OauthRefreshFailureReason string `db:"oauth_refresh_failure_reason" json:"oauth_refresh_failure_reason"` } type File struct { @@ -3326,6 +3386,10 @@ type ProvisionerJob struct { TraceMetadata pqtype.NullRawMessage `db:"trace_metadata" json:"trace_metadata"` // Computed column to track the status of the job. JobStatus ProvisionerJobStatus `db:"job_status" json:"job_status"` + // Total length of provisioner logs + LogsLength int32 `db:"logs_length" json:"logs_length"` + // Whether the provisioner logs overflowed in length + LogsOverflowed bool `db:"logs_overflowed" json:"logs_overflowed"` } type ProvisionerJobLog struct { @@ -3474,6 +3538,7 @@ type Template struct { ActivityBump int64 `db:"activity_bump" json:"activity_bump"` MaxPortSharingLevel AppSharingLevel `db:"max_port_sharing_level" json:"max_port_sharing_level"` UseClassicParameterFlow bool `db:"use_classic_parameter_flow" json:"use_classic_parameter_flow"` + CorsBehavior CorsBehavior `db:"cors_behavior" json:"cors_behavior"` CreatedByAvatarURL string `db:"created_by_avatar_url" json:"created_by_avatar_url"` CreatedByUsername string `db:"created_by_username" json:"created_by_username"` CreatedByName string `db:"created_by_name" json:"created_by_name"` @@ -3521,7 +3586,8 @@ type TemplateTable struct { ActivityBump int64 `db:"activity_bump" json:"activity_bump"` MaxPortSharingLevel AppSharingLevel `db:"max_port_sharing_level" json:"max_port_sharing_level"` // Determines whether to default to the dynamic parameter creation flow for this template or continue using the legacy classic parameter creation flow.This is a template wide setting, the template admin can revert to the classic flow if there are any issues. An escape hatch is required, as workspace creation is a core workflow and cannot break. This column will be removed when the dynamic parameter creation flow is stable. - UseClassicParameterFlow bool `db:"use_classic_parameter_flow" json:"use_classic_parameter_flow"` + UseClassicParameterFlow bool `db:"use_classic_parameter_flow" json:"use_classic_parameter_flow"` + CorsBehavior CorsBehavior `db:"cors_behavior" json:"cors_behavior"` } // Records aggregated usage statistics for templates/users. All usage is rounded up to the nearest minute. @@ -3568,6 +3634,7 @@ type TemplateVersion struct { Archived bool `db:"archived" json:"archived"` SourceExampleID sql.NullString `db:"source_example_id" json:"source_example_id"` HasAITask sql.NullBool `db:"has_ai_task" json:"has_ai_task"` + HasExternalAgent sql.NullBool `db:"has_external_agent" json:"has_external_agent"` CreatedByAvatarURL string `db:"created_by_avatar_url" json:"created_by_avatar_url"` CreatedByUsername string `db:"created_by_username" json:"created_by_username"` CreatedByName string `db:"created_by_name" json:"created_by_name"` @@ -3621,6 +3688,10 @@ type TemplateVersionPreset struct { PrebuildStatus PrebuildStatus `db:"prebuild_status" json:"prebuild_status"` SchedulingTimezone string `db:"scheduling_timezone" json:"scheduling_timezone"` IsDefault bool `db:"is_default" json:"is_default"` + // Short text describing the preset (max 128 characters). + Description string `db:"description" json:"description"` + // URL or path to an icon representing the preset (max 256 characters). + Icon string `db:"icon" json:"icon"` } type TemplateVersionPresetParameter struct { @@ -3650,10 +3721,11 @@ type TemplateVersionTable struct { // IDs of External auth providers for a specific template version ExternalAuthProviders json.RawMessage `db:"external_auth_providers" json:"external_auth_providers"` // Message describing the changes in this version of the template, similar to a Git commit message. Like a commit message, this should be a short, high-level description of the changes in this version of the template. This message is immutable and should not be updated after the fact. - Message string `db:"message" json:"message"` - Archived bool `db:"archived" json:"archived"` - SourceExampleID sql.NullString `db:"source_example_id" json:"source_example_id"` - HasAITask sql.NullBool `db:"has_ai_task" json:"has_ai_task"` + Message string `db:"message" json:"message"` + Archived bool `db:"archived" json:"archived"` + SourceExampleID sql.NullString `db:"source_example_id" json:"source_example_id"` + HasAITask sql.NullBool `db:"has_ai_task" json:"has_ai_task"` + HasExternalAgent sql.NullBool `db:"has_external_agent" json:"has_external_agent"` } type TemplateVersionTerraformValue struct { @@ -3689,6 +3761,23 @@ type TemplateVersionWorkspaceTag struct { Value string `db:"value" json:"value"` } +// usage_events contains usage data that is collected from the product and potentially shipped to the usage collector service. +type UsageEvent struct { + // For "discrete" event types, this is a random UUID. For "heartbeat" event types, this is a combination of the event type and a truncated timestamp. + ID string `db:"id" json:"id"` + // The usage event type with version. "dc" means "discrete" (e.g. a single event, for counters), "hb" means "heartbeat" (e.g. a recurring event that contains a total count of usage generated from the database, for gauges). + EventType string `db:"event_type" json:"event_type"` + // Event payload. Determined by the matching usage struct for this event type. + EventData json.RawMessage `db:"event_data" json:"event_data"` + CreatedAt time.Time `db:"created_at" json:"created_at"` + // Set to a timestamp while the event is being published by a Coder replica to the usage collector service. Used to avoid duplicate publishes by multiple replicas. Timestamps older than 1 hour are considered expired. + PublishStartedAt sql.NullTime `db:"publish_started_at" json:"publish_started_at"` + // Set to a timestamp when the event is successfully (or permanently unsuccessfully) published to the usage collector service. If set, the event should never be attempted to be published again. + PublishedAt sql.NullTime `db:"published_at" json:"published_at"` + // Set to an error message when the event is temporarily or permanently unsuccessfully published to the usage collector service. + FailureMessage sql.NullString `db:"failure_message" json:"failure_message"` +} + type User struct { ID uuid.UUID `db:"id" json:"id"` Email string `db:"email" json:"email"` @@ -3744,6 +3833,18 @@ type UserLink struct { Claims UserLinkClaims `db:"claims" json:"claims"` } +type UserSecret struct { + ID uuid.UUID `db:"id" json:"id"` + UserID uuid.UUID `db:"user_id" json:"user_id"` + Name string `db:"name" json:"name"` + Description string `db:"description" json:"description"` + Value string `db:"value" json:"value"` + EnvName string `db:"env_name" json:"env_name"` + FilePath string `db:"file_path" json:"file_path"` + CreatedAt time.Time `db:"created_at" json:"created_at"` + UpdatedAt time.Time `db:"updated_at" json:"updated_at"` +} + // Tracks the history of user status changes type UserStatusChange struct { ID uuid.UUID `db:"id" json:"id"` @@ -3787,6 +3888,8 @@ type Workspace struct { AutomaticUpdates AutomaticUpdates `db:"automatic_updates" json:"automatic_updates"` Favorite bool `db:"favorite" json:"favorite"` NextStartAt sql.NullTime `db:"next_start_at" json:"next_start_at"` + GroupACL WorkspaceACL `db:"group_acl" json:"group_acl"` + UserACL WorkspaceACL `db:"user_acl" json:"user_acl"` OwnerAvatarUrl string `db:"owner_avatar_url" json:"owner_avatar_url"` OwnerUsername string `db:"owner_username" json:"owner_username"` OwnerName string `db:"owner_name" json:"owner_name"` @@ -4072,6 +4175,7 @@ type WorkspaceBuild struct { TemplateVersionPresetID uuid.NullUUID `db:"template_version_preset_id" json:"template_version_preset_id"` HasAITask sql.NullBool `db:"has_ai_task" json:"has_ai_task"` AITaskSidebarAppID uuid.NullUUID `db:"ai_task_sidebar_app_id" json:"ai_task_sidebar_app_id"` + HasExternalAgent sql.NullBool `db:"has_external_agent" json:"has_external_agent"` InitiatorByAvatarUrl string `db:"initiator_by_avatar_url" json:"initiator_by_avatar_url"` InitiatorByUsername string `db:"initiator_by_username" json:"initiator_by_username"` InitiatorByName string `db:"initiator_by_name" json:"initiator_by_name"` @@ -4103,6 +4207,7 @@ type WorkspaceBuildTable struct { TemplateVersionPresetID uuid.NullUUID `db:"template_version_preset_id" json:"template_version_preset_id"` HasAITask sql.NullBool `db:"has_ai_task" json:"has_ai_task"` AITaskSidebarAppID uuid.NullUUID `db:"ai_task_sidebar_app_id" json:"ai_task_sidebar_app_id"` + HasExternalAgent sql.NullBool `db:"has_external_agent" json:"has_external_agent"` } type WorkspaceLatestBuild struct { @@ -4208,4 +4313,6 @@ type WorkspaceTable struct { // Favorite is true if the workspace owner has favorited the workspace. Favorite bool `db:"favorite" json:"favorite"` NextStartAt sql.NullTime `db:"next_start_at" json:"next_start_at"` + GroupACL WorkspaceACL `db:"group_acl" json:"group_acl"` + UserACL WorkspaceACL `db:"user_acl" json:"user_acl"` } diff --git a/coderd/database/querier.go b/coderd/database/querier.go index baa5d8590b1d7..c490a04d2b653 100644 --- a/coderd/database/querier.go +++ b/coderd/database/querier.go @@ -71,6 +71,7 @@ type sqlcQuerier interface { // Prebuild considered in-progress if it's in the "starting", "stopping", or "deleting" state. CountInProgressPrebuilds(ctx context.Context) ([]CountInProgressPrebuildsRow, error) CountUnreadInboxNotificationsByUserID(ctx context.Context, userID uuid.UUID) (int64, error) + CreateUserSecret(ctx context.Context, arg CreateUserSecretParams) (UserSecret, error) CustomRoles(ctx context.Context, arg CustomRolesParams) ([]CustomRole, error) DeleteAPIKeyByID(ctx context.Context, id string) error DeleteAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error @@ -118,6 +119,7 @@ type sqlcQuerier interface { DeleteTailnetClientSubscription(ctx context.Context, arg DeleteTailnetClientSubscriptionParams) error DeleteTailnetPeer(ctx context.Context, arg DeleteTailnetPeerParams) (DeleteTailnetPeerRow, error) DeleteTailnetTunnel(ctx context.Context, arg DeleteTailnetTunnelParams) (DeleteTailnetTunnelRow, error) + DeleteUserSecret(ctx context.Context, id uuid.UUID) error DeleteWebpushSubscriptionByUserIDAndEndpoint(ctx context.Context, arg DeleteWebpushSubscriptionByUserIDAndEndpointParams) error DeleteWebpushSubscriptions(ctx context.Context, ids []uuid.UUID) error DeleteWorkspaceAgentPortShare(ctx context.Context, arg DeleteWorkspaceAgentPortShareParams) error @@ -135,6 +137,11 @@ type sqlcQuerier interface { FetchNewMessageMetadata(ctx context.Context, arg FetchNewMessageMetadataParams) (FetchNewMessageMetadataRow, error) FetchVolumesResourceMonitorsByAgentID(ctx context.Context, agentID uuid.UUID) ([]WorkspaceAgentVolumeResourceMonitor, error) FetchVolumesResourceMonitorsUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]WorkspaceAgentVolumeResourceMonitor, error) + // FindMatchingPresetID finds a preset ID that is the largest exact subset of the provided parameters. + // It returns the preset ID if a match is found, or NULL if no match is found. + // The query finds presets where all preset parameters are present in the provided parameters, + // and returns the preset with the most parameters (largest subset). + FindMatchingPresetID(ctx context.Context, arg FindMatchingPresetIDParams) (uuid.UUID, error) GetAPIKeyByID(ctx context.Context, id string) (APIKey, error) // there is no unique constraint on empty token names GetAPIKeyByName(ctx context.Context, arg GetAPIKeyByNameParams) (APIKey, error) @@ -211,7 +218,6 @@ type sqlcQuerier interface { GetLatestCryptoKeyByFeature(ctx context.Context, feature CryptoKeyFeature) (CryptoKey, error) GetLatestWorkspaceAppStatusesByWorkspaceIDs(ctx context.Context, ids []uuid.UUID) ([]WorkspaceAppStatus, error) GetLatestWorkspaceBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) (WorkspaceBuild, error) - GetLatestWorkspaceBuilds(ctx context.Context) ([]WorkspaceBuild, error) GetLatestWorkspaceBuildsByWorkspaceIDs(ctx context.Context, ids []uuid.UUID) ([]WorkspaceBuild, error) GetLicenseByID(ctx context.Context, id int32) (License, error) GetLicenses(ctx context.Context) ([]License, error) @@ -353,6 +359,7 @@ type sqlcQuerier interface { GetTemplateVersionByID(ctx context.Context, id uuid.UUID) (TemplateVersion, error) GetTemplateVersionByJobID(ctx context.Context, jobID uuid.UUID) (TemplateVersion, error) GetTemplateVersionByTemplateIDAndName(ctx context.Context, arg GetTemplateVersionByTemplateIDAndNameParams) (TemplateVersion, error) + GetTemplateVersionHasAITask(ctx context.Context, id uuid.UUID) (bool, error) GetTemplateVersionParameters(ctx context.Context, templateVersionID uuid.UUID) ([]TemplateVersionParameter, error) GetTemplateVersionTerraformValues(ctx context.Context, templateVersionID uuid.UUID) (TemplateVersionTerraformValue, error) GetTemplateVersionVariables(ctx context.Context, templateVersionID uuid.UUID) ([]TemplateVersionVariable, error) @@ -383,6 +390,8 @@ type sqlcQuerier interface { GetUserLinkByUserIDLoginType(ctx context.Context, arg GetUserLinkByUserIDLoginTypeParams) (UserLink, error) GetUserLinksByUserID(ctx context.Context, userID uuid.UUID) ([]UserLink, error) GetUserNotificationPreferences(ctx context.Context, userID uuid.UUID) ([]NotificationPreference, error) + GetUserSecret(ctx context.Context, id uuid.UUID) (UserSecret, error) + GetUserSecretByUserIDAndName(ctx context.Context, arg GetUserSecretByUserIDAndNameParams) (UserSecret, error) // GetUserStatusCounts returns the count of users in each status over time. // The time range is inclusively defined by the start_time and end_time parameters. // @@ -473,8 +482,6 @@ type sqlcQuerier interface { GetWorkspacesAndAgentsByOwnerID(ctx context.Context, ownerID uuid.UUID) ([]GetWorkspacesAndAgentsByOwnerIDRow, error) GetWorkspacesByTemplateID(ctx context.Context, templateID uuid.UUID) ([]WorkspaceTable, error) GetWorkspacesEligibleForTransition(ctx context.Context, now time.Time) ([]GetWorkspacesEligibleForTransitionRow, error) - // Determines if the template versions table has any rows with has_ai_task = TRUE. - HasTemplateVersionsWithAITask(ctx context.Context) (bool, error) InsertAPIKey(ctx context.Context, arg InsertAPIKeyParams) (APIKey, error) // We use the organization_id as the id // for simplicity since all users is @@ -520,6 +527,9 @@ type sqlcQuerier interface { InsertTemplateVersionTerraformValuesByJobID(ctx context.Context, arg InsertTemplateVersionTerraformValuesByJobIDParams) error InsertTemplateVersionVariable(ctx context.Context, arg InsertTemplateVersionVariableParams) (TemplateVersionVariable, error) InsertTemplateVersionWorkspaceTag(ctx context.Context, arg InsertTemplateVersionWorkspaceTagParams) (TemplateVersionWorkspaceTag, error) + // Duplicate events are ignored intentionally to allow for multiple replicas to + // publish heartbeat events. + InsertUsageEvent(ctx context.Context, arg InsertUsageEventParams) error InsertUser(ctx context.Context, arg InsertUserParams) (User, error) // InsertUserGroupsByID adds a user to all provided groups, if they exist. // If there is a conflict, the user is already a member @@ -548,6 +558,7 @@ type sqlcQuerier interface { InsertWorkspaceResourceMetadata(ctx context.Context, arg InsertWorkspaceResourceMetadataParams) ([]WorkspaceResourceMetadatum, error) ListProvisionerKeysByOrganization(ctx context.Context, organizationID uuid.UUID) ([]ProvisionerKey, error) ListProvisionerKeysByOrganizationExcludeReserved(ctx context.Context, organizationID uuid.UUID) ([]ProvisionerKey, error) + ListUserSecrets(ctx context.Context, userID uuid.UUID) ([]UserSecret, error) ListWorkspaceAgentPortShares(ctx context.Context, workspaceID uuid.UUID) ([]WorkspaceAgentPortShare, error) MarkAllInboxNotificationsAsRead(ctx context.Context, arg MarkAllInboxNotificationsAsReadParams) error OIDCClaimFieldValues(ctx context.Context, arg OIDCClaimFieldValuesParams) ([]string, error) @@ -565,6 +576,11 @@ type sqlcQuerier interface { RemoveUserFromAllGroups(ctx context.Context, userID uuid.UUID) error RemoveUserFromGroups(ctx context.Context, arg RemoveUserFromGroupsParams) ([]uuid.UUID, error) RevokeDBCryptKey(ctx context.Context, activeKeyDigest string) error + // Note that this selects from the CTE, not the original table. The CTE is named + // the same as the original table to trick sqlc into reusing the existing struct + // for the table. + // The CTE and the reorder is required because UPDATE doesn't guarantee order. + SelectUsageEventsForPublishing(ctx context.Context, now time.Time) ([]UsageEvent, error) // Non blocking lock. Returns true if the lock was acquired, false otherwise. // // This must be called from within a transaction. The lock will be automatically @@ -593,6 +609,8 @@ type sqlcQuerier interface { UpdatePresetPrebuildStatus(ctx context.Context, arg UpdatePresetPrebuildStatusParams) error UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg UpdateProvisionerDaemonLastSeenAtParams) error UpdateProvisionerJobByID(ctx context.Context, arg UpdateProvisionerJobByIDParams) error + UpdateProvisionerJobLogsLength(ctx context.Context, arg UpdateProvisionerJobLogsLengthParams) error + UpdateProvisionerJobLogsOverflowed(ctx context.Context, arg UpdateProvisionerJobLogsOverflowedParams) error UpdateProvisionerJobWithCancelByID(ctx context.Context, arg UpdateProvisionerJobWithCancelByIDParams) error UpdateProvisionerJobWithCompleteByID(ctx context.Context, arg UpdateProvisionerJobWithCompleteByIDParams) error UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx context.Context, arg UpdateProvisionerJobWithCompleteWithStartedAtByIDParams) error @@ -604,11 +622,12 @@ type sqlcQuerier interface { UpdateTemplateDeletedByID(ctx context.Context, arg UpdateTemplateDeletedByIDParams) error UpdateTemplateMetaByID(ctx context.Context, arg UpdateTemplateMetaByIDParams) error UpdateTemplateScheduleByID(ctx context.Context, arg UpdateTemplateScheduleByIDParams) error - UpdateTemplateVersionAITaskByJobID(ctx context.Context, arg UpdateTemplateVersionAITaskByJobIDParams) error UpdateTemplateVersionByID(ctx context.Context, arg UpdateTemplateVersionByIDParams) error UpdateTemplateVersionDescriptionByJobID(ctx context.Context, arg UpdateTemplateVersionDescriptionByJobIDParams) error UpdateTemplateVersionExternalAuthProvidersByJobID(ctx context.Context, arg UpdateTemplateVersionExternalAuthProvidersByJobIDParams) error + UpdateTemplateVersionFlagsByJobID(ctx context.Context, arg UpdateTemplateVersionFlagsByJobIDParams) error UpdateTemplateWorkspacesLastUsedAt(ctx context.Context, arg UpdateTemplateWorkspacesLastUsedAtParams) error + UpdateUsageEventsPostPublish(ctx context.Context, arg UpdateUsageEventsPostPublishParams) error UpdateUserDeletedByID(ctx context.Context, id uuid.UUID) error UpdateUserGithubComUserID(ctx context.Context, arg UpdateUserGithubComUserIDParams) error UpdateUserHashedOneTimePasscode(ctx context.Context, arg UpdateUserHashedOneTimePasscodeParams) error @@ -621,11 +640,13 @@ type sqlcQuerier interface { UpdateUserProfile(ctx context.Context, arg UpdateUserProfileParams) (User, error) UpdateUserQuietHoursSchedule(ctx context.Context, arg UpdateUserQuietHoursScheduleParams) (User, error) UpdateUserRoles(ctx context.Context, arg UpdateUserRolesParams) (User, error) + UpdateUserSecret(ctx context.Context, arg UpdateUserSecretParams) (UserSecret, error) UpdateUserStatus(ctx context.Context, arg UpdateUserStatusParams) (User, error) UpdateUserTerminalFont(ctx context.Context, arg UpdateUserTerminalFontParams) (UserConfig, error) UpdateUserThemePreference(ctx context.Context, arg UpdateUserThemePreferenceParams) (UserConfig, error) UpdateVolumeResourceMonitor(ctx context.Context, arg UpdateVolumeResourceMonitorParams) error UpdateWorkspace(ctx context.Context, arg UpdateWorkspaceParams) (WorkspaceTable, error) + UpdateWorkspaceACLByID(ctx context.Context, arg UpdateWorkspaceACLByIDParams) error UpdateWorkspaceAgentConnectionByID(ctx context.Context, arg UpdateWorkspaceAgentConnectionByIDParams) error UpdateWorkspaceAgentLifecycleStateByID(ctx context.Context, arg UpdateWorkspaceAgentLifecycleStateByIDParams) error UpdateWorkspaceAgentLogOverflowByID(ctx context.Context, arg UpdateWorkspaceAgentLogOverflowByIDParams) error @@ -634,9 +655,9 @@ type sqlcQuerier interface { UpdateWorkspaceAppHealthByID(ctx context.Context, arg UpdateWorkspaceAppHealthByIDParams) error UpdateWorkspaceAutomaticUpdates(ctx context.Context, arg UpdateWorkspaceAutomaticUpdatesParams) error UpdateWorkspaceAutostart(ctx context.Context, arg UpdateWorkspaceAutostartParams) error - UpdateWorkspaceBuildAITaskByID(ctx context.Context, arg UpdateWorkspaceBuildAITaskByIDParams) error UpdateWorkspaceBuildCostByID(ctx context.Context, arg UpdateWorkspaceBuildCostByIDParams) error UpdateWorkspaceBuildDeadlineByID(ctx context.Context, arg UpdateWorkspaceBuildDeadlineByIDParams) error + UpdateWorkspaceBuildFlagsByID(ctx context.Context, arg UpdateWorkspaceBuildFlagsByIDParams) error UpdateWorkspaceBuildProvisionerStateByID(ctx context.Context, arg UpdateWorkspaceBuildProvisionerStateByIDParams) error UpdateWorkspaceDeletedByID(ctx context.Context, arg UpdateWorkspaceDeletedByIDParams) error UpdateWorkspaceDormantDeletingAt(ctx context.Context, arg UpdateWorkspaceDormantDeletingAtParams) (WorkspaceTable, error) @@ -688,6 +709,8 @@ type sqlcQuerier interface { // was started. This means that a new row was inserted (no previous session) or // the updated_at is older than stale interval. UpsertWorkspaceAppAuditSession(ctx context.Context, arg UpsertWorkspaceAppAuditSessionParams) (bool, error) + ValidateGroupIDs(ctx context.Context, groupIds []uuid.UUID) (ValidateGroupIDsRow, error) + ValidateUserIDs(ctx context.Context, userIds []uuid.UUID) (ValidateUserIDsRow, error) } var _ sqlcQuerier = (*sqlQuerier)(nil) diff --git a/coderd/database/querier_test.go b/coderd/database/querier_test.go index 9c88b9b3db679..18c10d6388f37 100644 --- a/coderd/database/querier_test.go +++ b/coderd/database/querier_test.go @@ -397,6 +397,7 @@ func TestGetProvisionerDaemonsWithStatusByOrganization(t *testing.T) { daemons, err := db.GetProvisionerDaemonsWithStatusByOrganization(context.Background(), database.GetProvisionerDaemonsWithStatusByOrganizationParams{ OrganizationID: org.ID, IDs: []uuid.UUID{matchingDaemon0.ID, matchingDaemon1.ID}, + Offline: sql.NullBool{Bool: true, Valid: true}, }) require.NoError(t, err) require.Len(t, daemons, 2) @@ -430,6 +431,7 @@ func TestGetProvisionerDaemonsWithStatusByOrganization(t *testing.T) { daemons, err := db.GetProvisionerDaemonsWithStatusByOrganization(context.Background(), database.GetProvisionerDaemonsWithStatusByOrganizationParams{ OrganizationID: org.ID, Tags: database.StringMap{"foo": "bar"}, + Offline: sql.NullBool{Bool: true, Valid: true}, }) require.NoError(t, err) require.Len(t, daemons, 1) @@ -463,6 +465,7 @@ func TestGetProvisionerDaemonsWithStatusByOrganization(t *testing.T) { daemons, err := db.GetProvisionerDaemonsWithStatusByOrganization(context.Background(), database.GetProvisionerDaemonsWithStatusByOrganizationParams{ OrganizationID: org.ID, StaleIntervalMS: 45 * time.Minute.Milliseconds(), + Offline: sql.NullBool{Bool: true, Valid: true}, }) require.NoError(t, err) require.Len(t, daemons, 2) @@ -475,6 +478,230 @@ func TestGetProvisionerDaemonsWithStatusByOrganization(t *testing.T) { require.Equal(t, database.ProvisionerDaemonStatusOffline, daemons[0].Status) require.Equal(t, database.ProvisionerDaemonStatusIdle, daemons[1].Status) }) + + t.Run("ExcludeOffline", func(t *testing.T) { + t.Parallel() + db, _ := dbtestutil.NewDB(t) + org := dbgen.Organization(t, db, database.Organization{}) + + dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{ + Name: "offline-daemon", + OrganizationID: org.ID, + CreatedAt: dbtime.Now().Add(-time.Hour), + LastSeenAt: sql.NullTime{ + Valid: true, + Time: dbtime.Now().Add(-time.Hour), + }, + }) + fooDaemon := dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{ + Name: "foo-daemon", + OrganizationID: org.ID, + CreatedAt: dbtime.Now().Add(-(30 * time.Minute)), + LastSeenAt: sql.NullTime{ + Valid: true, + Time: dbtime.Now().Add(-(30 * time.Minute)), + }, + }) + + daemons, err := db.GetProvisionerDaemonsWithStatusByOrganization(context.Background(), database.GetProvisionerDaemonsWithStatusByOrganizationParams{ + OrganizationID: org.ID, + StaleIntervalMS: 45 * time.Minute.Milliseconds(), + }) + require.NoError(t, err) + require.Len(t, daemons, 1) + + require.Equal(t, fooDaemon.ID, daemons[0].ProvisionerDaemon.ID) + require.Equal(t, database.ProvisionerDaemonStatusIdle, daemons[0].Status) + }) + + t.Run("IncludeOffline", func(t *testing.T) { + t.Parallel() + db, _ := dbtestutil.NewDB(t) + org := dbgen.Organization(t, db, database.Organization{}) + + dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{ + Name: "offline-daemon", + OrganizationID: org.ID, + CreatedAt: dbtime.Now().Add(-time.Hour), + LastSeenAt: sql.NullTime{ + Valid: true, + Time: dbtime.Now().Add(-time.Hour), + }, + }) + dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{ + Name: "foo-daemon", + OrganizationID: org.ID, + Tags: database.StringMap{ + "foo": "bar", + }, + }) + dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{ + Name: "bar-daemon", + OrganizationID: org.ID, + CreatedAt: dbtime.Now().Add(-(30 * time.Minute)), + LastSeenAt: sql.NullTime{ + Valid: true, + Time: dbtime.Now().Add(-(30 * time.Minute)), + }, + }) + + daemons, err := db.GetProvisionerDaemonsWithStatusByOrganization(context.Background(), database.GetProvisionerDaemonsWithStatusByOrganizationParams{ + OrganizationID: org.ID, + StaleIntervalMS: 45 * time.Minute.Milliseconds(), + Offline: sql.NullBool{Bool: true, Valid: true}, + }) + require.NoError(t, err) + require.Len(t, daemons, 3) + + statusCounts := make(map[database.ProvisionerDaemonStatus]int) + for _, daemon := range daemons { + statusCounts[daemon.Status]++ + } + + require.Equal(t, 2, statusCounts[database.ProvisionerDaemonStatusIdle]) + require.Equal(t, 1, statusCounts[database.ProvisionerDaemonStatusOffline]) + }) + + t.Run("MatchesStatuses", func(t *testing.T) { + t.Parallel() + db, _ := dbtestutil.NewDB(t) + org := dbgen.Organization(t, db, database.Organization{}) + + dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{ + Name: "offline-daemon", + OrganizationID: org.ID, + CreatedAt: dbtime.Now().Add(-time.Hour), + LastSeenAt: sql.NullTime{ + Valid: true, + Time: dbtime.Now().Add(-time.Hour), + }, + }) + + dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{ + Name: "foo-daemon", + OrganizationID: org.ID, + CreatedAt: dbtime.Now().Add(-(30 * time.Minute)), + LastSeenAt: sql.NullTime{ + Valid: true, + Time: dbtime.Now().Add(-(30 * time.Minute)), + }, + }) + + type testCase struct { + name string + statuses []database.ProvisionerDaemonStatus + expectedNum int + } + + tests := []testCase{ + { + name: "Get idle and offline", + statuses: []database.ProvisionerDaemonStatus{ + database.ProvisionerDaemonStatusOffline, + database.ProvisionerDaemonStatusIdle, + }, + expectedNum: 2, + }, + { + name: "Get offline", + statuses: []database.ProvisionerDaemonStatus{ + database.ProvisionerDaemonStatusOffline, + }, + expectedNum: 1, + }, + // Offline daemons should not be included without Offline param + { + name: "Get idle - empty statuses", + statuses: []database.ProvisionerDaemonStatus{}, + expectedNum: 1, + }, + { + name: "Get idle - nil statuses", + statuses: nil, + expectedNum: 1, + }, + } + + for _, tc := range tests { + //nolint:tparallel,paralleltest + t.Run(tc.name, func(t *testing.T) { + daemons, err := db.GetProvisionerDaemonsWithStatusByOrganization(context.Background(), database.GetProvisionerDaemonsWithStatusByOrganizationParams{ + OrganizationID: org.ID, + StaleIntervalMS: 45 * time.Minute.Milliseconds(), + Statuses: tc.statuses, + }) + require.NoError(t, err) + require.Len(t, daemons, tc.expectedNum) + }) + } + }) + + t.Run("FilterByMaxAge", func(t *testing.T) { + t.Parallel() + db, _ := dbtestutil.NewDB(t) + org := dbgen.Organization(t, db, database.Organization{}) + + dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{ + Name: "foo-daemon", + OrganizationID: org.ID, + CreatedAt: dbtime.Now().Add(-(45 * time.Minute)), + LastSeenAt: sql.NullTime{ + Valid: true, + Time: dbtime.Now().Add(-(45 * time.Minute)), + }, + }) + + dbgen.ProvisionerDaemon(t, db, database.ProvisionerDaemon{ + Name: "bar-daemon", + OrganizationID: org.ID, + CreatedAt: dbtime.Now().Add(-(25 * time.Minute)), + LastSeenAt: sql.NullTime{ + Valid: true, + Time: dbtime.Now().Add(-(25 * time.Minute)), + }, + }) + + type testCase struct { + name string + maxAge sql.NullInt64 + expectedNum int + } + + tests := []testCase{ + { + name: "Max age 1 hour", + maxAge: sql.NullInt64{Int64: time.Hour.Milliseconds(), Valid: true}, + expectedNum: 2, + }, + { + name: "Max age 30 minutes", + maxAge: sql.NullInt64{Int64: (30 * time.Minute).Milliseconds(), Valid: true}, + expectedNum: 1, + }, + { + name: "Max age 15 minutes", + maxAge: sql.NullInt64{Int64: (15 * time.Minute).Milliseconds(), Valid: true}, + expectedNum: 0, + }, + { + name: "No max age", + maxAge: sql.NullInt64{Valid: false}, + expectedNum: 2, + }, + } + for _, tc := range tests { + //nolint:tparallel,paralleltest + t.Run(tc.name, func(t *testing.T) { + daemons, err := db.GetProvisionerDaemonsWithStatusByOrganization(context.Background(), database.GetProvisionerDaemonsWithStatusByOrganizationParams{ + OrganizationID: org.ID, + StaleIntervalMS: 60 * time.Minute.Milliseconds(), + MaxAgeMs: tc.maxAge, + }) + require.NoError(t, err) + require.Len(t, daemons, tc.expectedNum) + }) + } + }) } func TestGetWorkspaceAgentUsageStats(t *testing.T) { @@ -1552,8 +1779,11 @@ func TestUpdateSystemUser(t *testing.T) { // When: attempting to update a system user's name. _, err = db.UpdateUserProfile(ctx, database.UpdateUserProfileParams{ - ID: systemUser.ID, - Name: "not prebuilds", + ID: systemUser.ID, + Email: systemUser.Email, + Username: systemUser.Username, + AvatarURL: systemUser.AvatarURL, + Name: "not prebuilds", }) // Then: the attempt is rejected by a postgres trigger. // require.ErrorContains(t, err, "Cannot modify or delete system users") @@ -6003,3 +6233,349 @@ func TestGetRunningPrebuiltWorkspaces(t *testing.T) { require.Len(t, runningPrebuilds, 1, "expected only one running prebuilt workspace") require.Equal(t, runningPrebuild.ID, runningPrebuilds[0].ID, "expected the running prebuilt workspace to be returned") } + +func TestUserSecretsCRUDOperations(t *testing.T) { + t.Parallel() + + // Use raw database without dbauthz wrapper for this test + db, _ := dbtestutil.NewDB(t) + + t.Run("FullCRUDWorkflow", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitMedium) + + // Create a new user for this test + testUser := dbgen.User(t, db, database.User{}) + + // 1. CREATE + secretID := uuid.New() + createParams := database.CreateUserSecretParams{ + ID: secretID, + UserID: testUser.ID, + Name: "workflow-secret", + Description: "Secret for full CRUD workflow", + Value: "workflow-value", + EnvName: "WORKFLOW_ENV", + FilePath: "/workflow/path", + } + + createdSecret, err := db.CreateUserSecret(ctx, createParams) + require.NoError(t, err) + assert.Equal(t, secretID, createdSecret.ID) + + // 2. READ by ID + readSecret, err := db.GetUserSecret(ctx, createdSecret.ID) + require.NoError(t, err) + assert.Equal(t, createdSecret.ID, readSecret.ID) + assert.Equal(t, "workflow-secret", readSecret.Name) + + // 3. READ by UserID and Name + readByNameParams := database.GetUserSecretByUserIDAndNameParams{ + UserID: testUser.ID, + Name: "workflow-secret", + } + readByNameSecret, err := db.GetUserSecretByUserIDAndName(ctx, readByNameParams) + require.NoError(t, err) + assert.Equal(t, createdSecret.ID, readByNameSecret.ID) + + // 4. LIST + secrets, err := db.ListUserSecrets(ctx, testUser.ID) + require.NoError(t, err) + require.Len(t, secrets, 1) + assert.Equal(t, createdSecret.ID, secrets[0].ID) + + // 5. UPDATE + updateParams := database.UpdateUserSecretParams{ + ID: createdSecret.ID, + Description: "Updated workflow description", + Value: "updated-workflow-value", + EnvName: "UPDATED_WORKFLOW_ENV", + FilePath: "/updated/workflow/path", + } + + updatedSecret, err := db.UpdateUserSecret(ctx, updateParams) + require.NoError(t, err) + assert.Equal(t, "Updated workflow description", updatedSecret.Description) + assert.Equal(t, "updated-workflow-value", updatedSecret.Value) + + // 6. DELETE + err = db.DeleteUserSecret(ctx, createdSecret.ID) + require.NoError(t, err) + + // Verify deletion + _, err = db.GetUserSecret(ctx, createdSecret.ID) + require.Error(t, err) + assert.Contains(t, err.Error(), "no rows in result set") + + // Verify list is empty + secrets, err = db.ListUserSecrets(ctx, testUser.ID) + require.NoError(t, err) + assert.Len(t, secrets, 0) + }) + + t.Run("UniqueConstraints", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitMedium) + + // Create a new user for this test + testUser := dbgen.User(t, db, database.User{}) + + // Create first secret + secret1 := dbgen.UserSecret(t, db, database.UserSecret{ + UserID: testUser.ID, + Name: "unique-test", + Description: "First secret", + Value: "value1", + EnvName: "UNIQUE_ENV", + FilePath: "/unique/path", + }) + + // Try to create another secret with the same name (should fail) + _, err := db.CreateUserSecret(ctx, database.CreateUserSecretParams{ + UserID: testUser.ID, + Name: "unique-test", // Same name + Description: "Second secret", + Value: "value2", + }) + require.Error(t, err) + assert.Contains(t, err.Error(), "duplicate key value") + + // Try to create another secret with the same env_name (should fail) + _, err = db.CreateUserSecret(ctx, database.CreateUserSecretParams{ + UserID: testUser.ID, + Name: "unique-test-2", + Description: "Second secret", + Value: "value2", + EnvName: "UNIQUE_ENV", // Same env_name + }) + require.Error(t, err) + assert.Contains(t, err.Error(), "duplicate key value") + + // Try to create another secret with the same file_path (should fail) + _, err = db.CreateUserSecret(ctx, database.CreateUserSecretParams{ + UserID: testUser.ID, + Name: "unique-test-3", + Description: "Second secret", + Value: "value2", + FilePath: "/unique/path", // Same file_path + }) + require.Error(t, err) + assert.Contains(t, err.Error(), "duplicate key value") + + // Create secret with empty env_name and file_path (should succeed) + secret2 := dbgen.UserSecret(t, db, database.UserSecret{ + UserID: testUser.ID, + Name: "unique-test-4", + Description: "Second secret", + Value: "value2", + EnvName: "", // Empty env_name + FilePath: "", // Empty file_path + }) + + // Verify both secrets exist + _, err = db.GetUserSecret(ctx, secret1.ID) + require.NoError(t, err) + _, err = db.GetUserSecret(ctx, secret2.ID) + require.NoError(t, err) + }) +} + +func TestUserSecretsAuthorization(t *testing.T) { + t.Parallel() + + // Use raw database and wrap with dbauthz for authorization testing + db, _ := dbtestutil.NewDB(t) + authorizer := rbac.NewStrictCachingAuthorizer(prometheus.NewRegistry()) + authDB := dbauthz.New(db, authorizer, slogtest.Make(t, &slogtest.Options{}), coderdtest.AccessControlStorePointer()) + + // Create test users + user1 := dbgen.User(t, db, database.User{}) + user2 := dbgen.User(t, db, database.User{}) + owner := dbgen.User(t, db, database.User{}) + orgAdmin := dbgen.User(t, db, database.User{}) + + // Create organization for org-scoped roles + org := dbgen.Organization(t, db, database.Organization{}) + + // Create secrets for users + user1Secret := dbgen.UserSecret(t, db, database.UserSecret{ + UserID: user1.ID, + Name: "user1-secret", + Description: "User 1's secret", + Value: "user1-value", + }) + + user2Secret := dbgen.UserSecret(t, db, database.UserSecret{ + UserID: user2.ID, + Name: "user2-secret", + Description: "User 2's secret", + Value: "user2-value", + }) + + testCases := []struct { + name string + subject rbac.Subject + secretID uuid.UUID + expectedAccess bool + }{ + { + name: "UserCanAccessOwnSecrets", + subject: rbac.Subject{ + ID: user1.ID.String(), + Roles: rbac.RoleIdentifiers{rbac.RoleMember()}, + Scope: rbac.ScopeAll, + }, + secretID: user1Secret.ID, + expectedAccess: true, + }, + { + name: "UserCannotAccessOtherUserSecrets", + subject: rbac.Subject{ + ID: user1.ID.String(), + Roles: rbac.RoleIdentifiers{rbac.RoleMember()}, + Scope: rbac.ScopeAll, + }, + secretID: user2Secret.ID, + expectedAccess: false, + }, + { + name: "OwnerCannotAccessUserSecrets", + subject: rbac.Subject{ + ID: owner.ID.String(), + Roles: rbac.RoleIdentifiers{rbac.RoleOwner()}, + Scope: rbac.ScopeAll, + }, + secretID: user1Secret.ID, + expectedAccess: false, + }, + { + name: "OrgAdminCannotAccessUserSecrets", + subject: rbac.Subject{ + ID: orgAdmin.ID.String(), + Roles: rbac.RoleIdentifiers{rbac.ScopedRoleOrgAdmin(org.ID)}, + Scope: rbac.ScopeAll, + }, + secretID: user1Secret.ID, + expectedAccess: false, + }, + } + + for _, tc := range testCases { + tc := tc // capture range variable + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitMedium) + + authCtx := dbauthz.As(ctx, tc.subject) + + // Test GetUserSecret + _, err := authDB.GetUserSecret(authCtx, tc.secretID) + + if tc.expectedAccess { + require.NoError(t, err, "expected access to be granted") + } else { + require.Error(t, err, "expected access to be denied") + assert.True(t, dbauthz.IsNotAuthorizedError(err), "expected authorization error") + } + }) + } +} + +func TestWorkspaceBuildDeadlineConstraint(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitLong) + + db, _ := dbtestutil.NewDB(t) + org := dbgen.Organization(t, db, database.Organization{}) + user := dbgen.User(t, db, database.User{}) + template := dbgen.Template(t, db, database.Template{ + CreatedBy: user.ID, + OrganizationID: org.ID, + }) + templateVersion := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + TemplateID: uuid.NullUUID{UUID: template.ID, Valid: true}, + OrganizationID: org.ID, + CreatedBy: user.ID, + }) + workspace := dbgen.Workspace(t, db, database.WorkspaceTable{ + OwnerID: user.ID, + TemplateID: template.ID, + Name: "test-workspace", + Deleted: false, + }) + job := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{ + OrganizationID: org.ID, + InitiatorID: database.PrebuildsSystemUserID, + Provisioner: database.ProvisionerTypeEcho, + Type: database.ProvisionerJobTypeWorkspaceBuild, + StartedAt: sql.NullTime{Time: time.Now().Add(-time.Minute), Valid: true}, + CompletedAt: sql.NullTime{Time: time.Now(), Valid: true}, + }) + workspaceBuild := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ + WorkspaceID: workspace.ID, + TemplateVersionID: templateVersion.ID, + JobID: job.ID, + BuildNumber: 1, + }) + + cases := []struct { + name string + deadline time.Time + maxDeadline time.Time + expectOK bool + }{ + { + name: "no deadline or max_deadline", + deadline: time.Time{}, + maxDeadline: time.Time{}, + expectOK: true, + }, + { + name: "deadline set when max_deadline is not set", + deadline: time.Now().Add(time.Hour), + maxDeadline: time.Time{}, + expectOK: true, + }, + { + name: "deadline before max_deadline", + deadline: time.Now().Add(-time.Hour), + maxDeadline: time.Now().Add(time.Hour), + expectOK: true, + }, + { + name: "deadline is max_deadline", + deadline: time.Now().Add(time.Hour), + maxDeadline: time.Now().Add(time.Hour), + expectOK: true, + }, + + { + name: "deadline after max_deadline", + deadline: time.Now().Add(time.Hour), + maxDeadline: time.Now().Add(-time.Hour), + expectOK: false, + }, + { + name: "deadline is not set when max_deadline is set", + deadline: time.Time{}, + maxDeadline: time.Now().Add(time.Hour), + expectOK: false, + }, + } + + for _, c := range cases { + err := db.UpdateWorkspaceBuildDeadlineByID(ctx, database.UpdateWorkspaceBuildDeadlineByIDParams{ + ID: workspaceBuild.ID, + Deadline: c.deadline, + MaxDeadline: c.maxDeadline, + UpdatedAt: time.Now(), + }) + if c.expectOK { + require.NoError(t, err) + } else { + require.Error(t, err) + require.True(t, database.IsCheckViolation(err, database.CheckWorkspaceBuildsDeadlineBelowMaxDeadline)) + } + } +} diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go index 82ffd069b29f5..3a41cf63c1630 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -32,7 +32,7 @@ WITH latest AS ( -- be as if the workspace auto started at the given time and the -- original TTL was applied. -- - -- Sadly we can't define ` + "`" + `activity_bump_interval` + "`" + ` above since + -- Sadly we can't define 'activity_bump_interval' above since -- it won't be available for this CASE statement, so we have to -- copy the cast twice. WHEN NOW() + (templates.activity_bump / 1000 / 1000 / 1000 || ' seconds')::interval > $1 :: timestamptz @@ -62,7 +62,11 @@ WITH latest AS ( ON workspaces.id = workspace_builds.workspace_id JOIN templates ON templates.id = workspaces.template_id - WHERE workspace_builds.workspace_id = $2::uuid + WHERE + workspace_builds.workspace_id = $2::uuid + -- Prebuilt workspaces (identified by having the prebuilds system user as owner_id) + -- are managed by the reconciliation loop and not subject to activity bumping + AND workspaces.owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID ORDER BY workspace_builds.build_number DESC LIMIT 1 ) @@ -1711,7 +1715,7 @@ func (q *sqlQuerier) DeleteExternalAuthLink(ctx context.Context, arg DeleteExter } const getExternalAuthLink = `-- name: GetExternalAuthLink :one -SELECT provider_id, user_id, created_at, updated_at, oauth_access_token, oauth_refresh_token, oauth_expiry, oauth_access_token_key_id, oauth_refresh_token_key_id, oauth_extra FROM external_auth_links WHERE provider_id = $1 AND user_id = $2 +SELECT provider_id, user_id, created_at, updated_at, oauth_access_token, oauth_refresh_token, oauth_expiry, oauth_access_token_key_id, oauth_refresh_token_key_id, oauth_extra, oauth_refresh_failure_reason FROM external_auth_links WHERE provider_id = $1 AND user_id = $2 ` type GetExternalAuthLinkParams struct { @@ -1733,12 +1737,13 @@ func (q *sqlQuerier) GetExternalAuthLink(ctx context.Context, arg GetExternalAut &i.OAuthAccessTokenKeyID, &i.OAuthRefreshTokenKeyID, &i.OAuthExtra, + &i.OauthRefreshFailureReason, ) return i, err } const getExternalAuthLinksByUserID = `-- name: GetExternalAuthLinksByUserID :many -SELECT provider_id, user_id, created_at, updated_at, oauth_access_token, oauth_refresh_token, oauth_expiry, oauth_access_token_key_id, oauth_refresh_token_key_id, oauth_extra FROM external_auth_links WHERE user_id = $1 +SELECT provider_id, user_id, created_at, updated_at, oauth_access_token, oauth_refresh_token, oauth_expiry, oauth_access_token_key_id, oauth_refresh_token_key_id, oauth_extra, oauth_refresh_failure_reason FROM external_auth_links WHERE user_id = $1 ` func (q *sqlQuerier) GetExternalAuthLinksByUserID(ctx context.Context, userID uuid.UUID) ([]ExternalAuthLink, error) { @@ -1761,6 +1766,7 @@ func (q *sqlQuerier) GetExternalAuthLinksByUserID(ctx context.Context, userID uu &i.OAuthAccessTokenKeyID, &i.OAuthRefreshTokenKeyID, &i.OAuthExtra, + &i.OauthRefreshFailureReason, ); err != nil { return nil, err } @@ -1798,7 +1804,7 @@ INSERT INTO external_auth_links ( $8, $9, $10 -) RETURNING provider_id, user_id, created_at, updated_at, oauth_access_token, oauth_refresh_token, oauth_expiry, oauth_access_token_key_id, oauth_refresh_token_key_id, oauth_extra +) RETURNING provider_id, user_id, created_at, updated_at, oauth_access_token, oauth_refresh_token, oauth_expiry, oauth_access_token_key_id, oauth_refresh_token_key_id, oauth_extra, oauth_refresh_failure_reason ` type InsertExternalAuthLinkParams struct { @@ -1839,6 +1845,7 @@ func (q *sqlQuerier) InsertExternalAuthLink(ctx context.Context, arg InsertExter &i.OAuthAccessTokenKeyID, &i.OAuthRefreshTokenKeyID, &i.OAuthExtra, + &i.OauthRefreshFailureReason, ) return i, err } @@ -1851,8 +1858,12 @@ UPDATE external_auth_links SET oauth_refresh_token = $6, oauth_refresh_token_key_id = $7, oauth_expiry = $8, - oauth_extra = $9 -WHERE provider_id = $1 AND user_id = $2 RETURNING provider_id, user_id, created_at, updated_at, oauth_access_token, oauth_refresh_token, oauth_expiry, oauth_access_token_key_id, oauth_refresh_token_key_id, oauth_extra + oauth_extra = $9, + -- Only 'UpdateExternalAuthLinkRefreshToken' supports updating the oauth_refresh_failure_reason. + -- Any updates to the external auth link, will be assumed to change the state and clear + -- any cached errors. + oauth_refresh_failure_reason = '' +WHERE provider_id = $1 AND user_id = $2 RETURNING provider_id, user_id, created_at, updated_at, oauth_access_token, oauth_refresh_token, oauth_expiry, oauth_access_token_key_id, oauth_refresh_token_key_id, oauth_extra, oauth_refresh_failure_reason ` type UpdateExternalAuthLinkParams struct { @@ -1891,6 +1902,7 @@ func (q *sqlQuerier) UpdateExternalAuthLink(ctx context.Context, arg UpdateExter &i.OAuthAccessTokenKeyID, &i.OAuthRefreshTokenKeyID, &i.OAuthExtra, + &i.OauthRefreshFailureReason, ) return i, err } @@ -1899,27 +1911,32 @@ const updateExternalAuthLinkRefreshToken = `-- name: UpdateExternalAuthLinkRefre UPDATE external_auth_links SET - oauth_refresh_token = $1, - updated_at = $2 + -- oauth_refresh_failure_reason can be set to cache the failure reason + -- for subsequent refresh attempts. + oauth_refresh_failure_reason = $1, + oauth_refresh_token = $2, + updated_at = $3 WHERE - provider_id = $3 + provider_id = $4 AND - user_id = $4 + user_id = $5 AND -- Required for sqlc to generate a parameter for the oauth_refresh_token_key_id - $5 :: text = $5 :: text + $6 :: text = $6 :: text ` type UpdateExternalAuthLinkRefreshTokenParams struct { - OAuthRefreshToken string `db:"oauth_refresh_token" json:"oauth_refresh_token"` - UpdatedAt time.Time `db:"updated_at" json:"updated_at"` - ProviderID string `db:"provider_id" json:"provider_id"` - UserID uuid.UUID `db:"user_id" json:"user_id"` - OAuthRefreshTokenKeyID string `db:"oauth_refresh_token_key_id" json:"oauth_refresh_token_key_id"` + OauthRefreshFailureReason string `db:"oauth_refresh_failure_reason" json:"oauth_refresh_failure_reason"` + OAuthRefreshToken string `db:"oauth_refresh_token" json:"oauth_refresh_token"` + UpdatedAt time.Time `db:"updated_at" json:"updated_at"` + ProviderID string `db:"provider_id" json:"provider_id"` + UserID uuid.UUID `db:"user_id" json:"user_id"` + OAuthRefreshTokenKeyID string `db:"oauth_refresh_token_key_id" json:"oauth_refresh_token_key_id"` } func (q *sqlQuerier) UpdateExternalAuthLinkRefreshToken(ctx context.Context, arg UpdateExternalAuthLinkRefreshTokenParams) error { _, err := q.db.ExecContext(ctx, updateExternalAuthLinkRefreshToken, + arg.OauthRefreshFailureReason, arg.OAuthRefreshToken, arg.UpdatedAt, arg.ProviderID, @@ -2869,6 +2886,37 @@ func (q *sqlQuerier) UpdateGroupByID(ctx context.Context, arg UpdateGroupByIDPar return i, err } +const validateGroupIDs = `-- name: ValidateGroupIDs :one +WITH input AS ( + SELECT + unnest($1::uuid[]) AS id +) +SELECT + array_agg(input.id)::uuid[] as invalid_group_ids, + COUNT(*) = 0 as ok +FROM + -- Preserve rows where there is not a matching left (groups) row for each + -- right (input) row... + groups + RIGHT JOIN input ON groups.id = input.id +WHERE + -- ...so that we can retain exactly those rows where an input ID does not + -- match an existing group. + groups.id IS NULL +` + +type ValidateGroupIDsRow struct { + InvalidGroupIds []uuid.UUID `db:"invalid_group_ids" json:"invalid_group_ids"` + Ok bool `db:"ok" json:"ok"` +} + +func (q *sqlQuerier) ValidateGroupIDs(ctx context.Context, groupIds []uuid.UUID) (ValidateGroupIDsRow, error) { + row := q.db.QueryRowContext(ctx, validateGroupIDs, pq.Array(groupIds)) + var i ValidateGroupIDsRow + err := row.Scan(pq.Array(&i.InvalidGroupIds), &i.Ok) + return i, err +} + const getTemplateAppInsights = `-- name: GetTemplateAppInsights :many WITH -- Create a list of all unique apps by template, this is used to @@ -7091,7 +7139,20 @@ const claimPrebuiltWorkspace = `-- name: ClaimPrebuiltWorkspace :one UPDATE workspaces w SET owner_id = $1::uuid, name = $2::text, - updated_at = NOW() + updated_at = $3::timestamptz, + -- Update autostart_schedule, next_start_at and ttl according to template and workspace-level + -- configurations, allowing the workspace to be managed by the lifecycle executor as expected. + autostart_schedule = $4, + next_start_at = $5, + ttl = $6, + -- Update last_used_at during claim to ensure the claimed workspace is treated as recently used. + -- This avoids unintended dormancy caused by prebuilds having stale usage timestamps. + last_used_at = $3::timestamptz, + -- Clear dormant and deletion timestamps as a safeguard to ensure a clean lifecycle state after claim. + -- These fields should not be set on prebuilds, but we defensively reset them here to prevent + -- accidental dormancy or deletion by the lifecycle executor. + dormant_at = NULL, + deleting_at = NULL WHERE w.id IN ( SELECT p.id FROM workspace_prebuilds p @@ -7102,7 +7163,7 @@ WHERE w.id IN ( -- The prebuilds system should never try to claim a prebuild for an inactive template version. -- Nevertheless, this filter is here as a defensive measure: AND b.template_version_id = t.active_version_id - AND p.current_preset_id = $3::uuid + AND p.current_preset_id = $7::uuid AND p.ready AND NOT t.deleted LIMIT 1 FOR UPDATE OF p SKIP LOCKED -- Ensure that a concurrent request will not select the same prebuild. @@ -7111,9 +7172,13 @@ RETURNING w.id, w.name ` type ClaimPrebuiltWorkspaceParams struct { - NewUserID uuid.UUID `db:"new_user_id" json:"new_user_id"` - NewName string `db:"new_name" json:"new_name"` - PresetID uuid.UUID `db:"preset_id" json:"preset_id"` + NewUserID uuid.UUID `db:"new_user_id" json:"new_user_id"` + NewName string `db:"new_name" json:"new_name"` + Now time.Time `db:"now" json:"now"` + AutostartSchedule sql.NullString `db:"autostart_schedule" json:"autostart_schedule"` + NextStartAt sql.NullTime `db:"next_start_at" json:"next_start_at"` + WorkspaceTtl sql.NullInt64 `db:"workspace_ttl" json:"workspace_ttl"` + PresetID uuid.UUID `db:"preset_id" json:"preset_id"` } type ClaimPrebuiltWorkspaceRow struct { @@ -7122,7 +7187,15 @@ type ClaimPrebuiltWorkspaceRow struct { } func (q *sqlQuerier) ClaimPrebuiltWorkspace(ctx context.Context, arg ClaimPrebuiltWorkspaceParams) (ClaimPrebuiltWorkspaceRow, error) { - row := q.db.QueryRowContext(ctx, claimPrebuiltWorkspace, arg.NewUserID, arg.NewName, arg.PresetID) + row := q.db.QueryRowContext(ctx, claimPrebuiltWorkspace, + arg.NewUserID, + arg.NewName, + arg.Now, + arg.AutostartSchedule, + arg.NextStartAt, + arg.WorkspaceTtl, + arg.PresetID, + ) var i ClaimPrebuiltWorkspaceRow err := row.Scan(&i.ID, &i.Name) return i, err @@ -7183,6 +7256,47 @@ func (q *sqlQuerier) CountInProgressPrebuilds(ctx context.Context) ([]CountInPro return items, nil } +const findMatchingPresetID = `-- name: FindMatchingPresetID :one +WITH provided_params AS ( + SELECT + unnest($1::text[]) AS name, + unnest($2::text[]) AS value +), +preset_matches AS ( + SELECT + tvp.id AS template_version_preset_id, + COALESCE(COUNT(tvpp.name), 0) AS total_preset_params, + COALESCE(COUNT(pp.name), 0) AS matching_params + FROM template_version_presets tvp + LEFT JOIN template_version_preset_parameters tvpp ON tvpp.template_version_preset_id = tvp.id + LEFT JOIN provided_params pp ON pp.name = tvpp.name AND pp.value = tvpp.value + WHERE tvp.template_version_id = $3 + GROUP BY tvp.id +) +SELECT pm.template_version_preset_id +FROM preset_matches pm +WHERE pm.total_preset_params = pm.matching_params -- All preset parameters must match +ORDER BY pm.total_preset_params DESC -- Return the preset with the most parameters +LIMIT 1 +` + +type FindMatchingPresetIDParams struct { + ParameterNames []string `db:"parameter_names" json:"parameter_names"` + ParameterValues []string `db:"parameter_values" json:"parameter_values"` + TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"` +} + +// FindMatchingPresetID finds a preset ID that is the largest exact subset of the provided parameters. +// It returns the preset ID if a match is found, or NULL if no match is found. +// The query finds presets where all preset parameters are present in the provided parameters, +// and returns the preset with the most parameters (largest subset). +func (q *sqlQuerier) FindMatchingPresetID(ctx context.Context, arg FindMatchingPresetIDParams) (uuid.UUID, error) { + row := q.db.QueryRowContext(ctx, findMatchingPresetID, pq.Array(arg.ParameterNames), pq.Array(arg.ParameterValues), arg.TemplateVersionID) + var template_version_preset_id uuid.UUID + err := row.Scan(&template_version_preset_id) + return template_version_preset_id, err +} + const getPrebuildMetrics = `-- name: GetPrebuildMetrics :many SELECT t.name as template_name, @@ -7628,7 +7742,7 @@ func (q *sqlQuerier) GetActivePresetPrebuildSchedules(ctx context.Context) ([]Te } const getPresetByID = `-- name: GetPresetByID :one -SELECT tvp.id, tvp.template_version_id, tvp.name, tvp.created_at, tvp.desired_instances, tvp.invalidate_after_secs, tvp.prebuild_status, tvp.scheduling_timezone, tvp.is_default, tv.template_id, tv.organization_id FROM +SELECT tvp.id, tvp.template_version_id, tvp.name, tvp.created_at, tvp.desired_instances, tvp.invalidate_after_secs, tvp.prebuild_status, tvp.scheduling_timezone, tvp.is_default, tvp.description, tvp.icon, tv.template_id, tv.organization_id FROM template_version_presets tvp INNER JOIN template_versions tv ON tvp.template_version_id = tv.id WHERE tvp.id = $1 @@ -7644,6 +7758,8 @@ type GetPresetByIDRow struct { PrebuildStatus PrebuildStatus `db:"prebuild_status" json:"prebuild_status"` SchedulingTimezone string `db:"scheduling_timezone" json:"scheduling_timezone"` IsDefault bool `db:"is_default" json:"is_default"` + Description string `db:"description" json:"description"` + Icon string `db:"icon" json:"icon"` TemplateID uuid.NullUUID `db:"template_id" json:"template_id"` OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` } @@ -7661,6 +7777,8 @@ func (q *sqlQuerier) GetPresetByID(ctx context.Context, presetID uuid.UUID) (Get &i.PrebuildStatus, &i.SchedulingTimezone, &i.IsDefault, + &i.Description, + &i.Icon, &i.TemplateID, &i.OrganizationID, ) @@ -7669,7 +7787,7 @@ func (q *sqlQuerier) GetPresetByID(ctx context.Context, presetID uuid.UUID) (Get const getPresetByWorkspaceBuildID = `-- name: GetPresetByWorkspaceBuildID :one SELECT - template_version_presets.id, template_version_presets.template_version_id, template_version_presets.name, template_version_presets.created_at, template_version_presets.desired_instances, template_version_presets.invalidate_after_secs, template_version_presets.prebuild_status, template_version_presets.scheduling_timezone, template_version_presets.is_default + template_version_presets.id, template_version_presets.template_version_id, template_version_presets.name, template_version_presets.created_at, template_version_presets.desired_instances, template_version_presets.invalidate_after_secs, template_version_presets.prebuild_status, template_version_presets.scheduling_timezone, template_version_presets.is_default, template_version_presets.description, template_version_presets.icon FROM template_version_presets INNER JOIN workspace_builds ON workspace_builds.template_version_preset_id = template_version_presets.id @@ -7690,6 +7808,8 @@ func (q *sqlQuerier) GetPresetByWorkspaceBuildID(ctx context.Context, workspaceB &i.PrebuildStatus, &i.SchedulingTimezone, &i.IsDefault, + &i.Description, + &i.Icon, ) return i, err } @@ -7771,7 +7891,7 @@ func (q *sqlQuerier) GetPresetParametersByTemplateVersionID(ctx context.Context, const getPresetsByTemplateVersionID = `-- name: GetPresetsByTemplateVersionID :many SELECT - id, template_version_id, name, created_at, desired_instances, invalidate_after_secs, prebuild_status, scheduling_timezone, is_default + id, template_version_id, name, created_at, desired_instances, invalidate_after_secs, prebuild_status, scheduling_timezone, is_default, description, icon FROM template_version_presets WHERE @@ -7797,6 +7917,8 @@ func (q *sqlQuerier) GetPresetsByTemplateVersionID(ctx context.Context, template &i.PrebuildStatus, &i.SchedulingTimezone, &i.IsDefault, + &i.Description, + &i.Icon, ); err != nil { return nil, err } @@ -7820,7 +7942,9 @@ INSERT INTO template_version_presets ( desired_instances, invalidate_after_secs, scheduling_timezone, - is_default + is_default, + description, + icon ) VALUES ( $1, @@ -7830,8 +7954,10 @@ VALUES ( $5, $6, $7, - $8 -) RETURNING id, template_version_id, name, created_at, desired_instances, invalidate_after_secs, prebuild_status, scheduling_timezone, is_default + $8, + $9, + $10 +) RETURNING id, template_version_id, name, created_at, desired_instances, invalidate_after_secs, prebuild_status, scheduling_timezone, is_default, description, icon ` type InsertPresetParams struct { @@ -7843,6 +7969,8 @@ type InsertPresetParams struct { InvalidateAfterSecs sql.NullInt32 `db:"invalidate_after_secs" json:"invalidate_after_secs"` SchedulingTimezone string `db:"scheduling_timezone" json:"scheduling_timezone"` IsDefault bool `db:"is_default" json:"is_default"` + Description string `db:"description" json:"description"` + Icon string `db:"icon" json:"icon"` } func (q *sqlQuerier) InsertPreset(ctx context.Context, arg InsertPresetParams) (TemplateVersionPreset, error) { @@ -7855,6 +7983,8 @@ func (q *sqlQuerier) InsertPreset(ctx context.Context, arg InsertPresetParams) ( arg.InvalidateAfterSecs, arg.SchedulingTimezone, arg.IsDefault, + arg.Description, + arg.Icon, ) var i TemplateVersionPreset err := row.Scan( @@ -7867,6 +7997,8 @@ func (q *sqlQuerier) InsertPreset(ctx context.Context, arg InsertPresetParams) ( &i.PrebuildStatus, &i.SchedulingTimezone, &i.IsDefault, + &i.Description, + &i.Icon, ) return i, err } @@ -8131,13 +8263,13 @@ const getProvisionerDaemonsWithStatusByOrganization = `-- name: GetProvisionerDa SELECT pd.id, pd.created_at, pd.name, pd.provisioners, pd.replica_id, pd.tags, pd.last_seen_at, pd.version, pd.api_version, pd.organization_id, pd.key_id, CASE - WHEN pd.last_seen_at IS NULL OR pd.last_seen_at < (NOW() - ($1::bigint || ' ms')::interval) - THEN 'offline' - ELSE CASE - WHEN current_job.id IS NOT NULL THEN 'busy' - ELSE 'idle' - END - END::provisioner_daemon_status AS status, + WHEN current_job.id IS NOT NULL THEN 'busy'::provisioner_daemon_status + WHEN (COALESCE($1::bool, false) = true + OR 'offline'::provisioner_daemon_status = ANY($2::provisioner_daemon_status[])) + AND (pd.last_seen_at IS NULL OR pd.last_seen_at < (NOW() - ($3::bigint || ' ms')::interval)) + THEN 'offline'::provisioner_daemon_status + ELSE 'idle'::provisioner_daemon_status + END AS status, pk.name AS key_name, -- NOTE(mafredri): sqlc.embed doesn't support nullable tables nor renaming them. current_job.id AS current_job_id, @@ -8204,21 +8336,56 @@ LEFT JOIN AND previous_template.organization_id = pd.organization_id ) WHERE - pd.organization_id = $2::uuid - AND (COALESCE(array_length($3::uuid[], 1), 0) = 0 OR pd.id = ANY($3::uuid[])) - AND ($4::tagset = 'null'::tagset OR provisioner_tagset_contains(pd.tags::tagset, $4::tagset)) + pd.organization_id = $4::uuid + AND (COALESCE(array_length($5::uuid[], 1), 0) = 0 OR pd.id = ANY($5::uuid[])) + AND ($6::tagset = 'null'::tagset OR provisioner_tagset_contains(pd.tags::tagset, $6::tagset)) + -- Filter by max age if provided + AND ( + $7::bigint IS NULL + OR pd.last_seen_at IS NULL + OR pd.last_seen_at >= (NOW() - ($7::bigint || ' ms')::interval) + ) + AND ( + -- Always include online daemons + (pd.last_seen_at IS NOT NULL AND pd.last_seen_at >= (NOW() - ($3::bigint || ' ms')::interval)) + -- Include offline daemons if offline param is true or 'offline' status is requested + OR ( + (pd.last_seen_at IS NULL OR pd.last_seen_at < (NOW() - ($3::bigint || ' ms')::interval)) + AND ( + COALESCE($1::bool, false) = true + OR 'offline'::provisioner_daemon_status = ANY($2::provisioner_daemon_status[]) + ) + ) + ) + AND ( + -- Filter daemons by any statuses if provided + COALESCE(array_length($2::provisioner_daemon_status[], 1), 0) = 0 + OR (current_job.id IS NOT NULL AND 'busy'::provisioner_daemon_status = ANY($2::provisioner_daemon_status[])) + OR (current_job.id IS NULL AND 'idle'::provisioner_daemon_status = ANY($2::provisioner_daemon_status[])) + OR ( + 'offline'::provisioner_daemon_status = ANY($2::provisioner_daemon_status[]) + AND (pd.last_seen_at IS NULL OR pd.last_seen_at < (NOW() - ($3::bigint || ' ms')::interval)) + ) + OR ( + COALESCE($1::bool, false) = true + AND (pd.last_seen_at IS NULL OR pd.last_seen_at < (NOW() - ($3::bigint || ' ms')::interval)) + ) + ) ORDER BY pd.created_at DESC LIMIT - $5::int + $8::int ` type GetProvisionerDaemonsWithStatusByOrganizationParams struct { - StaleIntervalMS int64 `db:"stale_interval_ms" json:"stale_interval_ms"` - OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` - IDs []uuid.UUID `db:"ids" json:"ids"` - Tags StringMap `db:"tags" json:"tags"` - Limit sql.NullInt32 `db:"limit" json:"limit"` + Offline sql.NullBool `db:"offline" json:"offline"` + Statuses []ProvisionerDaemonStatus `db:"statuses" json:"statuses"` + StaleIntervalMS int64 `db:"stale_interval_ms" json:"stale_interval_ms"` + OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` + IDs []uuid.UUID `db:"ids" json:"ids"` + Tags StringMap `db:"tags" json:"tags"` + MaxAgeMs sql.NullInt64 `db:"max_age_ms" json:"max_age_ms"` + Limit sql.NullInt32 `db:"limit" json:"limit"` } type GetProvisionerDaemonsWithStatusByOrganizationRow struct { @@ -8241,10 +8408,13 @@ type GetProvisionerDaemonsWithStatusByOrganizationRow struct { // Previous job information. func (q *sqlQuerier) GetProvisionerDaemonsWithStatusByOrganization(ctx context.Context, arg GetProvisionerDaemonsWithStatusByOrganizationParams) ([]GetProvisionerDaemonsWithStatusByOrganizationRow, error) { rows, err := q.db.QueryContext(ctx, getProvisionerDaemonsWithStatusByOrganization, + arg.Offline, + pq.Array(arg.Statuses), arg.StaleIntervalMS, arg.OrganizationID, pq.Array(arg.IDs), arg.Tags, + arg.MaxAgeMs, arg.Limit, ) if err != nil { @@ -8496,6 +8666,44 @@ func (q *sqlQuerier) InsertProvisionerJobLogs(ctx context.Context, arg InsertPro return items, nil } +const updateProvisionerJobLogsLength = `-- name: UpdateProvisionerJobLogsLength :exec +UPDATE + provisioner_jobs +SET + logs_length = logs_length + $2 +WHERE + id = $1 +` + +type UpdateProvisionerJobLogsLengthParams struct { + ID uuid.UUID `db:"id" json:"id"` + LogsLength int32 `db:"logs_length" json:"logs_length"` +} + +func (q *sqlQuerier) UpdateProvisionerJobLogsLength(ctx context.Context, arg UpdateProvisionerJobLogsLengthParams) error { + _, err := q.db.ExecContext(ctx, updateProvisionerJobLogsLength, arg.ID, arg.LogsLength) + return err +} + +const updateProvisionerJobLogsOverflowed = `-- name: UpdateProvisionerJobLogsOverflowed :exec +UPDATE + provisioner_jobs +SET + logs_overflowed = $2 +WHERE + id = $1 +` + +type UpdateProvisionerJobLogsOverflowedParams struct { + ID uuid.UUID `db:"id" json:"id"` + LogsOverflowed bool `db:"logs_overflowed" json:"logs_overflowed"` +} + +func (q *sqlQuerier) UpdateProvisionerJobLogsOverflowed(ctx context.Context, arg UpdateProvisionerJobLogsOverflowedParams) error { + _, err := q.db.ExecContext(ctx, updateProvisionerJobLogsOverflowed, arg.ID, arg.LogsOverflowed) + return err +} + const acquireProvisionerJob = `-- name: AcquireProvisionerJob :one UPDATE provisioner_jobs @@ -8525,7 +8733,7 @@ WHERE SKIP LOCKED LIMIT 1 - ) RETURNING id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status + ) RETURNING id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status, logs_length, logs_overflowed ` type AcquireProvisionerJobParams struct { @@ -8571,13 +8779,15 @@ func (q *sqlQuerier) AcquireProvisionerJob(ctx context.Context, arg AcquireProvi &i.ErrorCode, &i.TraceMetadata, &i.JobStatus, + &i.LogsLength, + &i.LogsOverflowed, ) return i, err } const getProvisionerJobByID = `-- name: GetProvisionerJobByID :one SELECT - id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status + id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status, logs_length, logs_overflowed FROM provisioner_jobs WHERE @@ -8607,13 +8817,15 @@ func (q *sqlQuerier) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (P &i.ErrorCode, &i.TraceMetadata, &i.JobStatus, + &i.LogsLength, + &i.LogsOverflowed, ) return i, err } const getProvisionerJobByIDForUpdate = `-- name: GetProvisionerJobByIDForUpdate :one SELECT - id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status + id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status, logs_length, logs_overflowed FROM provisioner_jobs WHERE @@ -8647,6 +8859,8 @@ func (q *sqlQuerier) GetProvisionerJobByIDForUpdate(ctx context.Context, id uuid &i.ErrorCode, &i.TraceMetadata, &i.JobStatus, + &i.LogsLength, + &i.LogsOverflowed, ) return i, err } @@ -8690,7 +8904,7 @@ func (q *sqlQuerier) GetProvisionerJobTimingsByJobID(ctx context.Context, jobID const getProvisionerJobsByIDs = `-- name: GetProvisionerJobsByIDs :many SELECT - id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status + id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status, logs_length, logs_overflowed FROM provisioner_jobs WHERE @@ -8726,6 +8940,8 @@ func (q *sqlQuerier) GetProvisionerJobsByIDs(ctx context.Context, ids []uuid.UUI &i.ErrorCode, &i.TraceMetadata, &i.JobStatus, + &i.LogsLength, + &i.LogsOverflowed, ); err != nil { return nil, err } @@ -8793,7 +9009,7 @@ SELECT -- Step 5: Final SELECT with INNER JOIN provisioner_jobs fj.id, fj.created_at, - pj.id, pj.created_at, pj.updated_at, pj.started_at, pj.canceled_at, pj.completed_at, pj.error, pj.organization_id, pj.initiator_id, pj.provisioner, pj.storage_method, pj.type, pj.input, pj.worker_id, pj.file_id, pj.tags, pj.error_code, pj.trace_metadata, pj.job_status, + pj.id, pj.created_at, pj.updated_at, pj.started_at, pj.canceled_at, pj.completed_at, pj.error, pj.organization_id, pj.initiator_id, pj.provisioner, pj.storage_method, pj.type, pj.input, pj.worker_id, pj.file_id, pj.tags, pj.error_code, pj.trace_metadata, pj.job_status, pj.logs_length, pj.logs_overflowed, fj.queue_position, fj.queue_size FROM @@ -8849,6 +9065,8 @@ func (q *sqlQuerier) GetProvisionerJobsByIDsWithQueuePosition(ctx context.Contex &i.ProvisionerJob.ErrorCode, &i.ProvisionerJob.TraceMetadata, &i.ProvisionerJob.JobStatus, + &i.ProvisionerJob.LogsLength, + &i.ProvisionerJob.LogsOverflowed, &i.QueuePosition, &i.QueueSize, ); err != nil { @@ -8891,7 +9109,7 @@ queue_size AS ( SELECT COUNT(*) AS count FROM pending_jobs ) SELECT - pj.id, pj.created_at, pj.updated_at, pj.started_at, pj.canceled_at, pj.completed_at, pj.error, pj.organization_id, pj.initiator_id, pj.provisioner, pj.storage_method, pj.type, pj.input, pj.worker_id, pj.file_id, pj.tags, pj.error_code, pj.trace_metadata, pj.job_status, + pj.id, pj.created_at, pj.updated_at, pj.started_at, pj.canceled_at, pj.completed_at, pj.error, pj.organization_id, pj.initiator_id, pj.provisioner, pj.storage_method, pj.type, pj.input, pj.worker_id, pj.file_id, pj.tags, pj.error_code, pj.trace_metadata, pj.job_status, pj.logs_length, pj.logs_overflowed, COALESCE(qp.queue_position, 0) AS queue_position, COALESCE(qs.count, 0) AS queue_size, -- Use subquery to utilize ORDER BY in array_agg since it cannot be @@ -9027,6 +9245,8 @@ func (q *sqlQuerier) GetProvisionerJobsByOrganizationAndStatusWithQueuePositionA &i.ProvisionerJob.ErrorCode, &i.ProvisionerJob.TraceMetadata, &i.ProvisionerJob.JobStatus, + &i.ProvisionerJob.LogsLength, + &i.ProvisionerJob.LogsOverflowed, &i.QueuePosition, &i.QueueSize, pq.Array(&i.AvailableWorkers), @@ -9053,7 +9273,7 @@ func (q *sqlQuerier) GetProvisionerJobsByOrganizationAndStatusWithQueuePositionA } const getProvisionerJobsCreatedAfter = `-- name: GetProvisionerJobsCreatedAfter :many -SELECT id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status FROM provisioner_jobs WHERE created_at > $1 +SELECT id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status, logs_length, logs_overflowed FROM provisioner_jobs WHERE created_at > $1 ` func (q *sqlQuerier) GetProvisionerJobsCreatedAfter(ctx context.Context, createdAt time.Time) ([]ProvisionerJob, error) { @@ -9085,6 +9305,8 @@ func (q *sqlQuerier) GetProvisionerJobsCreatedAfter(ctx context.Context, created &i.ErrorCode, &i.TraceMetadata, &i.JobStatus, + &i.LogsLength, + &i.LogsOverflowed, ); err != nil { return nil, err } @@ -9101,7 +9323,7 @@ func (q *sqlQuerier) GetProvisionerJobsCreatedAfter(ctx context.Context, created const getProvisionerJobsToBeReaped = `-- name: GetProvisionerJobsToBeReaped :many SELECT - id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status + id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status, logs_length, logs_overflowed FROM provisioner_jobs WHERE @@ -9158,6 +9380,8 @@ func (q *sqlQuerier) GetProvisionerJobsToBeReaped(ctx context.Context, arg GetPr &i.ErrorCode, &i.TraceMetadata, &i.JobStatus, + &i.LogsLength, + &i.LogsOverflowed, ); err != nil { return nil, err } @@ -9186,10 +9410,11 @@ INSERT INTO "type", "input", tags, - trace_metadata + trace_metadata, + logs_overflowed ) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) RETURNING id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13) RETURNING id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status, logs_length, logs_overflowed ` type InsertProvisionerJobParams struct { @@ -9205,6 +9430,7 @@ type InsertProvisionerJobParams struct { Input json.RawMessage `db:"input" json:"input"` Tags StringMap `db:"tags" json:"tags"` TraceMetadata pqtype.NullRawMessage `db:"trace_metadata" json:"trace_metadata"` + LogsOverflowed bool `db:"logs_overflowed" json:"logs_overflowed"` } func (q *sqlQuerier) InsertProvisionerJob(ctx context.Context, arg InsertProvisionerJobParams) (ProvisionerJob, error) { @@ -9221,6 +9447,7 @@ func (q *sqlQuerier) InsertProvisionerJob(ctx context.Context, arg InsertProvisi arg.Input, arg.Tags, arg.TraceMetadata, + arg.LogsOverflowed, ) var i ProvisionerJob err := row.Scan( @@ -9243,6 +9470,8 @@ func (q *sqlQuerier) InsertProvisionerJob(ctx context.Context, arg InsertProvisi &i.ErrorCode, &i.TraceMetadata, &i.JobStatus, + &i.LogsLength, + &i.LogsOverflowed, ) return i, err } @@ -11294,15 +11523,17 @@ func (q *sqlQuerier) GetTailnetPeers(ctx context.Context, id uuid.UUID) ([]Tailn } const getTailnetTunnelPeerBindings = `-- name: GetTailnetTunnelPeerBindings :many -SELECT tailnet_tunnels.dst_id as peer_id, tailnet_peers.coordinator_id, tailnet_peers.updated_at, tailnet_peers.node, tailnet_peers.status -FROM tailnet_tunnels -INNER JOIN tailnet_peers ON tailnet_tunnels.dst_id = tailnet_peers.id -WHERE tailnet_tunnels.src_id = $1 -UNION -SELECT tailnet_tunnels.src_id as peer_id, tailnet_peers.coordinator_id, tailnet_peers.updated_at, tailnet_peers.node, tailnet_peers.status -FROM tailnet_tunnels -INNER JOIN tailnet_peers ON tailnet_tunnels.src_id = tailnet_peers.id -WHERE tailnet_tunnels.dst_id = $1 +SELECT id AS peer_id, coordinator_id, updated_at, node, status +FROM tailnet_peers +WHERE id IN ( + SELECT dst_id as peer_id + FROM tailnet_tunnels + WHERE tailnet_tunnels.src_id = $1 + UNION + SELECT src_id as peer_id + FROM tailnet_tunnels + WHERE tailnet_tunnels.dst_id = $1 +) ` type GetTailnetTunnelPeerBindingsRow struct { @@ -11382,7 +11613,7 @@ func (q *sqlQuerier) GetTailnetTunnelPeerIDs(ctx context.Context, srcID uuid.UUI } const updateTailnetPeerStatusByCoordinator = `-- name: UpdateTailnetPeerStatusByCoordinator :exec -UPDATE +UPDATE tailnet_peers SET status = $2 @@ -11750,7 +11981,7 @@ func (q *sqlQuerier) GetTemplateAverageBuildTime(ctx context.Context, arg GetTem const getTemplateByID = `-- name: GetTemplateByID :one SELECT - id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow, created_by_avatar_url, created_by_username, created_by_name, organization_name, organization_display_name, organization_icon + id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow, cors_behavior, created_by_avatar_url, created_by_username, created_by_name, organization_name, organization_display_name, organization_icon FROM template_with_names WHERE @@ -11792,6 +12023,7 @@ func (q *sqlQuerier) GetTemplateByID(ctx context.Context, id uuid.UUID) (Templat &i.ActivityBump, &i.MaxPortSharingLevel, &i.UseClassicParameterFlow, + &i.CorsBehavior, &i.CreatedByAvatarURL, &i.CreatedByUsername, &i.CreatedByName, @@ -11804,7 +12036,7 @@ func (q *sqlQuerier) GetTemplateByID(ctx context.Context, id uuid.UUID) (Templat const getTemplateByOrganizationAndName = `-- name: GetTemplateByOrganizationAndName :one SELECT - id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow, created_by_avatar_url, created_by_username, created_by_name, organization_name, organization_display_name, organization_icon + id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow, cors_behavior, created_by_avatar_url, created_by_username, created_by_name, organization_name, organization_display_name, organization_icon FROM template_with_names AS templates WHERE @@ -11854,6 +12086,7 @@ func (q *sqlQuerier) GetTemplateByOrganizationAndName(ctx context.Context, arg G &i.ActivityBump, &i.MaxPortSharingLevel, &i.UseClassicParameterFlow, + &i.CorsBehavior, &i.CreatedByAvatarURL, &i.CreatedByUsername, &i.CreatedByName, @@ -11865,7 +12098,7 @@ func (q *sqlQuerier) GetTemplateByOrganizationAndName(ctx context.Context, arg G } const getTemplates = `-- name: GetTemplates :many -SELECT id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow, created_by_avatar_url, created_by_username, created_by_name, organization_name, organization_display_name, organization_icon FROM template_with_names AS templates +SELECT id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow, cors_behavior, created_by_avatar_url, created_by_username, created_by_name, organization_name, organization_display_name, organization_icon FROM template_with_names AS templates ORDER BY (name, id) ASC ` @@ -11908,6 +12141,7 @@ func (q *sqlQuerier) GetTemplates(ctx context.Context) ([]Template, error) { &i.ActivityBump, &i.MaxPortSharingLevel, &i.UseClassicParameterFlow, + &i.CorsBehavior, &i.CreatedByAvatarURL, &i.CreatedByUsername, &i.CreatedByName, @@ -11930,7 +12164,7 @@ func (q *sqlQuerier) GetTemplates(ctx context.Context) ([]Template, error) { const getTemplatesWithFilter = `-- name: GetTemplatesWithFilter :many SELECT - t.id, t.created_at, t.updated_at, t.organization_id, t.deleted, t.name, t.provisioner, t.active_version_id, t.description, t.default_ttl, t.created_by, t.icon, t.user_acl, t.group_acl, t.display_name, t.allow_user_cancel_workspace_jobs, t.allow_user_autostart, t.allow_user_autostop, t.failure_ttl, t.time_til_dormant, t.time_til_dormant_autodelete, t.autostop_requirement_days_of_week, t.autostop_requirement_weeks, t.autostart_block_days_of_week, t.require_active_version, t.deprecated, t.activity_bump, t.max_port_sharing_level, t.use_classic_parameter_flow, t.created_by_avatar_url, t.created_by_username, t.created_by_name, t.organization_name, t.organization_display_name, t.organization_icon + t.id, t.created_at, t.updated_at, t.organization_id, t.deleted, t.name, t.provisioner, t.active_version_id, t.description, t.default_ttl, t.created_by, t.icon, t.user_acl, t.group_acl, t.display_name, t.allow_user_cancel_workspace_jobs, t.allow_user_autostart, t.allow_user_autostop, t.failure_ttl, t.time_til_dormant, t.time_til_dormant_autodelete, t.autostop_requirement_days_of_week, t.autostop_requirement_weeks, t.autostart_block_days_of_week, t.require_active_version, t.deprecated, t.activity_bump, t.max_port_sharing_level, t.use_classic_parameter_flow, t.cors_behavior, t.created_by_avatar_url, t.created_by_username, t.created_by_name, t.organization_name, t.organization_display_name, t.organization_icon FROM template_with_names AS t LEFT JOIN @@ -11979,19 +12213,41 @@ WHERE tv.has_ai_task = $7 :: boolean ELSE true END + -- Filter by author_id + AND CASE + WHEN $8 :: uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN + t.created_by = $8 + ELSE true + END + -- Filter by author_username + AND CASE + WHEN $9 :: text != '' THEN + t.created_by = (SELECT id FROM users WHERE lower(users.username) = lower($9) AND deleted = false) + ELSE true + END + + -- Filter by has_external_agent in latest version + AND CASE + WHEN $10 :: boolean IS NOT NULL THEN + tv.has_external_agent = $10 :: boolean + ELSE true + END -- Authorize Filter clause will be injected below in GetAuthorizedTemplates -- @authorize_filter ORDER BY (t.name, t.id) ASC ` type GetTemplatesWithFilterParams struct { - Deleted bool `db:"deleted" json:"deleted"` - OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` - ExactName string `db:"exact_name" json:"exact_name"` - FuzzyName string `db:"fuzzy_name" json:"fuzzy_name"` - IDs []uuid.UUID `db:"ids" json:"ids"` - Deprecated sql.NullBool `db:"deprecated" json:"deprecated"` - HasAITask sql.NullBool `db:"has_ai_task" json:"has_ai_task"` + Deleted bool `db:"deleted" json:"deleted"` + OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` + ExactName string `db:"exact_name" json:"exact_name"` + FuzzyName string `db:"fuzzy_name" json:"fuzzy_name"` + IDs []uuid.UUID `db:"ids" json:"ids"` + Deprecated sql.NullBool `db:"deprecated" json:"deprecated"` + HasAITask sql.NullBool `db:"has_ai_task" json:"has_ai_task"` + AuthorID uuid.UUID `db:"author_id" json:"author_id"` + AuthorUsername string `db:"author_username" json:"author_username"` + HasExternalAgent sql.NullBool `db:"has_external_agent" json:"has_external_agent"` } func (q *sqlQuerier) GetTemplatesWithFilter(ctx context.Context, arg GetTemplatesWithFilterParams) ([]Template, error) { @@ -12003,6 +12259,9 @@ func (q *sqlQuerier) GetTemplatesWithFilter(ctx context.Context, arg GetTemplate pq.Array(arg.IDs), arg.Deprecated, arg.HasAITask, + arg.AuthorID, + arg.AuthorUsername, + arg.HasExternalAgent, ) if err != nil { return nil, err @@ -12041,6 +12300,7 @@ func (q *sqlQuerier) GetTemplatesWithFilter(ctx context.Context, arg GetTemplate &i.ActivityBump, &i.MaxPortSharingLevel, &i.UseClassicParameterFlow, + &i.CorsBehavior, &i.CreatedByAvatarURL, &i.CreatedByUsername, &i.CreatedByName, @@ -12079,10 +12339,11 @@ INSERT INTO display_name, allow_user_cancel_workspace_jobs, max_port_sharing_level, - use_classic_parameter_flow + use_classic_parameter_flow, + cors_behavior ) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16) + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17) ` type InsertTemplateParams struct { @@ -12102,6 +12363,7 @@ type InsertTemplateParams struct { AllowUserCancelWorkspaceJobs bool `db:"allow_user_cancel_workspace_jobs" json:"allow_user_cancel_workspace_jobs"` MaxPortSharingLevel AppSharingLevel `db:"max_port_sharing_level" json:"max_port_sharing_level"` UseClassicParameterFlow bool `db:"use_classic_parameter_flow" json:"use_classic_parameter_flow"` + CorsBehavior CorsBehavior `db:"cors_behavior" json:"cors_behavior"` } func (q *sqlQuerier) InsertTemplate(ctx context.Context, arg InsertTemplateParams) error { @@ -12122,6 +12384,7 @@ func (q *sqlQuerier) InsertTemplate(ctx context.Context, arg InsertTemplateParam arg.AllowUserCancelWorkspaceJobs, arg.MaxPortSharingLevel, arg.UseClassicParameterFlow, + arg.CorsBehavior, ) return err } @@ -12222,7 +12485,8 @@ SET allow_user_cancel_workspace_jobs = $7, group_acl = $8, max_port_sharing_level = $9, - use_classic_parameter_flow = $10 + use_classic_parameter_flow = $10, + cors_behavior = $11 WHERE id = $1 ` @@ -12238,6 +12502,7 @@ type UpdateTemplateMetaByIDParams struct { GroupACL TemplateACL `db:"group_acl" json:"group_acl"` MaxPortSharingLevel AppSharingLevel `db:"max_port_sharing_level" json:"max_port_sharing_level"` UseClassicParameterFlow bool `db:"use_classic_parameter_flow" json:"use_classic_parameter_flow"` + CorsBehavior CorsBehavior `db:"cors_behavior" json:"cors_behavior"` } func (q *sqlQuerier) UpdateTemplateMetaByID(ctx context.Context, arg UpdateTemplateMetaByIDParams) error { @@ -12252,6 +12517,7 @@ func (q *sqlQuerier) UpdateTemplateMetaByID(ctx context.Context, arg UpdateTempl arg.GroupACL, arg.MaxPortSharingLevel, arg.UseClassicParameterFlow, + arg.CorsBehavior, ) return err } @@ -12480,7 +12746,7 @@ FROM -- Scope an archive to a single template and ignore already archived template versions ( SELECT - id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, has_ai_task + id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, has_ai_task, has_external_agent FROM template_versions WHERE @@ -12581,7 +12847,7 @@ func (q *sqlQuerier) ArchiveUnusedTemplateVersions(ctx context.Context, arg Arch const getPreviousTemplateVersion = `-- name: GetPreviousTemplateVersion :one SELECT - id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, has_ai_task, created_by_avatar_url, created_by_username, created_by_name + id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, has_ai_task, has_external_agent, created_by_avatar_url, created_by_username, created_by_name FROM template_version_with_user AS template_versions WHERE @@ -12620,6 +12886,7 @@ func (q *sqlQuerier) GetPreviousTemplateVersion(ctx context.Context, arg GetPrev &i.Archived, &i.SourceExampleID, &i.HasAITask, + &i.HasExternalAgent, &i.CreatedByAvatarURL, &i.CreatedByUsername, &i.CreatedByName, @@ -12629,7 +12896,7 @@ func (q *sqlQuerier) GetPreviousTemplateVersion(ctx context.Context, arg GetPrev const getTemplateVersionByID = `-- name: GetTemplateVersionByID :one SELECT - id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, has_ai_task, created_by_avatar_url, created_by_username, created_by_name + id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, has_ai_task, has_external_agent, created_by_avatar_url, created_by_username, created_by_name FROM template_version_with_user AS template_versions WHERE @@ -12654,6 +12921,7 @@ func (q *sqlQuerier) GetTemplateVersionByID(ctx context.Context, id uuid.UUID) ( &i.Archived, &i.SourceExampleID, &i.HasAITask, + &i.HasExternalAgent, &i.CreatedByAvatarURL, &i.CreatedByUsername, &i.CreatedByName, @@ -12663,7 +12931,7 @@ func (q *sqlQuerier) GetTemplateVersionByID(ctx context.Context, id uuid.UUID) ( const getTemplateVersionByJobID = `-- name: GetTemplateVersionByJobID :one SELECT - id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, has_ai_task, created_by_avatar_url, created_by_username, created_by_name + id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, has_ai_task, has_external_agent, created_by_avatar_url, created_by_username, created_by_name FROM template_version_with_user AS template_versions WHERE @@ -12688,6 +12956,7 @@ func (q *sqlQuerier) GetTemplateVersionByJobID(ctx context.Context, jobID uuid.U &i.Archived, &i.SourceExampleID, &i.HasAITask, + &i.HasExternalAgent, &i.CreatedByAvatarURL, &i.CreatedByUsername, &i.CreatedByName, @@ -12697,7 +12966,7 @@ func (q *sqlQuerier) GetTemplateVersionByJobID(ctx context.Context, jobID uuid.U const getTemplateVersionByTemplateIDAndName = `-- name: GetTemplateVersionByTemplateIDAndName :one SELECT - id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, has_ai_task, created_by_avatar_url, created_by_username, created_by_name + id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, has_ai_task, has_external_agent, created_by_avatar_url, created_by_username, created_by_name FROM template_version_with_user AS template_versions WHERE @@ -12728,6 +12997,7 @@ func (q *sqlQuerier) GetTemplateVersionByTemplateIDAndName(ctx context.Context, &i.Archived, &i.SourceExampleID, &i.HasAITask, + &i.HasExternalAgent, &i.CreatedByAvatarURL, &i.CreatedByUsername, &i.CreatedByName, @@ -12735,9 +13005,24 @@ func (q *sqlQuerier) GetTemplateVersionByTemplateIDAndName(ctx context.Context, return i, err } +const getTemplateVersionHasAITask = `-- name: GetTemplateVersionHasAITask :one +SELECT EXISTS ( + SELECT 1 + FROM template_versions + WHERE id = $1 AND has_ai_task = TRUE +) +` + +func (q *sqlQuerier) GetTemplateVersionHasAITask(ctx context.Context, id uuid.UUID) (bool, error) { + row := q.db.QueryRowContext(ctx, getTemplateVersionHasAITask, id) + var exists bool + err := row.Scan(&exists) + return exists, err +} + const getTemplateVersionsByIDs = `-- name: GetTemplateVersionsByIDs :many SELECT - id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, has_ai_task, created_by_avatar_url, created_by_username, created_by_name + id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, has_ai_task, has_external_agent, created_by_avatar_url, created_by_username, created_by_name FROM template_version_with_user AS template_versions WHERE @@ -12768,6 +13053,7 @@ func (q *sqlQuerier) GetTemplateVersionsByIDs(ctx context.Context, ids []uuid.UU &i.Archived, &i.SourceExampleID, &i.HasAITask, + &i.HasExternalAgent, &i.CreatedByAvatarURL, &i.CreatedByUsername, &i.CreatedByName, @@ -12787,7 +13073,7 @@ func (q *sqlQuerier) GetTemplateVersionsByIDs(ctx context.Context, ids []uuid.UU const getTemplateVersionsByTemplateID = `-- name: GetTemplateVersionsByTemplateID :many SELECT - id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, has_ai_task, created_by_avatar_url, created_by_username, created_by_name + id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, has_ai_task, has_external_agent, created_by_avatar_url, created_by_username, created_by_name FROM template_version_with_user AS template_versions WHERE @@ -12865,6 +13151,7 @@ func (q *sqlQuerier) GetTemplateVersionsByTemplateID(ctx context.Context, arg Ge &i.Archived, &i.SourceExampleID, &i.HasAITask, + &i.HasExternalAgent, &i.CreatedByAvatarURL, &i.CreatedByUsername, &i.CreatedByName, @@ -12883,7 +13170,7 @@ func (q *sqlQuerier) GetTemplateVersionsByTemplateID(ctx context.Context, arg Ge } const getTemplateVersionsCreatedAfter = `-- name: GetTemplateVersionsCreatedAfter :many -SELECT id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, has_ai_task, created_by_avatar_url, created_by_username, created_by_name FROM template_version_with_user AS template_versions WHERE created_at > $1 +SELECT id, template_id, organization_id, created_at, updated_at, name, readme, job_id, created_by, external_auth_providers, message, archived, source_example_id, has_ai_task, has_external_agent, created_by_avatar_url, created_by_username, created_by_name FROM template_version_with_user AS template_versions WHERE created_at > $1 ` func (q *sqlQuerier) GetTemplateVersionsCreatedAfter(ctx context.Context, createdAt time.Time) ([]TemplateVersion, error) { @@ -12910,6 +13197,7 @@ func (q *sqlQuerier) GetTemplateVersionsCreatedAfter(ctx context.Context, create &i.Archived, &i.SourceExampleID, &i.HasAITask, + &i.HasExternalAgent, &i.CreatedByAvatarURL, &i.CreatedByUsername, &i.CreatedByName, @@ -12927,18 +13215,6 @@ func (q *sqlQuerier) GetTemplateVersionsCreatedAfter(ctx context.Context, create return items, nil } -const hasTemplateVersionsWithAITask = `-- name: HasTemplateVersionsWithAITask :one -SELECT EXISTS (SELECT 1 FROM template_versions WHERE has_ai_task = TRUE) -` - -// Determines if the template versions table has any rows with has_ai_task = TRUE. -func (q *sqlQuerier) HasTemplateVersionsWithAITask(ctx context.Context) (bool, error) { - row := q.db.QueryRowContext(ctx, hasTemplateVersionsWithAITask) - var exists bool - err := row.Scan(&exists) - return exists, err -} - const insertTemplateVersion = `-- name: InsertTemplateVersion :exec INSERT INTO template_versions ( @@ -13010,27 +13286,6 @@ func (q *sqlQuerier) UnarchiveTemplateVersion(ctx context.Context, arg Unarchive return err } -const updateTemplateVersionAITaskByJobID = `-- name: UpdateTemplateVersionAITaskByJobID :exec -UPDATE - template_versions -SET - has_ai_task = $2, - updated_at = $3 -WHERE - job_id = $1 -` - -type UpdateTemplateVersionAITaskByJobIDParams struct { - JobID uuid.UUID `db:"job_id" json:"job_id"` - HasAITask sql.NullBool `db:"has_ai_task" json:"has_ai_task"` - UpdatedAt time.Time `db:"updated_at" json:"updated_at"` -} - -func (q *sqlQuerier) UpdateTemplateVersionAITaskByJobID(ctx context.Context, arg UpdateTemplateVersionAITaskByJobIDParams) error { - _, err := q.db.ExecContext(ctx, updateTemplateVersionAITaskByJobID, arg.JobID, arg.HasAITask, arg.UpdatedAt) - return err -} - const updateTemplateVersionByID = `-- name: UpdateTemplateVersionByID :exec UPDATE template_versions @@ -13104,6 +13359,34 @@ func (q *sqlQuerier) UpdateTemplateVersionExternalAuthProvidersByJobID(ctx conte return err } +const updateTemplateVersionFlagsByJobID = `-- name: UpdateTemplateVersionFlagsByJobID :exec +UPDATE + template_versions +SET + has_ai_task = $2, + has_external_agent = $3, + updated_at = $4 +WHERE + job_id = $1 +` + +type UpdateTemplateVersionFlagsByJobIDParams struct { + JobID uuid.UUID `db:"job_id" json:"job_id"` + HasAITask sql.NullBool `db:"has_ai_task" json:"has_ai_task"` + HasExternalAgent sql.NullBool `db:"has_external_agent" json:"has_external_agent"` + UpdatedAt time.Time `db:"updated_at" json:"updated_at"` +} + +func (q *sqlQuerier) UpdateTemplateVersionFlagsByJobID(ctx context.Context, arg UpdateTemplateVersionFlagsByJobIDParams) error { + _, err := q.db.ExecContext(ctx, updateTemplateVersionFlagsByJobID, + arg.JobID, + arg.HasAITask, + arg.HasExternalAgent, + arg.UpdatedAt, + ) + return err +} + const getTemplateVersionTerraformValues = `-- name: GetTemplateVersionTerraformValues :one SELECT template_version_terraform_values.template_version_id, template_version_terraform_values.updated_at, template_version_terraform_values.cached_plan, template_version_terraform_values.cached_module_files, template_version_terraform_values.provisionerd_version @@ -13343,6 +13626,161 @@ func (q *sqlQuerier) DisableForeignKeysAndTriggers(ctx context.Context) error { return err } +const insertUsageEvent = `-- name: InsertUsageEvent :exec +INSERT INTO + usage_events ( + id, + event_type, + event_data, + created_at, + publish_started_at, + published_at, + failure_message + ) +VALUES + ($1, $2, $3, $4, NULL, NULL, NULL) +ON CONFLICT (id) DO NOTHING +` + +type InsertUsageEventParams struct { + ID string `db:"id" json:"id"` + EventType string `db:"event_type" json:"event_type"` + EventData json.RawMessage `db:"event_data" json:"event_data"` + CreatedAt time.Time `db:"created_at" json:"created_at"` +} + +// Duplicate events are ignored intentionally to allow for multiple replicas to +// publish heartbeat events. +func (q *sqlQuerier) InsertUsageEvent(ctx context.Context, arg InsertUsageEventParams) error { + _, err := q.db.ExecContext(ctx, insertUsageEvent, + arg.ID, + arg.EventType, + arg.EventData, + arg.CreatedAt, + ) + return err +} + +const selectUsageEventsForPublishing = `-- name: SelectUsageEventsForPublishing :many +WITH usage_events AS ( + UPDATE + usage_events + SET + publish_started_at = $1::timestamptz + WHERE + id IN ( + SELECT + potential_event.id + FROM + usage_events potential_event + WHERE + -- Do not publish events that have already been published or + -- have permanently failed to publish. + potential_event.published_at IS NULL + -- Do not publish events that are already being published by + -- another replica. + AND ( + potential_event.publish_started_at IS NULL + -- If the event has publish_started_at set, it must be older + -- than an hour ago. This is so we can retry publishing + -- events where the replica exited or couldn't update the + -- row. + -- The parenthesis around @now::timestamptz are necessary to + -- avoid sqlc from generating an extra argument. + OR potential_event.publish_started_at < ($1::timestamptz) - INTERVAL '1 hour' + ) + -- Do not publish events older than 30 days. Tallyman will + -- always permanently reject these events anyways. This is to + -- avoid duplicate events being billed to customers, as + -- Metronome will only deduplicate events within 34 days. + -- Also, the same parenthesis thing here as above. + AND potential_event.created_at > ($1::timestamptz) - INTERVAL '30 days' + ORDER BY potential_event.created_at ASC + FOR UPDATE SKIP LOCKED + LIMIT 100 + ) + RETURNING id, event_type, event_data, created_at, publish_started_at, published_at, failure_message +) +SELECT id, event_type, event_data, created_at, publish_started_at, published_at, failure_message +FROM usage_events +ORDER BY created_at ASC +` + +// Note that this selects from the CTE, not the original table. The CTE is named +// the same as the original table to trick sqlc into reusing the existing struct +// for the table. +// The CTE and the reorder is required because UPDATE doesn't guarantee order. +func (q *sqlQuerier) SelectUsageEventsForPublishing(ctx context.Context, now time.Time) ([]UsageEvent, error) { + rows, err := q.db.QueryContext(ctx, selectUsageEventsForPublishing, now) + if err != nil { + return nil, err + } + defer rows.Close() + var items []UsageEvent + for rows.Next() { + var i UsageEvent + if err := rows.Scan( + &i.ID, + &i.EventType, + &i.EventData, + &i.CreatedAt, + &i.PublishStartedAt, + &i.PublishedAt, + &i.FailureMessage, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateUsageEventsPostPublish = `-- name: UpdateUsageEventsPostPublish :exec +UPDATE + usage_events +SET + publish_started_at = NULL, + published_at = CASE WHEN input.set_published_at THEN $1::timestamptz ELSE NULL END, + failure_message = NULLIF(input.failure_message, '') +FROM ( + SELECT + UNNEST($2::text[]) AS id, + UNNEST($3::text[]) AS failure_message, + UNNEST($4::boolean[]) AS set_published_at +) input +WHERE + input.id = usage_events.id + -- If the number of ids, failure messages, and set published ats are not the + -- same, do not do anything. Unfortunately you can't really throw from a + -- query without writing a function or doing some jank like dividing by + -- zero, so this is the best we can do. + AND cardinality($2::text[]) = cardinality($3::text[]) + AND cardinality($2::text[]) = cardinality($4::boolean[]) +` + +type UpdateUsageEventsPostPublishParams struct { + Now time.Time `db:"now" json:"now"` + IDs []string `db:"ids" json:"ids"` + FailureMessages []string `db:"failure_messages" json:"failure_messages"` + SetPublishedAts []bool `db:"set_published_ats" json:"set_published_ats"` +} + +func (q *sqlQuerier) UpdateUsageEventsPostPublish(ctx context.Context, arg UpdateUsageEventsPostPublishParams) error { + _, err := q.db.ExecContext(ctx, updateUsageEventsPostPublish, + arg.Now, + pq.Array(arg.IDs), + pq.Array(arg.FailureMessages), + pq.Array(arg.SetPublishedAts), + ) + return err +} + const getUserLinkByLinkedID = `-- name: GetUserLinkByLinkedID :one SELECT user_links.user_id, user_links.login_type, user_links.linked_id, user_links.oauth_access_token, user_links.oauth_refresh_token, user_links.oauth_expiry, user_links.oauth_access_token_key_id, user_links.oauth_refresh_token_key_id, user_links.claims @@ -13684,6 +14122,196 @@ func (q *sqlQuerier) UpdateUserLinkedID(ctx context.Context, arg UpdateUserLinke return i, err } +const createUserSecret = `-- name: CreateUserSecret :one +INSERT INTO user_secrets ( + id, + user_id, + name, + description, + value, + env_name, + file_path +) VALUES ( + $1, $2, $3, $4, $5, $6, $7 +) RETURNING id, user_id, name, description, value, env_name, file_path, created_at, updated_at +` + +type CreateUserSecretParams struct { + ID uuid.UUID `db:"id" json:"id"` + UserID uuid.UUID `db:"user_id" json:"user_id"` + Name string `db:"name" json:"name"` + Description string `db:"description" json:"description"` + Value string `db:"value" json:"value"` + EnvName string `db:"env_name" json:"env_name"` + FilePath string `db:"file_path" json:"file_path"` +} + +func (q *sqlQuerier) CreateUserSecret(ctx context.Context, arg CreateUserSecretParams) (UserSecret, error) { + row := q.db.QueryRowContext(ctx, createUserSecret, + arg.ID, + arg.UserID, + arg.Name, + arg.Description, + arg.Value, + arg.EnvName, + arg.FilePath, + ) + var i UserSecret + err := row.Scan( + &i.ID, + &i.UserID, + &i.Name, + &i.Description, + &i.Value, + &i.EnvName, + &i.FilePath, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const deleteUserSecret = `-- name: DeleteUserSecret :exec +DELETE FROM user_secrets +WHERE id = $1 +` + +func (q *sqlQuerier) DeleteUserSecret(ctx context.Context, id uuid.UUID) error { + _, err := q.db.ExecContext(ctx, deleteUserSecret, id) + return err +} + +const getUserSecret = `-- name: GetUserSecret :one +SELECT id, user_id, name, description, value, env_name, file_path, created_at, updated_at FROM user_secrets +WHERE id = $1 +` + +func (q *sqlQuerier) GetUserSecret(ctx context.Context, id uuid.UUID) (UserSecret, error) { + row := q.db.QueryRowContext(ctx, getUserSecret, id) + var i UserSecret + err := row.Scan( + &i.ID, + &i.UserID, + &i.Name, + &i.Description, + &i.Value, + &i.EnvName, + &i.FilePath, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const getUserSecretByUserIDAndName = `-- name: GetUserSecretByUserIDAndName :one +SELECT id, user_id, name, description, value, env_name, file_path, created_at, updated_at FROM user_secrets +WHERE user_id = $1 AND name = $2 +` + +type GetUserSecretByUserIDAndNameParams struct { + UserID uuid.UUID `db:"user_id" json:"user_id"` + Name string `db:"name" json:"name"` +} + +func (q *sqlQuerier) GetUserSecretByUserIDAndName(ctx context.Context, arg GetUserSecretByUserIDAndNameParams) (UserSecret, error) { + row := q.db.QueryRowContext(ctx, getUserSecretByUserIDAndName, arg.UserID, arg.Name) + var i UserSecret + err := row.Scan( + &i.ID, + &i.UserID, + &i.Name, + &i.Description, + &i.Value, + &i.EnvName, + &i.FilePath, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + +const listUserSecrets = `-- name: ListUserSecrets :many +SELECT id, user_id, name, description, value, env_name, file_path, created_at, updated_at FROM user_secrets +WHERE user_id = $1 +ORDER BY name ASC +` + +func (q *sqlQuerier) ListUserSecrets(ctx context.Context, userID uuid.UUID) ([]UserSecret, error) { + rows, err := q.db.QueryContext(ctx, listUserSecrets, userID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []UserSecret + for rows.Next() { + var i UserSecret + if err := rows.Scan( + &i.ID, + &i.UserID, + &i.Name, + &i.Description, + &i.Value, + &i.EnvName, + &i.FilePath, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateUserSecret = `-- name: UpdateUserSecret :one +UPDATE user_secrets +SET + description = $2, + value = $3, + env_name = $4, + file_path = $5, + updated_at = CURRENT_TIMESTAMP +WHERE id = $1 +RETURNING id, user_id, name, description, value, env_name, file_path, created_at, updated_at +` + +type UpdateUserSecretParams struct { + ID uuid.UUID `db:"id" json:"id"` + Description string `db:"description" json:"description"` + Value string `db:"value" json:"value"` + EnvName string `db:"env_name" json:"env_name"` + FilePath string `db:"file_path" json:"file_path"` +} + +func (q *sqlQuerier) UpdateUserSecret(ctx context.Context, arg UpdateUserSecretParams) (UserSecret, error) { + row := q.db.QueryRowContext(ctx, updateUserSecret, + arg.ID, + arg.Description, + arg.Value, + arg.EnvName, + arg.FilePath, + ) + var i UserSecret + err := row.Scan( + &i.ID, + &i.UserID, + &i.Name, + &i.Description, + &i.Value, + &i.EnvName, + &i.FilePath, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + const allUserIDs = `-- name: AllUserIDs :many SELECT DISTINCT id FROM USERS WHERE CASE WHEN $1::bool THEN TRUE ELSE is_system = false END @@ -14700,6 +15328,39 @@ func (q *sqlQuerier) UpdateUserThemePreference(ctx context.Context, arg UpdateUs return i, err } +const validateUserIDs = `-- name: ValidateUserIDs :one +WITH input AS ( + SELECT + unnest($1::uuid[]) AS id +) +SELECT + array_agg(input.id)::uuid[] as invalid_user_ids, + COUNT(*) = 0 as ok +FROM + -- Preserve rows where there is not a matching left (users) row for each + -- right (input) row... + users + RIGHT JOIN input ON users.id = input.id +WHERE + -- ...so that we can retain exactly those rows where an input ID does not + -- match an existing user... + users.id IS NULL OR + -- ...or that only matches a user that was deleted. + users.deleted = true +` + +type ValidateUserIDsRow struct { + InvalidUserIds []uuid.UUID `db:"invalid_user_ids" json:"invalid_user_ids"` + Ok bool `db:"ok" json:"ok"` +} + +func (q *sqlQuerier) ValidateUserIDs(ctx context.Context, userIds []uuid.UUID) (ValidateUserIDsRow, error) { + row := q.db.QueryRowContext(ctx, validateUserIDs, pq.Array(userIds)) + var i ValidateUserIDsRow + err := row.Scan(pq.Array(&i.InvalidUserIds), &i.Ok) + return i, err +} + const getWorkspaceAgentDevcontainersByAgentID = `-- name: GetWorkspaceAgentDevcontainersByAgentID :many SELECT id, workspace_agent_id, created_at, workspace_folder, config_path, name @@ -15354,9 +16015,9 @@ func (q *sqlQuerier) DeleteWorkspaceSubAgentByID(ctx context.Context, id uuid.UU const getWorkspaceAgentAndLatestBuildByAuthToken = `-- name: GetWorkspaceAgentAndLatestBuildByAuthToken :one SELECT - workspaces.id, workspaces.created_at, workspaces.updated_at, workspaces.owner_id, workspaces.organization_id, workspaces.template_id, workspaces.deleted, workspaces.name, workspaces.autostart_schedule, workspaces.ttl, workspaces.last_used_at, workspaces.dormant_at, workspaces.deleting_at, workspaces.automatic_updates, workspaces.favorite, workspaces.next_start_at, + workspaces.id, workspaces.created_at, workspaces.updated_at, workspaces.owner_id, workspaces.organization_id, workspaces.template_id, workspaces.deleted, workspaces.name, workspaces.autostart_schedule, workspaces.ttl, workspaces.last_used_at, workspaces.dormant_at, workspaces.deleting_at, workspaces.automatic_updates, workspaces.favorite, workspaces.next_start_at, workspaces.group_acl, workspaces.user_acl, workspace_agents.id, workspace_agents.created_at, workspace_agents.updated_at, workspace_agents.name, workspace_agents.first_connected_at, workspace_agents.last_connected_at, workspace_agents.disconnected_at, workspace_agents.resource_id, workspace_agents.auth_token, workspace_agents.auth_instance_id, workspace_agents.architecture, workspace_agents.environment_variables, workspace_agents.operating_system, workspace_agents.instance_metadata, workspace_agents.resource_metadata, workspace_agents.directory, workspace_agents.version, workspace_agents.last_connected_replica_id, workspace_agents.connection_timeout_seconds, workspace_agents.troubleshooting_url, workspace_agents.motd_file, workspace_agents.lifecycle_state, workspace_agents.expanded_directory, workspace_agents.logs_length, workspace_agents.logs_overflowed, workspace_agents.started_at, workspace_agents.ready_at, workspace_agents.subsystems, workspace_agents.display_apps, workspace_agents.api_version, workspace_agents.display_order, workspace_agents.parent_id, workspace_agents.api_key_scope, workspace_agents.deleted, - workspace_build_with_user.id, workspace_build_with_user.created_at, workspace_build_with_user.updated_at, workspace_build_with_user.workspace_id, workspace_build_with_user.template_version_id, workspace_build_with_user.build_number, workspace_build_with_user.transition, workspace_build_with_user.initiator_id, workspace_build_with_user.provisioner_state, workspace_build_with_user.job_id, workspace_build_with_user.deadline, workspace_build_with_user.reason, workspace_build_with_user.daily_cost, workspace_build_with_user.max_deadline, workspace_build_with_user.template_version_preset_id, workspace_build_with_user.has_ai_task, workspace_build_with_user.ai_task_sidebar_app_id, workspace_build_with_user.initiator_by_avatar_url, workspace_build_with_user.initiator_by_username, workspace_build_with_user.initiator_by_name + workspace_build_with_user.id, workspace_build_with_user.created_at, workspace_build_with_user.updated_at, workspace_build_with_user.workspace_id, workspace_build_with_user.template_version_id, workspace_build_with_user.build_number, workspace_build_with_user.transition, workspace_build_with_user.initiator_id, workspace_build_with_user.provisioner_state, workspace_build_with_user.job_id, workspace_build_with_user.deadline, workspace_build_with_user.reason, workspace_build_with_user.daily_cost, workspace_build_with_user.max_deadline, workspace_build_with_user.template_version_preset_id, workspace_build_with_user.has_ai_task, workspace_build_with_user.ai_task_sidebar_app_id, workspace_build_with_user.has_external_agent, workspace_build_with_user.initiator_by_avatar_url, workspace_build_with_user.initiator_by_username, workspace_build_with_user.initiator_by_name FROM workspace_agents JOIN @@ -15416,6 +16077,8 @@ func (q *sqlQuerier) GetWorkspaceAgentAndLatestBuildByAuthToken(ctx context.Cont &i.WorkspaceTable.AutomaticUpdates, &i.WorkspaceTable.Favorite, &i.WorkspaceTable.NextStartAt, + &i.WorkspaceTable.GroupACL, + &i.WorkspaceTable.UserACL, &i.WorkspaceAgent.ID, &i.WorkspaceAgent.CreatedAt, &i.WorkspaceAgent.UpdatedAt, @@ -15467,6 +16130,7 @@ func (q *sqlQuerier) GetWorkspaceAgentAndLatestBuildByAuthToken(ctx context.Cont &i.WorkspaceBuild.TemplateVersionPresetID, &i.WorkspaceBuild.HasAITask, &i.WorkspaceBuild.AITaskSidebarAppID, + &i.WorkspaceBuild.HasExternalAgent, &i.WorkspaceBuild.InitiatorByAvatarUrl, &i.WorkspaceBuild.InitiatorByUsername, &i.WorkspaceBuild.InitiatorByName, @@ -18121,7 +18785,7 @@ func (q *sqlQuerier) InsertWorkspaceBuildParameters(ctx context.Context, arg Ins } const getActiveWorkspaceBuildsByTemplateID = `-- name: GetActiveWorkspaceBuildsByTemplateID :many -SELECT wb.id, wb.created_at, wb.updated_at, wb.workspace_id, wb.template_version_id, wb.build_number, wb.transition, wb.initiator_id, wb.provisioner_state, wb.job_id, wb.deadline, wb.reason, wb.daily_cost, wb.max_deadline, wb.template_version_preset_id, wb.has_ai_task, wb.ai_task_sidebar_app_id, wb.initiator_by_avatar_url, wb.initiator_by_username, wb.initiator_by_name +SELECT wb.id, wb.created_at, wb.updated_at, wb.workspace_id, wb.template_version_id, wb.build_number, wb.transition, wb.initiator_id, wb.provisioner_state, wb.job_id, wb.deadline, wb.reason, wb.daily_cost, wb.max_deadline, wb.template_version_preset_id, wb.has_ai_task, wb.ai_task_sidebar_app_id, wb.has_external_agent, wb.initiator_by_avatar_url, wb.initiator_by_username, wb.initiator_by_name FROM ( SELECT workspace_id, MAX(build_number) as max_build_number @@ -18178,6 +18842,7 @@ func (q *sqlQuerier) GetActiveWorkspaceBuildsByTemplateID(ctx context.Context, t &i.TemplateVersionPresetID, &i.HasAITask, &i.AITaskSidebarAppID, + &i.HasExternalAgent, &i.InitiatorByAvatarUrl, &i.InitiatorByUsername, &i.InitiatorByName, @@ -18277,7 +18942,7 @@ func (q *sqlQuerier) GetFailedWorkspaceBuildsByTemplateID(ctx context.Context, a const getLatestWorkspaceBuildByWorkspaceID = `-- name: GetLatestWorkspaceBuildByWorkspaceID :one SELECT - id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, initiator_by_avatar_url, initiator_by_username, initiator_by_name + id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name FROM workspace_build_with_user AS workspace_builds WHERE @@ -18309,6 +18974,7 @@ func (q *sqlQuerier) GetLatestWorkspaceBuildByWorkspaceID(ctx context.Context, w &i.TemplateVersionPresetID, &i.HasAITask, &i.AITaskSidebarAppID, + &i.HasExternalAgent, &i.InitiatorByAvatarUrl, &i.InitiatorByUsername, &i.InitiatorByName, @@ -18316,67 +18982,8 @@ func (q *sqlQuerier) GetLatestWorkspaceBuildByWorkspaceID(ctx context.Context, w return i, err } -const getLatestWorkspaceBuilds = `-- name: GetLatestWorkspaceBuilds :many -SELECT wb.id, wb.created_at, wb.updated_at, wb.workspace_id, wb.template_version_id, wb.build_number, wb.transition, wb.initiator_id, wb.provisioner_state, wb.job_id, wb.deadline, wb.reason, wb.daily_cost, wb.max_deadline, wb.template_version_preset_id, wb.has_ai_task, wb.ai_task_sidebar_app_id, wb.initiator_by_avatar_url, wb.initiator_by_username, wb.initiator_by_name -FROM ( - SELECT - workspace_id, MAX(build_number) as max_build_number - FROM - workspace_build_with_user AS workspace_builds - GROUP BY - workspace_id -) m -JOIN - workspace_build_with_user AS wb -ON m.workspace_id = wb.workspace_id AND m.max_build_number = wb.build_number -` - -func (q *sqlQuerier) GetLatestWorkspaceBuilds(ctx context.Context) ([]WorkspaceBuild, error) { - rows, err := q.db.QueryContext(ctx, getLatestWorkspaceBuilds) - if err != nil { - return nil, err - } - defer rows.Close() - var items []WorkspaceBuild - for rows.Next() { - var i WorkspaceBuild - if err := rows.Scan( - &i.ID, - &i.CreatedAt, - &i.UpdatedAt, - &i.WorkspaceID, - &i.TemplateVersionID, - &i.BuildNumber, - &i.Transition, - &i.InitiatorID, - &i.ProvisionerState, - &i.JobID, - &i.Deadline, - &i.Reason, - &i.DailyCost, - &i.MaxDeadline, - &i.TemplateVersionPresetID, - &i.HasAITask, - &i.AITaskSidebarAppID, - &i.InitiatorByAvatarUrl, - &i.InitiatorByUsername, - &i.InitiatorByName, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Close(); err != nil { - return nil, err - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil -} - const getLatestWorkspaceBuildsByWorkspaceIDs = `-- name: GetLatestWorkspaceBuildsByWorkspaceIDs :many -SELECT wb.id, wb.created_at, wb.updated_at, wb.workspace_id, wb.template_version_id, wb.build_number, wb.transition, wb.initiator_id, wb.provisioner_state, wb.job_id, wb.deadline, wb.reason, wb.daily_cost, wb.max_deadline, wb.template_version_preset_id, wb.has_ai_task, wb.ai_task_sidebar_app_id, wb.initiator_by_avatar_url, wb.initiator_by_username, wb.initiator_by_name +SELECT wb.id, wb.created_at, wb.updated_at, wb.workspace_id, wb.template_version_id, wb.build_number, wb.transition, wb.initiator_id, wb.provisioner_state, wb.job_id, wb.deadline, wb.reason, wb.daily_cost, wb.max_deadline, wb.template_version_preset_id, wb.has_ai_task, wb.ai_task_sidebar_app_id, wb.has_external_agent, wb.initiator_by_avatar_url, wb.initiator_by_username, wb.initiator_by_name FROM ( SELECT workspace_id, MAX(build_number) as max_build_number @@ -18419,6 +19026,7 @@ func (q *sqlQuerier) GetLatestWorkspaceBuildsByWorkspaceIDs(ctx context.Context, &i.TemplateVersionPresetID, &i.HasAITask, &i.AITaskSidebarAppID, + &i.HasExternalAgent, &i.InitiatorByAvatarUrl, &i.InitiatorByUsername, &i.InitiatorByName, @@ -18438,7 +19046,7 @@ func (q *sqlQuerier) GetLatestWorkspaceBuildsByWorkspaceIDs(ctx context.Context, const getWorkspaceBuildByID = `-- name: GetWorkspaceBuildByID :one SELECT - id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, initiator_by_avatar_url, initiator_by_username, initiator_by_name + id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name FROM workspace_build_with_user AS workspace_builds WHERE @@ -18468,6 +19076,7 @@ func (q *sqlQuerier) GetWorkspaceBuildByID(ctx context.Context, id uuid.UUID) (W &i.TemplateVersionPresetID, &i.HasAITask, &i.AITaskSidebarAppID, + &i.HasExternalAgent, &i.InitiatorByAvatarUrl, &i.InitiatorByUsername, &i.InitiatorByName, @@ -18477,7 +19086,7 @@ func (q *sqlQuerier) GetWorkspaceBuildByID(ctx context.Context, id uuid.UUID) (W const getWorkspaceBuildByJobID = `-- name: GetWorkspaceBuildByJobID :one SELECT - id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, initiator_by_avatar_url, initiator_by_username, initiator_by_name + id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name FROM workspace_build_with_user AS workspace_builds WHERE @@ -18507,6 +19116,7 @@ func (q *sqlQuerier) GetWorkspaceBuildByJobID(ctx context.Context, jobID uuid.UU &i.TemplateVersionPresetID, &i.HasAITask, &i.AITaskSidebarAppID, + &i.HasExternalAgent, &i.InitiatorByAvatarUrl, &i.InitiatorByUsername, &i.InitiatorByName, @@ -18516,7 +19126,7 @@ func (q *sqlQuerier) GetWorkspaceBuildByJobID(ctx context.Context, jobID uuid.UU const getWorkspaceBuildByWorkspaceIDAndBuildNumber = `-- name: GetWorkspaceBuildByWorkspaceIDAndBuildNumber :one SELECT - id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, initiator_by_avatar_url, initiator_by_username, initiator_by_name + id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name FROM workspace_build_with_user AS workspace_builds WHERE @@ -18550,6 +19160,7 @@ func (q *sqlQuerier) GetWorkspaceBuildByWorkspaceIDAndBuildNumber(ctx context.Co &i.TemplateVersionPresetID, &i.HasAITask, &i.AITaskSidebarAppID, + &i.HasExternalAgent, &i.InitiatorByAvatarUrl, &i.InitiatorByUsername, &i.InitiatorByName, @@ -18626,7 +19237,7 @@ func (q *sqlQuerier) GetWorkspaceBuildStatsByTemplates(ctx context.Context, sinc const getWorkspaceBuildsByWorkspaceID = `-- name: GetWorkspaceBuildsByWorkspaceID :many SELECT - id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, initiator_by_avatar_url, initiator_by_username, initiator_by_name + id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name FROM workspace_build_with_user AS workspace_builds WHERE @@ -18699,6 +19310,7 @@ func (q *sqlQuerier) GetWorkspaceBuildsByWorkspaceID(ctx context.Context, arg Ge &i.TemplateVersionPresetID, &i.HasAITask, &i.AITaskSidebarAppID, + &i.HasExternalAgent, &i.InitiatorByAvatarUrl, &i.InitiatorByUsername, &i.InitiatorByName, @@ -18717,7 +19329,7 @@ func (q *sqlQuerier) GetWorkspaceBuildsByWorkspaceID(ctx context.Context, arg Ge } const getWorkspaceBuildsCreatedAfter = `-- name: GetWorkspaceBuildsCreatedAfter :many -SELECT id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, initiator_by_avatar_url, initiator_by_username, initiator_by_name FROM workspace_build_with_user WHERE created_at > $1 +SELECT id, created_at, updated_at, workspace_id, template_version_id, build_number, transition, initiator_id, provisioner_state, job_id, deadline, reason, daily_cost, max_deadline, template_version_preset_id, has_ai_task, ai_task_sidebar_app_id, has_external_agent, initiator_by_avatar_url, initiator_by_username, initiator_by_name FROM workspace_build_with_user WHERE created_at > $1 ` func (q *sqlQuerier) GetWorkspaceBuildsCreatedAfter(ctx context.Context, createdAt time.Time) ([]WorkspaceBuild, error) { @@ -18747,6 +19359,7 @@ func (q *sqlQuerier) GetWorkspaceBuildsCreatedAfter(ctx context.Context, created &i.TemplateVersionPresetID, &i.HasAITask, &i.AITaskSidebarAppID, + &i.HasExternalAgent, &i.InitiatorByAvatarUrl, &i.InitiatorByUsername, &i.InitiatorByName, @@ -18823,33 +19436,6 @@ func (q *sqlQuerier) InsertWorkspaceBuild(ctx context.Context, arg InsertWorkspa return err } -const updateWorkspaceBuildAITaskByID = `-- name: UpdateWorkspaceBuildAITaskByID :exec -UPDATE - workspace_builds -SET - has_ai_task = $1, - ai_task_sidebar_app_id = $2, - updated_at = $3::timestamptz -WHERE id = $4::uuid -` - -type UpdateWorkspaceBuildAITaskByIDParams struct { - HasAITask sql.NullBool `db:"has_ai_task" json:"has_ai_task"` - SidebarAppID uuid.NullUUID `db:"sidebar_app_id" json:"sidebar_app_id"` - UpdatedAt time.Time `db:"updated_at" json:"updated_at"` - ID uuid.UUID `db:"id" json:"id"` -} - -func (q *sqlQuerier) UpdateWorkspaceBuildAITaskByID(ctx context.Context, arg UpdateWorkspaceBuildAITaskByIDParams) error { - _, err := q.db.ExecContext(ctx, updateWorkspaceBuildAITaskByID, - arg.HasAITask, - arg.SidebarAppID, - arg.UpdatedAt, - arg.ID, - ) - return err -} - const updateWorkspaceBuildCostByID = `-- name: UpdateWorkspaceBuildCostByID :exec UPDATE workspace_builds @@ -18876,7 +19462,15 @@ SET deadline = $1::timestamptz, max_deadline = $2::timestamptz, updated_at = $3::timestamptz -WHERE id = $4::uuid +FROM + workspaces +WHERE + workspace_builds.id = $4::uuid + AND workspace_builds.workspace_id = workspaces.id + -- Prebuilt workspaces (identified by having the prebuilds system user as owner_id) + -- are managed by the reconciliation loop, not the lifecycle executor which handles + -- deadline and max_deadline + AND workspaces.owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID ` type UpdateWorkspaceBuildDeadlineByIDParams struct { @@ -18896,6 +19490,36 @@ func (q *sqlQuerier) UpdateWorkspaceBuildDeadlineByID(ctx context.Context, arg U return err } +const updateWorkspaceBuildFlagsByID = `-- name: UpdateWorkspaceBuildFlagsByID :exec +UPDATE + workspace_builds +SET + has_ai_task = $1, + ai_task_sidebar_app_id = $2, + has_external_agent = $3, + updated_at = $4::timestamptz +WHERE id = $5::uuid +` + +type UpdateWorkspaceBuildFlagsByIDParams struct { + HasAITask sql.NullBool `db:"has_ai_task" json:"has_ai_task"` + SidebarAppID uuid.NullUUID `db:"sidebar_app_id" json:"sidebar_app_id"` + HasExternalAgent sql.NullBool `db:"has_external_agent" json:"has_external_agent"` + UpdatedAt time.Time `db:"updated_at" json:"updated_at"` + ID uuid.UUID `db:"id" json:"id"` +} + +func (q *sqlQuerier) UpdateWorkspaceBuildFlagsByID(ctx context.Context, arg UpdateWorkspaceBuildFlagsByIDParams) error { + _, err := q.db.ExecContext(ctx, updateWorkspaceBuildFlagsByID, + arg.HasAITask, + arg.SidebarAppID, + arg.HasExternalAgent, + arg.UpdatedAt, + arg.ID, + ) + return err +} + const updateWorkspaceBuildProvisionerStateByID = `-- name: UpdateWorkspaceBuildProvisionerStateByID :exec UPDATE workspace_builds @@ -19506,7 +20130,7 @@ func (q *sqlQuerier) GetDeploymentWorkspaceStats(ctx context.Context) (GetDeploy const getWorkspaceByAgentID = `-- name: GetWorkspaceByAgentID :one SELECT - id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description + id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description FROM workspaces_expanded as workspaces WHERE @@ -19554,6 +20178,8 @@ func (q *sqlQuerier) GetWorkspaceByAgentID(ctx context.Context, agentID uuid.UUI &i.AutomaticUpdates, &i.Favorite, &i.NextStartAt, + &i.GroupACL, + &i.UserACL, &i.OwnerAvatarUrl, &i.OwnerUsername, &i.OwnerName, @@ -19571,7 +20197,7 @@ func (q *sqlQuerier) GetWorkspaceByAgentID(ctx context.Context, agentID uuid.UUI const getWorkspaceByID = `-- name: GetWorkspaceByID :one SELECT - id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description + id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description FROM workspaces_expanded WHERE @@ -19600,6 +20226,8 @@ func (q *sqlQuerier) GetWorkspaceByID(ctx context.Context, id uuid.UUID) (Worksp &i.AutomaticUpdates, &i.Favorite, &i.NextStartAt, + &i.GroupACL, + &i.UserACL, &i.OwnerAvatarUrl, &i.OwnerUsername, &i.OwnerName, @@ -19617,7 +20245,7 @@ func (q *sqlQuerier) GetWorkspaceByID(ctx context.Context, id uuid.UUID) (Worksp const getWorkspaceByOwnerIDAndName = `-- name: GetWorkspaceByOwnerIDAndName :one SELECT - id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description + id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description FROM workspaces_expanded as workspaces WHERE @@ -19653,6 +20281,8 @@ func (q *sqlQuerier) GetWorkspaceByOwnerIDAndName(ctx context.Context, arg GetWo &i.AutomaticUpdates, &i.Favorite, &i.NextStartAt, + &i.GroupACL, + &i.UserACL, &i.OwnerAvatarUrl, &i.OwnerUsername, &i.OwnerName, @@ -19670,7 +20300,7 @@ func (q *sqlQuerier) GetWorkspaceByOwnerIDAndName(ctx context.Context, arg GetWo const getWorkspaceByResourceID = `-- name: GetWorkspaceByResourceID :one SELECT - id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description + id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description FROM workspaces_expanded as workspaces WHERE @@ -19713,6 +20343,8 @@ func (q *sqlQuerier) GetWorkspaceByResourceID(ctx context.Context, resourceID uu &i.AutomaticUpdates, &i.Favorite, &i.NextStartAt, + &i.GroupACL, + &i.UserACL, &i.OwnerAvatarUrl, &i.OwnerUsername, &i.OwnerName, @@ -19730,7 +20362,7 @@ func (q *sqlQuerier) GetWorkspaceByResourceID(ctx context.Context, resourceID uu const getWorkspaceByWorkspaceAppID = `-- name: GetWorkspaceByWorkspaceAppID :one SELECT - id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description + id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl, owner_avatar_url, owner_username, owner_name, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description FROM workspaces_expanded as workspaces WHERE @@ -19785,6 +20417,8 @@ func (q *sqlQuerier) GetWorkspaceByWorkspaceAppID(ctx context.Context, workspace &i.AutomaticUpdates, &i.Favorite, &i.NextStartAt, + &i.GroupACL, + &i.UserACL, &i.OwnerAvatarUrl, &i.OwnerUsername, &i.OwnerName, @@ -19845,7 +20479,7 @@ SELECT ), filtered_workspaces AS ( SELECT - workspaces.id, workspaces.created_at, workspaces.updated_at, workspaces.owner_id, workspaces.organization_id, workspaces.template_id, workspaces.deleted, workspaces.name, workspaces.autostart_schedule, workspaces.ttl, workspaces.last_used_at, workspaces.dormant_at, workspaces.deleting_at, workspaces.automatic_updates, workspaces.favorite, workspaces.next_start_at, workspaces.owner_avatar_url, workspaces.owner_username, workspaces.owner_name, workspaces.organization_name, workspaces.organization_display_name, workspaces.organization_icon, workspaces.organization_description, workspaces.template_name, workspaces.template_display_name, workspaces.template_icon, workspaces.template_description, + workspaces.id, workspaces.created_at, workspaces.updated_at, workspaces.owner_id, workspaces.organization_id, workspaces.template_id, workspaces.deleted, workspaces.name, workspaces.autostart_schedule, workspaces.ttl, workspaces.last_used_at, workspaces.dormant_at, workspaces.deleting_at, workspaces.automatic_updates, workspaces.favorite, workspaces.next_start_at, workspaces.group_acl, workspaces.user_acl, workspaces.owner_avatar_url, workspaces.owner_username, workspaces.owner_name, workspaces.organization_name, workspaces.organization_display_name, workspaces.organization_icon, workspaces.organization_description, workspaces.template_name, workspaces.template_display_name, workspaces.template_icon, workspaces.template_description, latest_build.template_version_id, latest_build.template_version_name, latest_build.completed_at as latest_build_completed_at, @@ -19853,7 +20487,8 @@ SELECT latest_build.error as latest_build_error, latest_build.transition as latest_build_transition, latest_build.job_status as latest_build_status, - latest_build.has_ai_task as latest_build_has_ai_task + latest_build.has_ai_task as latest_build_has_ai_task, + latest_build.has_external_agent as latest_build_has_external_agent FROM workspaces_expanded as workspaces JOIN @@ -19866,6 +20501,7 @@ LEFT JOIN LATERAL ( workspace_builds.transition, workspace_builds.template_version_id, workspace_builds.has_ai_task, + workspace_builds.has_external_agent, template_versions.name AS template_version_name, provisioner_jobs.id AS provisioner_job_id, provisioner_jobs.started_at, @@ -19893,7 +20529,7 @@ LEFT JOIN LATERAL ( ) latest_build ON TRUE LEFT JOIN LATERAL ( SELECT - id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow + id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow, cors_behavior FROM templates WHERE @@ -20106,16 +20742,22 @@ WHERE )) = ($19 :: boolean) ELSE true END + -- Filter by has_external_agent in latest build + AND CASE + WHEN $20 :: boolean IS NOT NULL THEN + latest_build.has_external_agent = $20 :: boolean + ELSE true + END -- Authorize Filter clause will be injected below in GetAuthorizedWorkspaces -- @authorize_filter ), filtered_workspaces_order AS ( SELECT - fw.id, fw.created_at, fw.updated_at, fw.owner_id, fw.organization_id, fw.template_id, fw.deleted, fw.name, fw.autostart_schedule, fw.ttl, fw.last_used_at, fw.dormant_at, fw.deleting_at, fw.automatic_updates, fw.favorite, fw.next_start_at, fw.owner_avatar_url, fw.owner_username, fw.owner_name, fw.organization_name, fw.organization_display_name, fw.organization_icon, fw.organization_description, fw.template_name, fw.template_display_name, fw.template_icon, fw.template_description, fw.template_version_id, fw.template_version_name, fw.latest_build_completed_at, fw.latest_build_canceled_at, fw.latest_build_error, fw.latest_build_transition, fw.latest_build_status, fw.latest_build_has_ai_task + fw.id, fw.created_at, fw.updated_at, fw.owner_id, fw.organization_id, fw.template_id, fw.deleted, fw.name, fw.autostart_schedule, fw.ttl, fw.last_used_at, fw.dormant_at, fw.deleting_at, fw.automatic_updates, fw.favorite, fw.next_start_at, fw.group_acl, fw.user_acl, fw.owner_avatar_url, fw.owner_username, fw.owner_name, fw.organization_name, fw.organization_display_name, fw.organization_icon, fw.organization_description, fw.template_name, fw.template_display_name, fw.template_icon, fw.template_description, fw.template_version_id, fw.template_version_name, fw.latest_build_completed_at, fw.latest_build_canceled_at, fw.latest_build_error, fw.latest_build_transition, fw.latest_build_status, fw.latest_build_has_ai_task, fw.latest_build_has_external_agent FROM filtered_workspaces fw ORDER BY -- To ensure that 'favorite' workspaces show up first in the list only for their owner. - CASE WHEN owner_id = $20 AND favorite THEN 0 ELSE 1 END ASC, + CASE WHEN owner_id = $21 AND favorite THEN 0 ELSE 1 END ASC, (latest_build_completed_at IS NOT NULL AND latest_build_canceled_at IS NULL AND latest_build_error IS NULL AND @@ -20124,14 +20766,14 @@ WHERE LOWER(name) ASC LIMIT CASE - WHEN $22 :: integer > 0 THEN - $22 + WHEN $23 :: integer > 0 THEN + $23 END OFFSET - $21 + $22 ), filtered_workspaces_order_with_summary AS ( SELECT - fwo.id, fwo.created_at, fwo.updated_at, fwo.owner_id, fwo.organization_id, fwo.template_id, fwo.deleted, fwo.name, fwo.autostart_schedule, fwo.ttl, fwo.last_used_at, fwo.dormant_at, fwo.deleting_at, fwo.automatic_updates, fwo.favorite, fwo.next_start_at, fwo.owner_avatar_url, fwo.owner_username, fwo.owner_name, fwo.organization_name, fwo.organization_display_name, fwo.organization_icon, fwo.organization_description, fwo.template_name, fwo.template_display_name, fwo.template_icon, fwo.template_description, fwo.template_version_id, fwo.template_version_name, fwo.latest_build_completed_at, fwo.latest_build_canceled_at, fwo.latest_build_error, fwo.latest_build_transition, fwo.latest_build_status, fwo.latest_build_has_ai_task + fwo.id, fwo.created_at, fwo.updated_at, fwo.owner_id, fwo.organization_id, fwo.template_id, fwo.deleted, fwo.name, fwo.autostart_schedule, fwo.ttl, fwo.last_used_at, fwo.dormant_at, fwo.deleting_at, fwo.automatic_updates, fwo.favorite, fwo.next_start_at, fwo.group_acl, fwo.user_acl, fwo.owner_avatar_url, fwo.owner_username, fwo.owner_name, fwo.organization_name, fwo.organization_display_name, fwo.organization_icon, fwo.organization_description, fwo.template_name, fwo.template_display_name, fwo.template_icon, fwo.template_description, fwo.template_version_id, fwo.template_version_name, fwo.latest_build_completed_at, fwo.latest_build_canceled_at, fwo.latest_build_error, fwo.latest_build_transition, fwo.latest_build_status, fwo.latest_build_has_ai_task, fwo.latest_build_has_external_agent FROM filtered_workspaces_order fwo -- Return a technical summary row with total count of workspaces. @@ -20154,6 +20796,8 @@ WHERE 'never'::automatic_updates, -- automatic_updates false, -- favorite '0001-01-01 00:00:00+00'::timestamptz, -- next_start_at + '{}'::jsonb, -- group_acl + '{}'::jsonb, -- user_acl '', -- owner_avatar_url '', -- owner_username '', -- owner_name @@ -20173,9 +20817,10 @@ WHERE '', -- latest_build_error 'start'::workspace_transition, -- latest_build_transition 'unknown'::provisioner_job_status, -- latest_build_status - false -- latest_build_has_ai_task + false, -- latest_build_has_ai_task + false -- latest_build_has_external_agent WHERE - $23 :: boolean = true + $24 :: boolean = true ), total_count AS ( SELECT count(*) AS count @@ -20183,7 +20828,7 @@ WHERE filtered_workspaces ) SELECT - fwos.id, fwos.created_at, fwos.updated_at, fwos.owner_id, fwos.organization_id, fwos.template_id, fwos.deleted, fwos.name, fwos.autostart_schedule, fwos.ttl, fwos.last_used_at, fwos.dormant_at, fwos.deleting_at, fwos.automatic_updates, fwos.favorite, fwos.next_start_at, fwos.owner_avatar_url, fwos.owner_username, fwos.owner_name, fwos.organization_name, fwos.organization_display_name, fwos.organization_icon, fwos.organization_description, fwos.template_name, fwos.template_display_name, fwos.template_icon, fwos.template_description, fwos.template_version_id, fwos.template_version_name, fwos.latest_build_completed_at, fwos.latest_build_canceled_at, fwos.latest_build_error, fwos.latest_build_transition, fwos.latest_build_status, fwos.latest_build_has_ai_task, + fwos.id, fwos.created_at, fwos.updated_at, fwos.owner_id, fwos.organization_id, fwos.template_id, fwos.deleted, fwos.name, fwos.autostart_schedule, fwos.ttl, fwos.last_used_at, fwos.dormant_at, fwos.deleting_at, fwos.automatic_updates, fwos.favorite, fwos.next_start_at, fwos.group_acl, fwos.user_acl, fwos.owner_avatar_url, fwos.owner_username, fwos.owner_name, fwos.organization_name, fwos.organization_display_name, fwos.organization_icon, fwos.organization_description, fwos.template_name, fwos.template_display_name, fwos.template_icon, fwos.template_description, fwos.template_version_id, fwos.template_version_name, fwos.latest_build_completed_at, fwos.latest_build_canceled_at, fwos.latest_build_error, fwos.latest_build_transition, fwos.latest_build_status, fwos.latest_build_has_ai_task, fwos.latest_build_has_external_agent, tc.count FROM filtered_workspaces_order_with_summary fwos @@ -20211,6 +20856,7 @@ type GetWorkspacesParams struct { LastUsedAfter time.Time `db:"last_used_after" json:"last_used_after"` UsingActive sql.NullBool `db:"using_active" json:"using_active"` HasAITask sql.NullBool `db:"has_ai_task" json:"has_ai_task"` + HasExternalAgent sql.NullBool `db:"has_external_agent" json:"has_external_agent"` RequesterID uuid.UUID `db:"requester_id" json:"requester_id"` Offset int32 `db:"offset_" json:"offset_"` Limit int32 `db:"limit_" json:"limit_"` @@ -20218,42 +20864,45 @@ type GetWorkspacesParams struct { } type GetWorkspacesRow struct { - ID uuid.UUID `db:"id" json:"id"` - CreatedAt time.Time `db:"created_at" json:"created_at"` - UpdatedAt time.Time `db:"updated_at" json:"updated_at"` - OwnerID uuid.UUID `db:"owner_id" json:"owner_id"` - OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` - TemplateID uuid.UUID `db:"template_id" json:"template_id"` - Deleted bool `db:"deleted" json:"deleted"` - Name string `db:"name" json:"name"` - AutostartSchedule sql.NullString `db:"autostart_schedule" json:"autostart_schedule"` - Ttl sql.NullInt64 `db:"ttl" json:"ttl"` - LastUsedAt time.Time `db:"last_used_at" json:"last_used_at"` - DormantAt sql.NullTime `db:"dormant_at" json:"dormant_at"` - DeletingAt sql.NullTime `db:"deleting_at" json:"deleting_at"` - AutomaticUpdates AutomaticUpdates `db:"automatic_updates" json:"automatic_updates"` - Favorite bool `db:"favorite" json:"favorite"` - NextStartAt sql.NullTime `db:"next_start_at" json:"next_start_at"` - OwnerAvatarUrl string `db:"owner_avatar_url" json:"owner_avatar_url"` - OwnerUsername string `db:"owner_username" json:"owner_username"` - OwnerName string `db:"owner_name" json:"owner_name"` - OrganizationName string `db:"organization_name" json:"organization_name"` - OrganizationDisplayName string `db:"organization_display_name" json:"organization_display_name"` - OrganizationIcon string `db:"organization_icon" json:"organization_icon"` - OrganizationDescription string `db:"organization_description" json:"organization_description"` - TemplateName string `db:"template_name" json:"template_name"` - TemplateDisplayName string `db:"template_display_name" json:"template_display_name"` - TemplateIcon string `db:"template_icon" json:"template_icon"` - TemplateDescription string `db:"template_description" json:"template_description"` - TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"` - TemplateVersionName sql.NullString `db:"template_version_name" json:"template_version_name"` - LatestBuildCompletedAt sql.NullTime `db:"latest_build_completed_at" json:"latest_build_completed_at"` - LatestBuildCanceledAt sql.NullTime `db:"latest_build_canceled_at" json:"latest_build_canceled_at"` - LatestBuildError sql.NullString `db:"latest_build_error" json:"latest_build_error"` - LatestBuildTransition WorkspaceTransition `db:"latest_build_transition" json:"latest_build_transition"` - LatestBuildStatus ProvisionerJobStatus `db:"latest_build_status" json:"latest_build_status"` - LatestBuildHasAITask sql.NullBool `db:"latest_build_has_ai_task" json:"latest_build_has_ai_task"` - Count int64 `db:"count" json:"count"` + ID uuid.UUID `db:"id" json:"id"` + CreatedAt time.Time `db:"created_at" json:"created_at"` + UpdatedAt time.Time `db:"updated_at" json:"updated_at"` + OwnerID uuid.UUID `db:"owner_id" json:"owner_id"` + OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` + TemplateID uuid.UUID `db:"template_id" json:"template_id"` + Deleted bool `db:"deleted" json:"deleted"` + Name string `db:"name" json:"name"` + AutostartSchedule sql.NullString `db:"autostart_schedule" json:"autostart_schedule"` + Ttl sql.NullInt64 `db:"ttl" json:"ttl"` + LastUsedAt time.Time `db:"last_used_at" json:"last_used_at"` + DormantAt sql.NullTime `db:"dormant_at" json:"dormant_at"` + DeletingAt sql.NullTime `db:"deleting_at" json:"deleting_at"` + AutomaticUpdates AutomaticUpdates `db:"automatic_updates" json:"automatic_updates"` + Favorite bool `db:"favorite" json:"favorite"` + NextStartAt sql.NullTime `db:"next_start_at" json:"next_start_at"` + GroupACL json.RawMessage `db:"group_acl" json:"group_acl"` + UserACL json.RawMessage `db:"user_acl" json:"user_acl"` + OwnerAvatarUrl string `db:"owner_avatar_url" json:"owner_avatar_url"` + OwnerUsername string `db:"owner_username" json:"owner_username"` + OwnerName string `db:"owner_name" json:"owner_name"` + OrganizationName string `db:"organization_name" json:"organization_name"` + OrganizationDisplayName string `db:"organization_display_name" json:"organization_display_name"` + OrganizationIcon string `db:"organization_icon" json:"organization_icon"` + OrganizationDescription string `db:"organization_description" json:"organization_description"` + TemplateName string `db:"template_name" json:"template_name"` + TemplateDisplayName string `db:"template_display_name" json:"template_display_name"` + TemplateIcon string `db:"template_icon" json:"template_icon"` + TemplateDescription string `db:"template_description" json:"template_description"` + TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"` + TemplateVersionName sql.NullString `db:"template_version_name" json:"template_version_name"` + LatestBuildCompletedAt sql.NullTime `db:"latest_build_completed_at" json:"latest_build_completed_at"` + LatestBuildCanceledAt sql.NullTime `db:"latest_build_canceled_at" json:"latest_build_canceled_at"` + LatestBuildError sql.NullString `db:"latest_build_error" json:"latest_build_error"` + LatestBuildTransition WorkspaceTransition `db:"latest_build_transition" json:"latest_build_transition"` + LatestBuildStatus ProvisionerJobStatus `db:"latest_build_status" json:"latest_build_status"` + LatestBuildHasAITask sql.NullBool `db:"latest_build_has_ai_task" json:"latest_build_has_ai_task"` + LatestBuildHasExternalAgent sql.NullBool `db:"latest_build_has_external_agent" json:"latest_build_has_external_agent"` + Count int64 `db:"count" json:"count"` } // build_params is used to filter by build parameters if present. @@ -20280,6 +20929,7 @@ func (q *sqlQuerier) GetWorkspaces(ctx context.Context, arg GetWorkspacesParams) arg.LastUsedAfter, arg.UsingActive, arg.HasAITask, + arg.HasExternalAgent, arg.RequesterID, arg.Offset, arg.Limit, @@ -20309,6 +20959,8 @@ func (q *sqlQuerier) GetWorkspaces(ctx context.Context, arg GetWorkspacesParams) &i.AutomaticUpdates, &i.Favorite, &i.NextStartAt, + &i.GroupACL, + &i.UserACL, &i.OwnerAvatarUrl, &i.OwnerUsername, &i.OwnerName, @@ -20328,6 +20980,7 @@ func (q *sqlQuerier) GetWorkspaces(ctx context.Context, arg GetWorkspacesParams) &i.LatestBuildTransition, &i.LatestBuildStatus, &i.LatestBuildHasAITask, + &i.LatestBuildHasExternalAgent, &i.Count, ); err != nil { return nil, err @@ -20423,7 +21076,7 @@ func (q *sqlQuerier) GetWorkspacesAndAgentsByOwnerID(ctx context.Context, ownerI } const getWorkspacesByTemplateID = `-- name: GetWorkspacesByTemplateID :many -SELECT id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at FROM workspaces WHERE template_id = $1 AND deleted = false +SELECT id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl FROM workspaces WHERE template_id = $1 AND deleted = false ` func (q *sqlQuerier) GetWorkspacesByTemplateID(ctx context.Context, templateID uuid.UUID) ([]WorkspaceTable, error) { @@ -20452,6 +21105,8 @@ func (q *sqlQuerier) GetWorkspacesByTemplateID(ctx context.Context, templateID u &i.AutomaticUpdates, &i.Favorite, &i.NextStartAt, + &i.GroupACL, + &i.UserACL, ); err != nil { return nil, err } @@ -20639,7 +21294,7 @@ INSERT INTO next_start_at ) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) RETURNING id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) RETURNING id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl ` type InsertWorkspaceParams struct { @@ -20690,6 +21345,8 @@ func (q *sqlQuerier) InsertWorkspace(ctx context.Context, arg InsertWorkspacePar &i.AutomaticUpdates, &i.Favorite, &i.NextStartAt, + &i.GroupACL, + &i.UserACL, ) return i, err } @@ -20729,7 +21386,7 @@ SET WHERE id = $1 AND deleted = false -RETURNING id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at +RETURNING id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl ` type UpdateWorkspaceParams struct { @@ -20757,10 +21414,33 @@ func (q *sqlQuerier) UpdateWorkspace(ctx context.Context, arg UpdateWorkspacePar &i.AutomaticUpdates, &i.Favorite, &i.NextStartAt, + &i.GroupACL, + &i.UserACL, ) return i, err } +const updateWorkspaceACLByID = `-- name: UpdateWorkspaceACLByID :exec +UPDATE + workspaces +SET + group_acl = $1, + user_acl = $2 +WHERE + id = $3 +` + +type UpdateWorkspaceACLByIDParams struct { + GroupACL WorkspaceACL `db:"group_acl" json:"group_acl"` + UserACL WorkspaceACL `db:"user_acl" json:"user_acl"` + ID uuid.UUID `db:"id" json:"id"` +} + +func (q *sqlQuerier) UpdateWorkspaceACLByID(ctx context.Context, arg UpdateWorkspaceACLByIDParams) error { + _, err := q.db.ExecContext(ctx, updateWorkspaceACLByID, arg.GroupACL, arg.UserACL, arg.ID) + return err +} + const updateWorkspaceAutomaticUpdates = `-- name: UpdateWorkspaceAutomaticUpdates :exec UPDATE workspaces @@ -20788,6 +21468,10 @@ SET next_start_at = $3 WHERE id = $1 + -- Prebuilt workspaces (identified by having the prebuilds system user as owner_id) + -- are managed by the reconciliation loop, not the lifecycle executor which handles + -- autostart_schedule and next_start_at + AND owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID ` type UpdateWorkspaceAutostartParams struct { @@ -20844,8 +21528,12 @@ FROM WHERE workspaces.id = $1 AND templates.id = workspaces.template_id + -- Prebuilt workspaces (identified by having the prebuilds system user as owner_id) + -- are managed by the reconciliation loop, not the lifecycle executor which handles + -- dormant_at and deleting_at + AND owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID RETURNING - workspaces.id, workspaces.created_at, workspaces.updated_at, workspaces.owner_id, workspaces.organization_id, workspaces.template_id, workspaces.deleted, workspaces.name, workspaces.autostart_schedule, workspaces.ttl, workspaces.last_used_at, workspaces.dormant_at, workspaces.deleting_at, workspaces.automatic_updates, workspaces.favorite, workspaces.next_start_at + workspaces.id, workspaces.created_at, workspaces.updated_at, workspaces.owner_id, workspaces.organization_id, workspaces.template_id, workspaces.deleted, workspaces.name, workspaces.autostart_schedule, workspaces.ttl, workspaces.last_used_at, workspaces.dormant_at, workspaces.deleting_at, workspaces.automatic_updates, workspaces.favorite, workspaces.next_start_at, workspaces.group_acl, workspaces.user_acl ` type UpdateWorkspaceDormantDeletingAtParams struct { @@ -20873,6 +21561,8 @@ func (q *sqlQuerier) UpdateWorkspaceDormantDeletingAt(ctx context.Context, arg U &i.AutomaticUpdates, &i.Favorite, &i.NextStartAt, + &i.GroupACL, + &i.UserACL, ) return i, err } @@ -20903,6 +21593,10 @@ SET next_start_at = $2 WHERE id = $1 + -- Prebuilt workspaces (identified by having the prebuilds system user as owner_id) + -- are managed by the reconciliation loop, not the lifecycle executor which handles + -- next_start_at + AND owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID ` type UpdateWorkspaceNextStartAtParams struct { @@ -20922,6 +21616,10 @@ SET ttl = $2 WHERE id = $1 + -- Prebuilt workspaces (identified by having the prebuilds system user as owner_id) + -- are managed by the reconciliation loop, not the lifecycle executor which handles + -- ttl + AND owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID ` type UpdateWorkspaceTTLParams struct { @@ -20939,15 +21637,18 @@ UPDATE workspaces SET deleting_at = CASE WHEN $1::bigint = 0 THEN NULL - WHEN $2::timestamptz > '0001-01-01 00:00:00+00'::timestamptz THEN ($2::timestamptz) + interval '1 milliseconds' * $1::bigint + WHEN $2::timestamptz > '0001-01-01 00:00:00+00'::timestamptz THEN ($2::timestamptz) + interval '1 milliseconds' * $1::bigint ELSE dormant_at + interval '1 milliseconds' * $1::bigint END, dormant_at = CASE WHEN $2::timestamptz > '0001-01-01 00:00:00+00'::timestamptz THEN $2::timestamptz ELSE dormant_at END WHERE template_id = $3 -AND - dormant_at IS NOT NULL -RETURNING id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at + AND dormant_at IS NOT NULL + -- Prebuilt workspaces (identified by having the prebuilds system user as owner_id) + -- should not have their dormant or deleting at set, as these are handled by the + -- prebuilds reconciliation loop. + AND workspaces.owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID +RETURNING id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, group_acl, user_acl ` type UpdateWorkspacesDormantDeletingAtByTemplateIDParams struct { @@ -20982,6 +21683,8 @@ func (q *sqlQuerier) UpdateWorkspacesDormantDeletingAtByTemplateID(ctx context.C &i.AutomaticUpdates, &i.Favorite, &i.NextStartAt, + &i.GroupACL, + &i.UserACL, ); err != nil { return nil, err } @@ -20998,11 +21701,15 @@ func (q *sqlQuerier) UpdateWorkspacesDormantDeletingAtByTemplateID(ctx context.C const updateWorkspacesTTLByTemplateID = `-- name: UpdateWorkspacesTTLByTemplateID :exec UPDATE - workspaces + workspaces SET - ttl = $2 + ttl = $2 WHERE - template_id = $1 + template_id = $1 + -- Prebuilt workspaces (identified by having the prebuilds system user as owner_id) + -- should not have their TTL updated, as they are handled by the prebuilds + -- reconciliation loop. + AND workspaces.owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID ` type UpdateWorkspacesTTLByTemplateIDParams struct { diff --git a/coderd/database/queries/activitybump.sql b/coderd/database/queries/activitybump.sql index 09349d29e5d06..e367a93abf778 100644 --- a/coderd/database/queries/activitybump.sql +++ b/coderd/database/queries/activitybump.sql @@ -22,7 +22,7 @@ WITH latest AS ( -- be as if the workspace auto started at the given time and the -- original TTL was applied. -- - -- Sadly we can't define `activity_bump_interval` above since + -- Sadly we can't define 'activity_bump_interval' above since -- it won't be available for this CASE statement, so we have to -- copy the cast twice. WHEN NOW() + (templates.activity_bump / 1000 / 1000 / 1000 || ' seconds')::interval > @next_autostart :: timestamptz @@ -52,7 +52,11 @@ WITH latest AS ( ON workspaces.id = workspace_builds.workspace_id JOIN templates ON templates.id = workspaces.template_id - WHERE workspace_builds.workspace_id = @workspace_id::uuid + WHERE + workspace_builds.workspace_id = @workspace_id::uuid + -- Prebuilt workspaces (identified by having the prebuilds system user as owner_id) + -- are managed by the reconciliation loop and not subject to activity bumping + AND workspaces.owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID ORDER BY workspace_builds.build_number DESC LIMIT 1 ) diff --git a/coderd/database/queries/externalauth.sql b/coderd/database/queries/externalauth.sql index 4368ce56589f0..9ca5cf6f871ad 100644 --- a/coderd/database/queries/externalauth.sql +++ b/coderd/database/queries/externalauth.sql @@ -40,13 +40,20 @@ UPDATE external_auth_links SET oauth_refresh_token = $6, oauth_refresh_token_key_id = $7, oauth_expiry = $8, - oauth_extra = $9 + oauth_extra = $9, + -- Only 'UpdateExternalAuthLinkRefreshToken' supports updating the oauth_refresh_failure_reason. + -- Any updates to the external auth link, will be assumed to change the state and clear + -- any cached errors. + oauth_refresh_failure_reason = '' WHERE provider_id = $1 AND user_id = $2 RETURNING *; -- name: UpdateExternalAuthLinkRefreshToken :exec UPDATE external_auth_links SET + -- oauth_refresh_failure_reason can be set to cache the failure reason + -- for subsequent refresh attempts. + oauth_refresh_failure_reason = @oauth_refresh_failure_reason, oauth_refresh_token = @oauth_refresh_token, updated_at = @updated_at WHERE diff --git a/coderd/database/queries/groups.sql b/coderd/database/queries/groups.sql index 48a5ba5c79968..3413e5832e27d 100644 --- a/coderd/database/queries/groups.sql +++ b/coderd/database/queries/groups.sql @@ -8,6 +8,24 @@ WHERE LIMIT 1; +-- name: ValidateGroupIDs :one +WITH input AS ( + SELECT + unnest(@group_ids::uuid[]) AS id +) +SELECT + array_agg(input.id)::uuid[] as invalid_group_ids, + COUNT(*) = 0 as ok +FROM + -- Preserve rows where there is not a matching left (groups) row for each + -- right (input) row... + groups + RIGHT JOIN input ON groups.id = input.id +WHERE + -- ...so that we can retain exactly those rows where an input ID does not + -- match an existing group. + groups.id IS NULL; + -- name: GetGroupByOrgAndName :one SELECT * diff --git a/coderd/database/queries/prebuilds.sql b/coderd/database/queries/prebuilds.sql index 37bff9487928e..8654453554e8c 100644 --- a/coderd/database/queries/prebuilds.sql +++ b/coderd/database/queries/prebuilds.sql @@ -2,7 +2,20 @@ UPDATE workspaces w SET owner_id = @new_user_id::uuid, name = @new_name::text, - updated_at = NOW() + updated_at = @now::timestamptz, + -- Update autostart_schedule, next_start_at and ttl according to template and workspace-level + -- configurations, allowing the workspace to be managed by the lifecycle executor as expected. + autostart_schedule = @autostart_schedule, + next_start_at = @next_start_at, + ttl = @workspace_ttl, + -- Update last_used_at during claim to ensure the claimed workspace is treated as recently used. + -- This avoids unintended dormancy caused by prebuilds having stale usage timestamps. + last_used_at = @now::timestamptz, + -- Clear dormant and deletion timestamps as a safeguard to ensure a clean lifecycle state after claim. + -- These fields should not be set on prebuilds, but we defensively reset them here to prevent + -- accidental dormancy or deletion by the lifecycle executor. + dormant_at = NULL, + deleting_at = NULL WHERE w.id IN ( SELECT p.id FROM workspace_prebuilds p @@ -232,3 +245,30 @@ INNER JOIN organizations o ON o.id = w.organization_id WHERE NOT t.deleted AND wpb.build_number = 1 GROUP BY t.name, tvp.name, o.name ORDER BY t.name, tvp.name, o.name; + +-- name: FindMatchingPresetID :one +-- FindMatchingPresetID finds a preset ID that is the largest exact subset of the provided parameters. +-- It returns the preset ID if a match is found, or NULL if no match is found. +-- The query finds presets where all preset parameters are present in the provided parameters, +-- and returns the preset with the most parameters (largest subset). +WITH provided_params AS ( + SELECT + unnest(@parameter_names::text[]) AS name, + unnest(@parameter_values::text[]) AS value +), +preset_matches AS ( + SELECT + tvp.id AS template_version_preset_id, + COALESCE(COUNT(tvpp.name), 0) AS total_preset_params, + COALESCE(COUNT(pp.name), 0) AS matching_params + FROM template_version_presets tvp + LEFT JOIN template_version_preset_parameters tvpp ON tvpp.template_version_preset_id = tvp.id + LEFT JOIN provided_params pp ON pp.name = tvpp.name AND pp.value = tvpp.value + WHERE tvp.template_version_id = @template_version_id + GROUP BY tvp.id +) +SELECT pm.template_version_preset_id +FROM preset_matches pm +WHERE pm.total_preset_params = pm.matching_params -- All preset parameters must match +ORDER BY pm.total_preset_params DESC -- Return the preset with the most parameters +LIMIT 1; diff --git a/coderd/database/queries/presets.sql b/coderd/database/queries/presets.sql index d275e4744c729..e6edcb4c59c1f 100644 --- a/coderd/database/queries/presets.sql +++ b/coderd/database/queries/presets.sql @@ -7,7 +7,9 @@ INSERT INTO template_version_presets ( desired_instances, invalidate_after_secs, scheduling_timezone, - is_default + is_default, + description, + icon ) VALUES ( @id, @@ -17,7 +19,9 @@ VALUES ( @desired_instances, @invalidate_after_secs, @scheduling_timezone, - @is_default + @is_default, + @description, + @icon ) RETURNING *; -- name: InsertPresetParameters :many diff --git a/coderd/database/queries/provisionerdaemons.sql b/coderd/database/queries/provisionerdaemons.sql index 4f7c7a8b2200a..ad6c0948eb448 100644 --- a/coderd/database/queries/provisionerdaemons.sql +++ b/coderd/database/queries/provisionerdaemons.sql @@ -32,13 +32,13 @@ WHERE SELECT sqlc.embed(pd), CASE - WHEN pd.last_seen_at IS NULL OR pd.last_seen_at < (NOW() - (@stale_interval_ms::bigint || ' ms')::interval) - THEN 'offline' - ELSE CASE - WHEN current_job.id IS NOT NULL THEN 'busy' - ELSE 'idle' - END - END::provisioner_daemon_status AS status, + WHEN current_job.id IS NOT NULL THEN 'busy'::provisioner_daemon_status + WHEN (COALESCE(sqlc.narg('offline')::bool, false) = true + OR 'offline'::provisioner_daemon_status = ANY(@statuses::provisioner_daemon_status[])) + AND (pd.last_seen_at IS NULL OR pd.last_seen_at < (NOW() - (@stale_interval_ms::bigint || ' ms')::interval)) + THEN 'offline'::provisioner_daemon_status + ELSE 'idle'::provisioner_daemon_status + END AS status, pk.name AS key_name, -- NOTE(mafredri): sqlc.embed doesn't support nullable tables nor renaming them. current_job.id AS current_job_id, @@ -110,6 +110,38 @@ WHERE pd.organization_id = @organization_id::uuid AND (COALESCE(array_length(@ids::uuid[], 1), 0) = 0 OR pd.id = ANY(@ids::uuid[])) AND (@tags::tagset = 'null'::tagset OR provisioner_tagset_contains(pd.tags::tagset, @tags::tagset)) + -- Filter by max age if provided + AND ( + sqlc.narg('max_age_ms')::bigint IS NULL + OR pd.last_seen_at IS NULL + OR pd.last_seen_at >= (NOW() - (sqlc.narg('max_age_ms')::bigint || ' ms')::interval) + ) + AND ( + -- Always include online daemons + (pd.last_seen_at IS NOT NULL AND pd.last_seen_at >= (NOW() - (@stale_interval_ms::bigint || ' ms')::interval)) + -- Include offline daemons if offline param is true or 'offline' status is requested + OR ( + (pd.last_seen_at IS NULL OR pd.last_seen_at < (NOW() - (@stale_interval_ms::bigint || ' ms')::interval)) + AND ( + COALESCE(sqlc.narg('offline')::bool, false) = true + OR 'offline'::provisioner_daemon_status = ANY(@statuses::provisioner_daemon_status[]) + ) + ) + ) + AND ( + -- Filter daemons by any statuses if provided + COALESCE(array_length(@statuses::provisioner_daemon_status[], 1), 0) = 0 + OR (current_job.id IS NOT NULL AND 'busy'::provisioner_daemon_status = ANY(@statuses::provisioner_daemon_status[])) + OR (current_job.id IS NULL AND 'idle'::provisioner_daemon_status = ANY(@statuses::provisioner_daemon_status[])) + OR ( + 'offline'::provisioner_daemon_status = ANY(@statuses::provisioner_daemon_status[]) + AND (pd.last_seen_at IS NULL OR pd.last_seen_at < (NOW() - (@stale_interval_ms::bigint || ' ms')::interval)) + ) + OR ( + COALESCE(sqlc.narg('offline')::bool, false) = true + AND (pd.last_seen_at IS NULL OR pd.last_seen_at < (NOW() - (@stale_interval_ms::bigint || ' ms')::interval)) + ) + ) ORDER BY pd.created_at DESC LIMIT diff --git a/coderd/database/queries/provisionerjoblogs.sql b/coderd/database/queries/provisionerjoblogs.sql index b98cf471f0d1a..c0ef188bdd382 100644 --- a/coderd/database/queries/provisionerjoblogs.sql +++ b/coderd/database/queries/provisionerjoblogs.sql @@ -19,3 +19,19 @@ SELECT unnest(@level :: log_level [ ]) AS LEVEL, unnest(@stage :: VARCHAR(128) [ ]) AS stage, unnest(@output :: VARCHAR(1024) [ ]) AS output RETURNING *; + +-- name: UpdateProvisionerJobLogsOverflowed :exec +UPDATE + provisioner_jobs +SET + logs_overflowed = $2 +WHERE + id = $1; + +-- name: UpdateProvisionerJobLogsLength :exec +UPDATE + provisioner_jobs +SET + logs_length = logs_length + $2 +WHERE + id = $1; diff --git a/coderd/database/queries/provisionerjobs.sql b/coderd/database/queries/provisionerjobs.sql index fcf348e089def..3ba581646689e 100644 --- a/coderd/database/queries/provisionerjobs.sql +++ b/coderd/database/queries/provisionerjobs.sql @@ -247,10 +247,11 @@ INSERT INTO "type", "input", tags, - trace_metadata + trace_metadata, + logs_overflowed ) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) RETURNING *; + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13) RETURNING *; -- name: UpdateProvisionerJobByID :exec UPDATE diff --git a/coderd/database/queries/tailnet.sql b/coderd/database/queries/tailnet.sql index 07936e277bc52..614d718789d63 100644 --- a/coderd/database/queries/tailnet.sql +++ b/coderd/database/queries/tailnet.sql @@ -150,7 +150,7 @@ DO UPDATE SET RETURNING *; -- name: UpdateTailnetPeerStatusByCoordinator :exec -UPDATE +UPDATE tailnet_peers SET status = $2 @@ -205,15 +205,17 @@ FROM tailnet_tunnels WHERE tailnet_tunnels.dst_id = $1; -- name: GetTailnetTunnelPeerBindings :many -SELECT tailnet_tunnels.dst_id as peer_id, tailnet_peers.coordinator_id, tailnet_peers.updated_at, tailnet_peers.node, tailnet_peers.status -FROM tailnet_tunnels -INNER JOIN tailnet_peers ON tailnet_tunnels.dst_id = tailnet_peers.id -WHERE tailnet_tunnels.src_id = $1 -UNION -SELECT tailnet_tunnels.src_id as peer_id, tailnet_peers.coordinator_id, tailnet_peers.updated_at, tailnet_peers.node, tailnet_peers.status -FROM tailnet_tunnels -INNER JOIN tailnet_peers ON tailnet_tunnels.src_id = tailnet_peers.id -WHERE tailnet_tunnels.dst_id = $1; +SELECT id AS peer_id, coordinator_id, updated_at, node, status +FROM tailnet_peers +WHERE id IN ( + SELECT dst_id as peer_id + FROM tailnet_tunnels + WHERE tailnet_tunnels.src_id = $1 + UNION + SELECT src_id as peer_id + FROM tailnet_tunnels + WHERE tailnet_tunnels.dst_id = $1 +); -- For PG Coordinator HTMLDebug diff --git a/coderd/database/queries/templates.sql b/coderd/database/queries/templates.sql index d10d09daaf6ea..4bb70c6580503 100644 --- a/coderd/database/queries/templates.sql +++ b/coderd/database/queries/templates.sql @@ -59,6 +59,25 @@ WHERE tv.has_ai_task = sqlc.narg('has_ai_task') :: boolean ELSE true END + -- Filter by author_id + AND CASE + WHEN @author_id :: uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN + t.created_by = @author_id + ELSE true + END + -- Filter by author_username + AND CASE + WHEN @author_username :: text != '' THEN + t.created_by = (SELECT id FROM users WHERE lower(users.username) = lower(@author_username) AND deleted = false) + ELSE true + END + + -- Filter by has_external_agent in latest version + AND CASE + WHEN sqlc.narg('has_external_agent') :: boolean IS NOT NULL THEN + tv.has_external_agent = sqlc.narg('has_external_agent') :: boolean + ELSE true + END -- Authorize Filter clause will be injected below in GetAuthorizedTemplates -- @authorize_filter ORDER BY (t.name, t.id) ASC @@ -99,10 +118,11 @@ INSERT INTO display_name, allow_user_cancel_workspace_jobs, max_port_sharing_level, - use_classic_parameter_flow + use_classic_parameter_flow, + cors_behavior ) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16); + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17); -- name: UpdateTemplateActiveVersionByID :exec UPDATE @@ -134,7 +154,8 @@ SET allow_user_cancel_workspace_jobs = $7, group_acl = $8, max_port_sharing_level = $9, - use_classic_parameter_flow = $10 + use_classic_parameter_flow = $10, + cors_behavior = $11 WHERE id = $1 ; diff --git a/coderd/database/queries/templateversions.sql b/coderd/database/queries/templateversions.sql index 4a37413d2f439..128b2e5f582da 100644 --- a/coderd/database/queries/templateversions.sql +++ b/coderd/database/queries/templateversions.sql @@ -122,15 +122,6 @@ SET WHERE job_id = $1; --- name: UpdateTemplateVersionAITaskByJobID :exec -UPDATE - template_versions -SET - has_ai_task = $2, - updated_at = $3 -WHERE - job_id = $1; - -- name: GetPreviousTemplateVersion :one SELECT * @@ -235,6 +226,19 @@ WHERE template_versions.id IN (archived_versions.id) RETURNING template_versions.id; --- name: HasTemplateVersionsWithAITask :one --- Determines if the template versions table has any rows with has_ai_task = TRUE. -SELECT EXISTS (SELECT 1 FROM template_versions WHERE has_ai_task = TRUE); +-- name: GetTemplateVersionHasAITask :one +SELECT EXISTS ( + SELECT 1 + FROM template_versions + WHERE id = $1 AND has_ai_task = TRUE +); + +-- name: UpdateTemplateVersionFlagsByJobID :exec +UPDATE + template_versions +SET + has_ai_task = $2, + has_external_agent = $3, + updated_at = $4 +WHERE + job_id = $1; diff --git a/coderd/database/queries/usageevents.sql b/coderd/database/queries/usageevents.sql new file mode 100644 index 0000000000000..85b53e04fd658 --- /dev/null +++ b/coderd/database/queries/usageevents.sql @@ -0,0 +1,86 @@ +-- name: InsertUsageEvent :exec +-- Duplicate events are ignored intentionally to allow for multiple replicas to +-- publish heartbeat events. +INSERT INTO + usage_events ( + id, + event_type, + event_data, + created_at, + publish_started_at, + published_at, + failure_message + ) +VALUES + (@id, @event_type, @event_data, @created_at, NULL, NULL, NULL) +ON CONFLICT (id) DO NOTHING; + +-- name: SelectUsageEventsForPublishing :many +WITH usage_events AS ( + UPDATE + usage_events + SET + publish_started_at = @now::timestamptz + WHERE + id IN ( + SELECT + potential_event.id + FROM + usage_events potential_event + WHERE + -- Do not publish events that have already been published or + -- have permanently failed to publish. + potential_event.published_at IS NULL + -- Do not publish events that are already being published by + -- another replica. + AND ( + potential_event.publish_started_at IS NULL + -- If the event has publish_started_at set, it must be older + -- than an hour ago. This is so we can retry publishing + -- events where the replica exited or couldn't update the + -- row. + -- The parenthesis around @now::timestamptz are necessary to + -- avoid sqlc from generating an extra argument. + OR potential_event.publish_started_at < (@now::timestamptz) - INTERVAL '1 hour' + ) + -- Do not publish events older than 30 days. Tallyman will + -- always permanently reject these events anyways. This is to + -- avoid duplicate events being billed to customers, as + -- Metronome will only deduplicate events within 34 days. + -- Also, the same parenthesis thing here as above. + AND potential_event.created_at > (@now::timestamptz) - INTERVAL '30 days' + ORDER BY potential_event.created_at ASC + FOR UPDATE SKIP LOCKED + LIMIT 100 + ) + RETURNING * +) +SELECT * +-- Note that this selects from the CTE, not the original table. The CTE is named +-- the same as the original table to trick sqlc into reusing the existing struct +-- for the table. +FROM usage_events +-- The CTE and the reorder is required because UPDATE doesn't guarantee order. +ORDER BY created_at ASC; + +-- name: UpdateUsageEventsPostPublish :exec +UPDATE + usage_events +SET + publish_started_at = NULL, + published_at = CASE WHEN input.set_published_at THEN @now::timestamptz ELSE NULL END, + failure_message = NULLIF(input.failure_message, '') +FROM ( + SELECT + UNNEST(@ids::text[]) AS id, + UNNEST(@failure_messages::text[]) AS failure_message, + UNNEST(@set_published_ats::boolean[]) AS set_published_at +) input +WHERE + input.id = usage_events.id + -- If the number of ids, failure messages, and set published ats are not the + -- same, do not do anything. Unfortunately you can't really throw from a + -- query without writing a function or doing some jank like dividing by + -- zero, so this is the best we can do. + AND cardinality(@ids::text[]) = cardinality(@failure_messages::text[]) + AND cardinality(@ids::text[]) = cardinality(@set_published_ats::boolean[]); diff --git a/coderd/database/queries/user_secrets.sql b/coderd/database/queries/user_secrets.sql new file mode 100644 index 0000000000000..271b97c9bb13c --- /dev/null +++ b/coderd/database/queries/user_secrets.sql @@ -0,0 +1,40 @@ +-- name: GetUserSecretByUserIDAndName :one +SELECT * FROM user_secrets +WHERE user_id = $1 AND name = $2; + +-- name: GetUserSecret :one +SELECT * FROM user_secrets +WHERE id = $1; + +-- name: ListUserSecrets :many +SELECT * FROM user_secrets +WHERE user_id = $1 +ORDER BY name ASC; + +-- name: CreateUserSecret :one +INSERT INTO user_secrets ( + id, + user_id, + name, + description, + value, + env_name, + file_path +) VALUES ( + $1, $2, $3, $4, $5, $6, $7 +) RETURNING *; + +-- name: UpdateUserSecret :one +UPDATE user_secrets +SET + description = $2, + value = $3, + env_name = $4, + file_path = $5, + updated_at = CURRENT_TIMESTAMP +WHERE id = $1 +RETURNING *; + +-- name: DeleteUserSecret :exec +DELETE FROM user_secrets +WHERE id = $1; diff --git a/coderd/database/queries/users.sql b/coderd/database/queries/users.sql index eece2f96512ea..0b6e52d6bc918 100644 --- a/coderd/database/queries/users.sql +++ b/coderd/database/queries/users.sql @@ -25,6 +25,26 @@ WHERE LIMIT 1; +-- name: ValidateUserIDs :one +WITH input AS ( + SELECT + unnest(@user_ids::uuid[]) AS id +) +SELECT + array_agg(input.id)::uuid[] as invalid_user_ids, + COUNT(*) = 0 as ok +FROM + -- Preserve rows where there is not a matching left (users) row for each + -- right (input) row... + users + RIGHT JOIN input ON users.id = input.id +WHERE + -- ...so that we can retain exactly those rows where an input ID does not + -- match an existing user... + users.id IS NULL OR + -- ...or that only matches a user that was deleted. + users.deleted = true; + -- name: GetUsersByIDs :many -- This shouldn't check for deleted, because it's frequently used -- to look up references to actions. eg. a user could build a workspace diff --git a/coderd/database/queries/workspacebuilds.sql b/coderd/database/queries/workspacebuilds.sql index be76b6642df1f..6c020f5a97f50 100644 --- a/coderd/database/queries/workspacebuilds.sql +++ b/coderd/database/queries/workspacebuilds.sql @@ -91,20 +91,6 @@ JOIN workspace_build_with_user AS wb ON m.workspace_id = wb.workspace_id AND m.max_build_number = wb.build_number; --- name: GetLatestWorkspaceBuilds :many -SELECT wb.* -FROM ( - SELECT - workspace_id, MAX(build_number) as max_build_number - FROM - workspace_build_with_user AS workspace_builds - GROUP BY - workspace_id -) m -JOIN - workspace_build_with_user AS wb -ON m.workspace_id = wb.workspace_id AND m.max_build_number = wb.build_number; - -- name: InsertWorkspaceBuild :exec INSERT INTO workspace_builds ( @@ -141,7 +127,15 @@ SET deadline = @deadline::timestamptz, max_deadline = @max_deadline::timestamptz, updated_at = @updated_at::timestamptz -WHERE id = @id::uuid; +FROM + workspaces +WHERE + workspace_builds.id = @id::uuid + AND workspace_builds.workspace_id = workspaces.id + -- Prebuilt workspaces (identified by having the prebuilds system user as owner_id) + -- are managed by the reconciliation loop, not the lifecycle executor which handles + -- deadline and max_deadline + AND workspaces.owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID; -- name: UpdateWorkspaceBuildProvisionerStateByID :exec UPDATE @@ -151,15 +145,6 @@ SET updated_at = @updated_at::timestamptz WHERE id = @id::uuid; --- name: UpdateWorkspaceBuildAITaskByID :exec -UPDATE - workspace_builds -SET - has_ai_task = @has_ai_task, - ai_task_sidebar_app_id = @sidebar_app_id, - updated_at = @updated_at::timestamptz -WHERE id = @id::uuid; - -- name: GetActiveWorkspaceBuildsByTemplateID :many SELECT wb.* FROM ( @@ -253,3 +238,13 @@ WHERE AND pj.job_status = 'failed' ORDER BY tv.name ASC, wb.build_number DESC; + +-- name: UpdateWorkspaceBuildFlagsByID :exec +UPDATE + workspace_builds +SET + has_ai_task = @has_ai_task, + ai_task_sidebar_app_id = @sidebar_app_id, + has_external_agent = @has_external_agent, + updated_at = @updated_at::timestamptz +WHERE id = @id::uuid; diff --git a/coderd/database/queries/workspaces.sql b/coderd/database/queries/workspaces.sql index f166d16f742cd..a3deda6863e85 100644 --- a/coderd/database/queries/workspaces.sql +++ b/coderd/database/queries/workspaces.sql @@ -117,7 +117,8 @@ SELECT latest_build.error as latest_build_error, latest_build.transition as latest_build_transition, latest_build.job_status as latest_build_status, - latest_build.has_ai_task as latest_build_has_ai_task + latest_build.has_ai_task as latest_build_has_ai_task, + latest_build.has_external_agent as latest_build_has_external_agent FROM workspaces_expanded as workspaces JOIN @@ -130,6 +131,7 @@ LEFT JOIN LATERAL ( workspace_builds.transition, workspace_builds.template_version_id, workspace_builds.has_ai_task, + workspace_builds.has_external_agent, template_versions.name AS template_version_name, provisioner_jobs.id AS provisioner_job_id, provisioner_jobs.started_at, @@ -370,6 +372,12 @@ WHERE )) = (sqlc.narg('has_ai_task') :: boolean) ELSE true END + -- Filter by has_external_agent in latest build + AND CASE + WHEN sqlc.narg('has_external_agent') :: boolean IS NOT NULL THEN + latest_build.has_external_agent = sqlc.narg('has_external_agent') :: boolean + ELSE true + END -- Authorize Filter clause will be injected below in GetAuthorizedWorkspaces -- @authorize_filter ), filtered_workspaces_order AS ( @@ -418,6 +426,8 @@ WHERE 'never'::automatic_updates, -- automatic_updates false, -- favorite '0001-01-01 00:00:00+00'::timestamptz, -- next_start_at + '{}'::jsonb, -- group_acl + '{}'::jsonb, -- user_acl '', -- owner_avatar_url '', -- owner_username '', -- owner_name @@ -437,7 +447,8 @@ WHERE '', -- latest_build_error 'start'::workspace_transition, -- latest_build_transition 'unknown'::provisioner_job_status, -- latest_build_status - false -- latest_build_has_ai_task + false, -- latest_build_has_ai_task + false -- latest_build_has_external_agent WHERE @with_summary :: boolean = true ), total_count AS ( @@ -516,7 +527,11 @@ SET autostart_schedule = $2, next_start_at = $3 WHERE - id = $1; + id = $1 + -- Prebuilt workspaces (identified by having the prebuilds system user as owner_id) + -- are managed by the reconciliation loop, not the lifecycle executor which handles + -- autostart_schedule and next_start_at + AND owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID; -- name: UpdateWorkspaceNextStartAt :exec UPDATE @@ -524,7 +539,11 @@ UPDATE SET next_start_at = $2 WHERE - id = $1; + id = $1 + -- Prebuilt workspaces (identified by having the prebuilds system user as owner_id) + -- are managed by the reconciliation loop, not the lifecycle executor which handles + -- next_start_at + AND owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID; -- name: BatchUpdateWorkspaceNextStartAt :exec UPDATE @@ -548,15 +567,23 @@ UPDATE SET ttl = $2 WHERE - id = $1; + id = $1 + -- Prebuilt workspaces (identified by having the prebuilds system user as owner_id) + -- are managed by the reconciliation loop, not the lifecycle executor which handles + -- ttl + AND owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID; -- name: UpdateWorkspacesTTLByTemplateID :exec UPDATE - workspaces + workspaces SET - ttl = $2 + ttl = $2 WHERE - template_id = $1; + template_id = $1 + -- Prebuilt workspaces (identified by having the prebuilds system user as owner_id) + -- should not have their TTL updated, as they are handled by the prebuilds + -- reconciliation loop. + AND workspaces.owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID; -- name: UpdateWorkspaceLastUsedAt :exec UPDATE @@ -789,6 +816,10 @@ FROM WHERE workspaces.id = $1 AND templates.id = workspaces.template_id + -- Prebuilt workspaces (identified by having the prebuilds system user as owner_id) + -- are managed by the reconciliation loop, not the lifecycle executor which handles + -- dormant_at and deleting_at + AND owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID RETURNING workspaces.*; @@ -797,14 +828,17 @@ UPDATE workspaces SET deleting_at = CASE WHEN @time_til_dormant_autodelete_ms::bigint = 0 THEN NULL - WHEN @dormant_at::timestamptz > '0001-01-01 00:00:00+00'::timestamptz THEN (@dormant_at::timestamptz) + interval '1 milliseconds' * @time_til_dormant_autodelete_ms::bigint + WHEN @dormant_at::timestamptz > '0001-01-01 00:00:00+00'::timestamptz THEN (@dormant_at::timestamptz) + interval '1 milliseconds' * @time_til_dormant_autodelete_ms::bigint ELSE dormant_at + interval '1 milliseconds' * @time_til_dormant_autodelete_ms::bigint END, dormant_at = CASE WHEN @dormant_at::timestamptz > '0001-01-01 00:00:00+00'::timestamptz THEN @dormant_at::timestamptz ELSE dormant_at END WHERE template_id = @template_id -AND - dormant_at IS NOT NULL + AND dormant_at IS NOT NULL + -- Prebuilt workspaces (identified by having the prebuilds system user as owner_id) + -- should not have their dormant or deleting at set, as these are handled by the + -- prebuilds reconciliation loop. + AND workspaces.owner_id != 'c42fdf75-3097-471c-8c33-fb52454d81c0'::UUID RETURNING *; -- name: UpdateTemplateWorkspacesLastUsedAt :exec @@ -871,3 +905,12 @@ GROUP BY workspaces.id, workspaces.name, latest_build.job_status, latest_build.j -- name: GetWorkspacesByTemplateID :many SELECT * FROM workspaces WHERE template_id = $1 AND deleted = false; + +-- name: UpdateWorkspaceACLByID :exec +UPDATE + workspaces +SET + group_acl = @group_acl, + user_acl = @user_acl +WHERE + id = @id; diff --git a/coderd/database/sdk2db/sdk2db.go b/coderd/database/sdk2db/sdk2db.go new file mode 100644 index 0000000000000..02fe8578179c9 --- /dev/null +++ b/coderd/database/sdk2db/sdk2db.go @@ -0,0 +1,16 @@ +// Package sdk2db provides common conversion routines from codersdk types to database types +package sdk2db + +import ( + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/db2sdk" + "github.com/coder/coder/v2/codersdk" +) + +func ProvisionerDaemonStatus(status codersdk.ProvisionerDaemonStatus) database.ProvisionerDaemonStatus { + return database.ProvisionerDaemonStatus(status) +} + +func ProvisionerDaemonStatuses(params []codersdk.ProvisionerDaemonStatus) []database.ProvisionerDaemonStatus { + return db2sdk.List(params, ProvisionerDaemonStatus) +} diff --git a/coderd/database/sdk2db/sdk2db_test.go b/coderd/database/sdk2db/sdk2db_test.go new file mode 100644 index 0000000000000..ff51dc0ffaaf4 --- /dev/null +++ b/coderd/database/sdk2db/sdk2db_test.go @@ -0,0 +1,36 @@ +package sdk2db_test + +import ( + "testing" + + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/sdk2db" + "github.com/coder/coder/v2/codersdk" +) + +func TestProvisionerDaemonStatus(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + input codersdk.ProvisionerDaemonStatus + expect database.ProvisionerDaemonStatus + }{ + {"busy", codersdk.ProvisionerDaemonBusy, database.ProvisionerDaemonStatusBusy}, + {"offline", codersdk.ProvisionerDaemonOffline, database.ProvisionerDaemonStatusOffline}, + {"idle", codersdk.ProvisionerDaemonIdle, database.ProvisionerDaemonStatusIdle}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + got := sdk2db.ProvisionerDaemonStatus(tc.input) + if !got.Valid() { + t.Errorf("ProvisionerDaemonStatus(%v) returned invalid status", tc.input) + } + if got != tc.expect { + t.Errorf("ProvisionerDaemonStatus(%v) = %v; want %v", tc.input, got, tc.expect) + } + }) + } +} diff --git a/coderd/database/sqlc.yaml b/coderd/database/sqlc.yaml index b96dabd1fc187..689eb1aaeb53b 100644 --- a/coderd/database/sqlc.yaml +++ b/coderd/database/sqlc.yaml @@ -73,6 +73,18 @@ sql: - column: "template_usage_stats.app_usage_mins" go_type: type: "StringMapOfInt" + - column: "workspaces.user_acl" + go_type: + type: "WorkspaceACL" + - column: "workspaces.group_acl" + go_type: + type: "WorkspaceACL" + - column: "workspaces_expanded.user_acl" + go_type: + type: "WorkspaceACL" + - column: "workspaces_expanded.group_acl" + go_type: + type: "WorkspaceACL" - column: "notification_templates.actions" go_type: type: "[]byte" @@ -150,6 +162,7 @@ sql: has_ai_task: HasAITask ai_task_sidebar_app_id: AITaskSidebarAppID latest_build_has_ai_task: LatestBuildHasAITask + cors_behavior: CorsBehavior rules: - name: do-not-use-public-schema-in-queries message: "do not use public schema in queries" diff --git a/coderd/database/types.go b/coderd/database/types.go index 6d0f036fe692c..01a7cce231061 100644 --- a/coderd/database/types.go +++ b/coderd/database/types.go @@ -77,6 +77,39 @@ func (t TemplateACL) Value() (driver.Value, error) { return json.Marshal(t) } +type WorkspaceACL map[string]WorkspaceACLEntry + +func (t *WorkspaceACL) Scan(src interface{}) error { + switch v := src.(type) { + case string: + return json.Unmarshal([]byte(v), &t) + case []byte, json.RawMessage: + //nolint + return json.Unmarshal(v.([]byte), &t) + } + + return xerrors.Errorf("unexpected type %T", src) +} + +//nolint:revive +func (w WorkspaceACL) RBACACL() map[string][]policy.Action { + // Convert WorkspaceACL to a map of string to []policy.Action. + // This is used for RBAC checks. + rbacACL := make(map[string][]policy.Action, len(w)) + for id, entry := range w { + rbacACL[id] = entry.Permissions + } + return rbacACL +} + +func (t WorkspaceACL) Value() (driver.Value, error) { + return json.Marshal(t) +} + +type WorkspaceACLEntry struct { + Permissions []policy.Action `json:"permissions"` +} + type ExternalAuthProvider struct { ID string `json:"id"` Optional bool `json:"optional,omitempty"` diff --git a/coderd/database/unique_constraint.go b/coderd/database/unique_constraint.go index 38c95e67410c9..1b0b13ea2ba5a 100644 --- a/coderd/database/unique_constraint.go +++ b/coderd/database/unique_constraint.go @@ -67,9 +67,11 @@ const ( UniqueTemplateVersionsPkey UniqueConstraint = "template_versions_pkey" // ALTER TABLE ONLY template_versions ADD CONSTRAINT template_versions_pkey PRIMARY KEY (id); UniqueTemplateVersionsTemplateIDNameKey UniqueConstraint = "template_versions_template_id_name_key" // ALTER TABLE ONLY template_versions ADD CONSTRAINT template_versions_template_id_name_key UNIQUE (template_id, name); UniqueTemplatesPkey UniqueConstraint = "templates_pkey" // ALTER TABLE ONLY templates ADD CONSTRAINT templates_pkey PRIMARY KEY (id); + UniqueUsageEventsPkey UniqueConstraint = "usage_events_pkey" // ALTER TABLE ONLY usage_events ADD CONSTRAINT usage_events_pkey PRIMARY KEY (id); UniqueUserConfigsPkey UniqueConstraint = "user_configs_pkey" // ALTER TABLE ONLY user_configs ADD CONSTRAINT user_configs_pkey PRIMARY KEY (user_id, key); UniqueUserDeletedPkey UniqueConstraint = "user_deleted_pkey" // ALTER TABLE ONLY user_deleted ADD CONSTRAINT user_deleted_pkey PRIMARY KEY (id); UniqueUserLinksPkey UniqueConstraint = "user_links_pkey" // ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_pkey PRIMARY KEY (user_id, login_type); + UniqueUserSecretsPkey UniqueConstraint = "user_secrets_pkey" // ALTER TABLE ONLY user_secrets ADD CONSTRAINT user_secrets_pkey PRIMARY KEY (id); UniqueUserStatusChangesPkey UniqueConstraint = "user_status_changes_pkey" // ALTER TABLE ONLY user_status_changes ADD CONSTRAINT user_status_changes_pkey PRIMARY KEY (id); UniqueUsersPkey UniqueConstraint = "users_pkey" // ALTER TABLE ONLY users ADD CONSTRAINT users_pkey PRIMARY KEY (id); UniqueWebpushSubscriptionsPkey UniqueConstraint = "webpush_subscriptions_pkey" // ALTER TABLE ONLY webpush_subscriptions ADD CONSTRAINT webpush_subscriptions_pkey PRIMARY KEY (id); @@ -115,6 +117,9 @@ const ( UniqueTemplateUsageStatsStartTimeTemplateIDUserIDIndex UniqueConstraint = "template_usage_stats_start_time_template_id_user_id_idx" // CREATE UNIQUE INDEX template_usage_stats_start_time_template_id_user_id_idx ON template_usage_stats USING btree (start_time, template_id, user_id); UniqueTemplatesOrganizationIDNameIndex UniqueConstraint = "templates_organization_id_name_idx" // CREATE UNIQUE INDEX templates_organization_id_name_idx ON templates USING btree (organization_id, lower((name)::text)) WHERE (deleted = false); UniqueUserLinksLinkedIDLoginTypeIndex UniqueConstraint = "user_links_linked_id_login_type_idx" // CREATE UNIQUE INDEX user_links_linked_id_login_type_idx ON user_links USING btree (linked_id, login_type) WHERE (linked_id <> ''::text); + UniqueUserSecretsUserEnvNameIndex UniqueConstraint = "user_secrets_user_env_name_idx" // CREATE UNIQUE INDEX user_secrets_user_env_name_idx ON user_secrets USING btree (user_id, env_name) WHERE (env_name <> ''::text); + UniqueUserSecretsUserFilePathIndex UniqueConstraint = "user_secrets_user_file_path_idx" // CREATE UNIQUE INDEX user_secrets_user_file_path_idx ON user_secrets USING btree (user_id, file_path) WHERE (file_path <> ''::text); + UniqueUserSecretsUserNameIndex UniqueConstraint = "user_secrets_user_name_idx" // CREATE UNIQUE INDEX user_secrets_user_name_idx ON user_secrets USING btree (user_id, name); UniqueUsersEmailLowerIndex UniqueConstraint = "users_email_lower_idx" // CREATE UNIQUE INDEX users_email_lower_idx ON users USING btree (lower(email)) WHERE (deleted = false); UniqueUsersUsernameLowerIndex UniqueConstraint = "users_username_lower_idx" // CREATE UNIQUE INDEX users_username_lower_idx ON users USING btree (lower(username)) WHERE (deleted = false); UniqueWorkspaceAppAuditSessionsUniqueIndex UniqueConstraint = "workspace_app_audit_sessions_unique_index" // CREATE UNIQUE INDEX workspace_app_audit_sessions_unique_index ON workspace_app_audit_sessions USING btree (agent_id, app_id, user_id, ip, user_agent, slug_or_port, status_code); diff --git a/coderd/dynamicparameters/error.go b/coderd/dynamicparameters/error.go index 4c27905bfa832..ae2217936b9dd 100644 --- a/coderd/dynamicparameters/error.go +++ b/coderd/dynamicparameters/error.go @@ -26,6 +26,14 @@ func tagValidationError(diags hcl.Diagnostics) *DiagnosticError { } } +func presetValidationError(diags hcl.Diagnostics) *DiagnosticError { + return &DiagnosticError{ + Message: "Unable to validate presets", + Diagnostics: diags, + KeyedDiagnostics: make(map[string]hcl.Diagnostics), + } +} + type DiagnosticError struct { // Message is the human-readable message that will be returned to the user. Message string diff --git a/coderd/dynamicparameters/presets.go b/coderd/dynamicparameters/presets.go new file mode 100644 index 0000000000000..24974962e029f --- /dev/null +++ b/coderd/dynamicparameters/presets.go @@ -0,0 +1,28 @@ +package dynamicparameters + +import ( + "github.com/hashicorp/hcl/v2" + + "github.com/coder/preview" +) + +// CheckPresets extracts the preset related diagnostics from a template version preset +func CheckPresets(output *preview.Output, diags hcl.Diagnostics) *DiagnosticError { + de := presetValidationError(diags) + if output == nil { + return de + } + + presets := output.Presets + for _, preset := range presets { + if hcl.Diagnostics(preset.Diagnostics).HasErrors() { + de.Extend(preset.Name, hcl.Diagnostics(preset.Diagnostics)) + } + } + + if de.HasError() { + return de + } + + return nil +} diff --git a/coderd/dynamicparameters/tags.go b/coderd/dynamicparameters/tags.go index 38a9bf4691571..d9037db5dd909 100644 --- a/coderd/dynamicparameters/tags.go +++ b/coderd/dynamicparameters/tags.go @@ -11,6 +11,10 @@ import ( func CheckTags(output *preview.Output, diags hcl.Diagnostics) *DiagnosticError { de := tagValidationError(diags) + if output == nil { + return de + } + failedTags := output.WorkspaceTags.UnusableTags() if len(failedTags) == 0 && !de.HasError() { return nil // No errors, all is good! diff --git a/coderd/entitlements/entitlements.go b/coderd/entitlements/entitlements.go index 6bbe32ade4a1b..1be422b4765ee 100644 --- a/coderd/entitlements/entitlements.go +++ b/coderd/entitlements/entitlements.go @@ -161,3 +161,9 @@ func (l *Set) Errors() []string { defer l.entitlementsMu.RUnlock() return slices.Clone(l.entitlements.Errors) } + +func (l *Set) HasLicense() bool { + l.entitlementsMu.RLock() + defer l.entitlementsMu.RUnlock() + return l.entitlements.HasLicense +} diff --git a/coderd/externalauth/externalauth.go b/coderd/externalauth/externalauth.go index 9b8b87748e784..24ebe13d03074 100644 --- a/coderd/externalauth/externalauth.go +++ b/coderd/externalauth/externalauth.go @@ -14,6 +14,7 @@ import ( "strings" "time" + "github.com/dustin/go-humanize" "golang.org/x/oauth2" "golang.org/x/xerrors" @@ -28,6 +29,13 @@ import ( "github.com/coder/retry" ) +const ( + // failureReasonLimit is the maximum text length of an error to be cached to the + // database for a failed refresh token. In rare cases, the error could be a large + // HTML payload. + failureReasonLimit = 400 +) + // Config is used for authentication for Git operations. type Config struct { promoauth.InstrumentedOAuth2Config @@ -121,11 +129,12 @@ func (c *Config) RefreshToken(ctx context.Context, db database.Store, externalAu return externalAuthLink, InvalidTokenError("token expired, refreshing is either disabled or refreshing failed and will not be retried") } + refreshToken := externalAuthLink.OAuthRefreshToken + // This is additional defensive programming. Because TokenSource is an interface, // we cannot be sure that the implementation will treat an 'IsZero' time // as "not-expired". The default implementation does, but a custom implementation // might not. Removing the refreshToken will guarantee a refresh will fail. - refreshToken := externalAuthLink.OAuthRefreshToken if c.NoRefresh { refreshToken = "" } @@ -136,15 +145,30 @@ func (c *Config) RefreshToken(ctx context.Context, db database.Store, externalAu Expiry: externalAuthLink.OAuthExpiry, } + // Note: The TokenSource(...) method will make no remote HTTP requests if the + // token is expired and no refresh token is set. This is important to prevent + // spamming the API, consuming rate limits, when the token is known to fail. token, err := c.TokenSource(ctx, existingToken).Token() if err != nil { // TokenSource can fail for numerous reasons. If it fails because of // a bad refresh token, then the refresh token is invalid, and we should // get rid of it. Keeping it around will cause additional refresh // attempts that will fail and cost us api rate limits. + // + // The error message is saved for debugging purposes. if isFailedRefresh(existingToken, err) { + reason := err.Error() + if len(reason) > failureReasonLimit { + // Limit the length of the error message to prevent + // spamming the database with long error messages. + reason = reason[:failureReasonLimit] + } dbExecErr := db.UpdateExternalAuthLinkRefreshToken(ctx, database.UpdateExternalAuthLinkRefreshTokenParams{ - OAuthRefreshToken: "", // It is better to clear the refresh token than to keep retrying. + // Adding a reason will prevent further attempts to try and refresh the token. + OauthRefreshFailureReason: reason, + // Remove the invalid refresh token so it is never used again. The cached + // `reason` can be used to know why this field was zeroed out. + OAuthRefreshToken: "", OAuthRefreshTokenKeyID: externalAuthLink.OAuthRefreshTokenKeyID.String, UpdatedAt: dbtime.Now(), ProviderID: externalAuthLink.ProviderID, @@ -156,12 +180,28 @@ func (c *Config) RefreshToken(ctx context.Context, db database.Store, externalAu } // The refresh token was cleared externalAuthLink.OAuthRefreshToken = "" + externalAuthLink.UpdatedAt = dbtime.Now() } // Unfortunately have to match exactly on the error message string. // Improve the error message to account refresh tokens are deleted if // invalid on our end. + // + // This error messages comes from the oauth2 package on our client side. + // So this check is not against a server generated error message. + // Error source: https://github.com/golang/oauth2/blob/master/oauth2.go#L277 if err.Error() == "oauth2: token expired and refresh token is not set" { + if externalAuthLink.OauthRefreshFailureReason != "" { + // A cached refresh failure error exists. So the refresh token was set, but was invalid, and zeroed out. + // Return this cached error for the original refresh attempt. + return externalAuthLink, InvalidTokenError(fmt.Sprintf("token expired and refreshing failed %s with: %s", + // Do not return the exact time, because then we have to know what timezone the + // user is in. This approximate time is good enough. + humanize.Time(externalAuthLink.UpdatedAt), + externalAuthLink.OauthRefreshFailureReason, + )) + } + return externalAuthLink, InvalidTokenError("token expired, refreshing is either disabled or refreshing failed and will not be retried") } diff --git a/coderd/externalauth/externalauth_test.go b/coderd/externalauth/externalauth_test.go index 81cf5aa1f21e2..8e46566ed2738 100644 --- a/coderd/externalauth/externalauth_test.go +++ b/coderd/externalauth/externalauth_test.go @@ -177,19 +177,25 @@ func TestRefreshToken(t *testing.T) { link.OAuthExpiry = expired // Make the failure a server internal error. Not related to the token + // This should be retried since this error is temporary. refreshErr = &oauth2.RetrieveError{ Response: &http.Response{ StatusCode: http.StatusInternalServerError, }, ErrorCode: "internal_error", } - _, err := config.RefreshToken(ctx, mDB, link) - require.Error(t, err) - require.True(t, externalauth.IsInvalidTokenError(err)) - require.Equal(t, refreshCount, 1) + totalRefreshes := 0 + for i := 0; i < 3; i++ { + // Each loop will hit the temporary error and retry. + _, err := config.RefreshToken(ctx, mDB, link) + require.Error(t, err) + totalRefreshes++ + require.True(t, externalauth.IsInvalidTokenError(err)) + require.Equal(t, refreshCount, totalRefreshes) + } - // Try again with a bad refresh token error - // Expect DB call to remove the refresh token + // Try again with a bad refresh token error. This will invalidate the + // refresh token, and not retry again. Expect DB call to remove the refresh token mDB.EXPECT().UpdateExternalAuthLinkRefreshToken(gomock.Any(), gomock.Any()).Return(nil).Times(1) refreshErr = &oauth2.RetrieveError{ // github error Response: &http.Response{ @@ -197,17 +203,18 @@ func TestRefreshToken(t *testing.T) { }, ErrorCode: "bad_refresh_token", } - _, err = config.RefreshToken(ctx, mDB, link) + _, err := config.RefreshToken(ctx, mDB, link) require.Error(t, err) + totalRefreshes++ require.True(t, externalauth.IsInvalidTokenError(err)) - require.Equal(t, refreshCount, 2) + require.Equal(t, refreshCount, totalRefreshes) // When the refresh token is empty, no api calls should be made link.OAuthRefreshToken = "" // mock'd db, so manually set the token to '' _, err = config.RefreshToken(ctx, mDB, link) require.Error(t, err) require.True(t, externalauth.IsInvalidTokenError(err)) - require.Equal(t, refreshCount, 2) + require.Equal(t, refreshCount, totalRefreshes) }) // ValidateFailure tests if the token is no longer valid with a 401 response. @@ -330,7 +337,6 @@ func TestRefreshToken(t *testing.T) { require.Equal(t, 1, validateCalls, "token is validated") require.Equal(t, 1, refreshCalls, "token is refreshed") require.NotEqualf(t, link.OAuthAccessToken, updated.OAuthAccessToken, "token is updated") - //nolint:gocritic // testing dbLink, err := db.GetExternalAuthLink(dbauthz.AsSystemRestricted(context.Background()), database.GetExternalAuthLinkParams{ ProviderID: link.ProviderID, UserID: link.UserID, diff --git a/coderd/files.go b/coderd/files.go index f82d1aa926c22..eaab00c401481 100644 --- a/coderd/files.go +++ b/coderd/files.go @@ -118,11 +118,23 @@ func (api *API) postFile(rw http.ResponseWriter, r *http.Request) { Data: data, }) if err != nil { - httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ - Message: "Internal error saving file.", - Detail: err.Error(), - }) - return + if database.IsUniqueViolation(err, database.UniqueFilesHashCreatedByKey) { + // The file was uploaded by some concurrent process since the last time we checked for it, fetch it again. + file, err = api.Database.GetFileByHashAndCreator(ctx, database.GetFileByHashAndCreatorParams{ + Hash: hash, + CreatedBy: apiKey.UserID, + }) + api.Logger.Info(ctx, "postFile handler hit UniqueViolation trying to upload file after already checking for the file existence", slog.F("hash", hash), slog.F("created_by_id", apiKey.UserID)) + } + // At this point the first error was either not the UniqueViolation OR there's still an error even after we + // attempt to fetch the file again, so we should return here. + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error saving file.", + Detail: err.Error(), + }) + return + } } httpapi.Write(ctx, rw, http.StatusCreated, codersdk.UploadResponse{ diff --git a/coderd/files/cache_test.go b/coderd/files/cache_test.go index 6f8f74e74fe8e..b81deae5d9714 100644 --- a/coderd/files/cache_test.go +++ b/coderd/files/cache_test.go @@ -45,7 +45,6 @@ func TestCancelledFetch(t *testing.T) { cache := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) // Cancel the context for the first call; should fail. - //nolint:gocritic // Unit testing ctx, cancel := context.WithCancel(dbauthz.AsFileReader(testutil.Context(t, testutil.WaitShort))) cancel() _, err := cache.Acquire(ctx, dbM, fileID) @@ -71,7 +70,6 @@ func TestCancelledConcurrentFetch(t *testing.T) { cache := files.LeakCache{Cache: files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{})} - //nolint:gocritic // Unit testing ctx := dbauthz.AsFileReader(testutil.Context(t, testutil.WaitShort)) // Cancel the context for the first call; should fail. @@ -99,7 +97,6 @@ func TestConcurrentFetch(t *testing.T) { }) cache := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) - //nolint:gocritic // Unit testing ctx := dbauthz.AsFileReader(testutil.Context(t, testutil.WaitShort)) // Expect 2 calls to Acquire before we continue the test @@ -151,7 +148,6 @@ func TestCacheRBAC(t *testing.T) { Scope: rbac.ScopeAll, }) - //nolint:gocritic // Unit testing cacheReader := dbauthz.AsFileReader(ctx) t.Run("NoRolesOpen", func(t *testing.T) { @@ -207,7 +203,6 @@ func cachePromMetricName(metric string) string { func TestConcurrency(t *testing.T) { t.Parallel() - //nolint:gocritic // Unit testing ctx := dbauthz.AsFileReader(t.Context()) const fileSize = 10 @@ -268,7 +263,6 @@ func TestConcurrency(t *testing.T) { func TestRelease(t *testing.T) { t.Parallel() - //nolint:gocritic // Unit testing ctx := dbauthz.AsFileReader(t.Context()) const fileSize = 10 diff --git a/coderd/files_test.go b/coderd/files_test.go index 974db6b18fc69..fb13cb30e48f1 100644 --- a/coderd/files_test.go +++ b/coderd/files_test.go @@ -5,6 +5,7 @@ import ( "bytes" "context" "net/http" + "sync" "testing" "github.com/google/uuid" @@ -69,6 +70,30 @@ func TestPostFiles(t *testing.T) { _, err = client.Upload(ctx, codersdk.ContentTypeTar, bytes.NewReader(data)) require.NoError(t, err) }) + t.Run("InsertConcurrent", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, nil) + _ = coderdtest.CreateFirstUser(t, client) + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + var wg sync.WaitGroup + var end sync.WaitGroup + wg.Add(1) + end.Add(3) + for range 3 { + go func() { + wg.Wait() + data := make([]byte, 1024) + _, err := client.Upload(ctx, codersdk.ContentTypeTar, bytes.NewReader(data)) + end.Done() + require.NoError(t, err) + }() + } + wg.Done() + end.Wait() + }) } func TestDownload(t *testing.T) { diff --git a/coderd/httpapi/queryparams.go b/coderd/httpapi/queryparams.go index 0e4a20920e526..e1bd983ea12a3 100644 --- a/coderd/httpapi/queryparams.go +++ b/coderd/httpapi/queryparams.go @@ -287,6 +287,29 @@ func (p *QueryParamParser) JSONStringMap(vals url.Values, def map[string]string, return v } +func (p *QueryParamParser) ProvisionerDaemonStatuses(vals url.Values, def []codersdk.ProvisionerDaemonStatus, queryParam string) []codersdk.ProvisionerDaemonStatus { + return ParseCustomList(p, vals, def, queryParam, func(v string) (codersdk.ProvisionerDaemonStatus, error) { + return codersdk.ProvisionerDaemonStatus(v), nil + }) +} + +func (p *QueryParamParser) Duration(vals url.Values, def time.Duration, queryParam string) time.Duration { + v, err := parseQueryParam(p, vals, func(v string) (time.Duration, error) { + d, err := time.ParseDuration(v) + if err != nil { + return 0, err + } + return d, nil + }, def, queryParam) + if err != nil { + p.Errors = append(p.Errors, codersdk.ValidationError{ + Field: queryParam, + Detail: fmt.Sprintf("Query param %q must be a valid duration (e.g., '24h', '30m', '1h30m'): %s", queryParam, err.Error()), + }) + } + return v +} + // ValidEnum represents an enum that can be parsed and validated. type ValidEnum interface { // Add more types as needed (avoid importing large dependency trees). diff --git a/coderd/httpmw/pprof.go b/coderd/httpmw/pprof.go new file mode 100644 index 0000000000000..4c51c1ebe552e --- /dev/null +++ b/coderd/httpmw/pprof.go @@ -0,0 +1,43 @@ +package httpmw + +import ( + "context" + "net/http" + "runtime/pprof" + + "github.com/coder/coder/v2/coderd/pproflabel" +) + +// WithProfilingLabels adds a pprof label to all http request handlers. This is +// primarily used to determine if load is coming from background jobs, or from +// http traffic. +func WithProfilingLabels(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + // Label to differentiate between http and websocket requests. Websocket requests + // are assumed to be long-lived and more resource consuming. + requestType := "http" + if r.Header.Get("Upgrade") == "websocket" { + requestType = "websocket" + } + + pprof.Do(ctx, pproflabel.Service(pproflabel.ServiceHTTPServer, pproflabel.RequestTypeTag, requestType), func(ctx context.Context) { + r = r.WithContext(ctx) + next.ServeHTTP(rw, r) + }) + }) +} + +func WithStaticProfilingLabels(labels pprof.LabelSet) func(next http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + pprof.Do(ctx, labels, func(ctx context.Context) { + r = r.WithContext(ctx) + next.ServeHTTP(rw, r) + }) + }) + } +} diff --git a/coderd/idpsync/group.go b/coderd/idpsync/group.go index 0b21c5b9ac84c..63ac0360f0cb3 100644 --- a/coderd/idpsync/group.go +++ b/coderd/idpsync/group.go @@ -4,6 +4,7 @@ import ( "context" "encoding/json" "fmt" + "net/http" "github.com/golang-jwt/jwt/v4" "github.com/google/uuid" @@ -71,9 +72,49 @@ func (s AGPLIDPSync) GroupSyncSettings(ctx context.Context, orgID uuid.UUID, db return settings, nil } -func (s AGPLIDPSync) ParseGroupClaims(_ context.Context, _ jwt.MapClaims) (GroupParams, *HTTPError) { +func (s AGPLIDPSync) ParseGroupClaims(_ context.Context, mergedClaims jwt.MapClaims) (GroupParams, *HTTPError) { + if s.GroupField != "" && len(s.GroupAllowList) > 0 { + groupsRaw, ok := mergedClaims[s.GroupField] + if !ok { + return GroupParams{}, &HTTPError{ + Code: http.StatusForbidden, + Msg: "Not a member of an allowed group", + Detail: "You have no groups in your claims!", + RenderStaticPage: true, + } + } + parsedGroups, err := ParseStringSliceClaim(groupsRaw) + if err != nil { + return GroupParams{}, &HTTPError{ + Code: http.StatusBadRequest, + Msg: "Failed read groups from claims for allow list check. Ask an administrator for help.", + Detail: err.Error(), + RenderStaticPage: true, + } + } + + inAllowList := false + AllowListCheckLoop: + for _, group := range parsedGroups { + if _, ok := s.GroupAllowList[group]; ok { + inAllowList = true + break AllowListCheckLoop + } + } + + if !inAllowList { + return GroupParams{}, &HTTPError{ + Code: http.StatusForbidden, + Msg: "Not a member of an allowed group", + Detail: "Ask an administrator to add one of your groups to the allow list.", + RenderStaticPage: true, + } + } + } + return GroupParams{ SyncEntitled: s.GroupSyncEntitled(), + MergedClaims: mergedClaims, }, nil } diff --git a/coderd/idpsync/group_test.go b/coderd/idpsync/group_test.go index 478d6557de551..459a5dbcfaab0 100644 --- a/coderd/idpsync/group_test.go +++ b/coderd/idpsync/group_test.go @@ -44,8 +44,7 @@ func TestParseGroupClaims(t *testing.T) { require.False(t, params.SyncEntitled) }) - // AllowList has no effect in AGPL - t.Run("AllowList", func(t *testing.T) { + t.Run("NotInAllowList", func(t *testing.T) { t.Parallel() s := idpsync.NewAGPLSync(slogtest.Make(t, &slogtest.Options{}), @@ -59,9 +58,39 @@ func TestParseGroupClaims(t *testing.T) { ctx := testutil.Context(t, testutil.WaitMedium) - params, err := s.ParseGroupClaims(ctx, jwt.MapClaims{}) + // Invalid group + _, err := s.ParseGroupClaims(ctx, jwt.MapClaims{ + "groups": []string{"bar"}, + }) + require.NotNil(t, err) + require.Equal(t, 403, err.Code) + + // No groups + _, err = s.ParseGroupClaims(ctx, jwt.MapClaims{}) + require.NotNil(t, err) + require.Equal(t, 403, err.Code) + }) + + t.Run("InAllowList", func(t *testing.T) { + t.Parallel() + + s := idpsync.NewAGPLSync(slogtest.Make(t, &slogtest.Options{}), + runtimeconfig.NewManager(), + idpsync.DeploymentSyncSettings{ + GroupField: "groups", + GroupAllowList: map[string]struct{}{ + "foo": {}, + }, + }) + + ctx := testutil.Context(t, testutil.WaitMedium) + + claims := jwt.MapClaims{ + "groups": []string{"foo", "bar"}, + } + params, err := s.ParseGroupClaims(ctx, claims) require.Nil(t, err) - require.False(t, params.SyncEntitled) + require.Equal(t, claims, params.MergedClaims) }) } @@ -328,7 +357,6 @@ func TestGroupSyncTable(t *testing.T) { }, } - //nolint:gocritic // testing defOrg, err := db.GetDefaultOrganization(dbauthz.AsSystemRestricted(ctx)) require.NoError(t, err) SetupOrganization(t, s, db, user, defOrg.ID, def) @@ -527,7 +555,6 @@ func TestApplyGroupDifference(t *testing.T) { db, _ := dbtestutil.NewDB(t) ctx := testutil.Context(t, testutil.WaitMedium) - //nolint:gocritic // testing ctx = dbauthz.AsSystemRestricted(ctx) org := dbgen.Organization(t, db, database.Organization{}) diff --git a/coderd/idpsync/role_test.go b/coderd/idpsync/role_test.go index 6df091097b966..db172e0ee4237 100644 --- a/coderd/idpsync/role_test.go +++ b/coderd/idpsync/role_test.go @@ -273,7 +273,6 @@ func TestRoleSyncTable(t *testing.T) { } // Also assert site wide roles - //nolint:gocritic // unit testing assertions allRoles, err := db.GetAuthorizationUserRoles(dbauthz.AsSystemRestricted(ctx), user.ID) require.NoError(t, err) diff --git a/coderd/initscript.go b/coderd/initscript.go new file mode 100644 index 0000000000000..2051ca7f5f6e4 --- /dev/null +++ b/coderd/initscript.go @@ -0,0 +1,45 @@ +package coderd + +import ( + "crypto/sha256" + "encoding/base64" + "fmt" + "net/http" + "strings" + + "github.com/go-chi/chi/v5" + + "github.com/coder/coder/v2/coderd/httpapi" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/provisionersdk" +) + +// @Summary Get agent init script +// @ID get-agent-init-script +// @Produce text/plain +// @Tags InitScript +// @Param os path string true "Operating system" +// @Param arch path string true "Architecture" +// @Success 200 "Success" +// @Router /init-script/{os}/{arch} [get] +func (api *API) initScript(rw http.ResponseWriter, r *http.Request) { + os := strings.ToLower(chi.URLParam(r, "os")) + arch := strings.ToLower(chi.URLParam(r, "arch")) + + script, exists := provisionersdk.AgentScriptEnv()[fmt.Sprintf("CODER_AGENT_SCRIPT_%s_%s", os, arch)] + if !exists { + httpapi.Write(r.Context(), rw, http.StatusBadRequest, codersdk.Response{ + Message: fmt.Sprintf("Unknown os/arch: %s/%s", os, arch), + }) + return + } + script = strings.ReplaceAll(script, "${ACCESS_URL}", api.AccessURL.String()+"/") + script = strings.ReplaceAll(script, "${AUTH_TYPE}", "token") + + scriptBytes := []byte(script) + hash := sha256.Sum256(scriptBytes) + rw.Header().Set("Content-Digest", fmt.Sprintf("sha256:%x", base64.StdEncoding.EncodeToString(hash[:]))) + rw.Header().Set("Content-Type", "text/plain; charset=utf-8") + rw.WriteHeader(http.StatusOK) + _, _ = rw.Write(scriptBytes) +} diff --git a/coderd/initscript_test.go b/coderd/initscript_test.go new file mode 100644 index 0000000000000..bad0577f0218f --- /dev/null +++ b/coderd/initscript_test.go @@ -0,0 +1,67 @@ +package coderd_test + +import ( + "context" + "net/http" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/codersdk" +) + +func TestInitScript(t *testing.T) { + t.Parallel() + + t.Run("OK Windows amd64", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, nil) + script, err := client.InitScript(context.Background(), "windows", "amd64") + require.NoError(t, err) + require.NotEmpty(t, script) + require.Contains(t, script, "$env:CODER_AGENT_AUTH = \"token\"") + require.Contains(t, script, "/bin/coder-windows-amd64.exe") + }) + + t.Run("OK Windows arm64", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, nil) + script, err := client.InitScript(context.Background(), "windows", "arm64") + require.NoError(t, err) + require.NotEmpty(t, script) + require.Contains(t, script, "$env:CODER_AGENT_AUTH = \"token\"") + require.Contains(t, script, "/bin/coder-windows-arm64.exe") + }) + + t.Run("OK Linux amd64", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, nil) + script, err := client.InitScript(context.Background(), "linux", "amd64") + require.NoError(t, err) + require.NotEmpty(t, script) + require.Contains(t, script, "export CODER_AGENT_AUTH=\"token\"") + require.Contains(t, script, "/bin/coder-linux-amd64") + }) + + t.Run("OK Linux arm64", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, nil) + script, err := client.InitScript(context.Background(), "linux", "arm64") + require.NoError(t, err) + require.NotEmpty(t, script) + require.Contains(t, script, "export CODER_AGENT_AUTH=\"token\"") + require.Contains(t, script, "/bin/coder-linux-arm64") + }) + + t.Run("BadRequest", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, nil) + _, err := client.InitScript(context.Background(), "darwin", "armv7") + require.Error(t, err) + var apiErr *codersdk.Error + require.ErrorAs(t, err, &apiErr) + require.Equal(t, http.StatusBadRequest, apiErr.StatusCode()) + require.Equal(t, "Unknown os/arch: darwin/armv7", apiErr.Message) + }) +} diff --git a/coderd/insights_test.go b/coderd/insights_test.go index ded030351a3b3..cf5f63065df99 100644 --- a/coderd/insights_test.go +++ b/coderd/insights_test.go @@ -665,10 +665,11 @@ func TestTemplateInsights_Golden(t *testing.T) { // where we can control the template ID. // createdTemplate := coderdtest.CreateTemplate(t, client, firstUser.OrganizationID, version.ID) createdTemplate := dbgen.Template(t, db, database.Template{ - ID: template.id, - ActiveVersionID: version.ID, - OrganizationID: firstUser.OrganizationID, - CreatedBy: firstUser.UserID, + ID: template.id, + ActiveVersionID: version.ID, + OrganizationID: firstUser.OrganizationID, + CreatedBy: firstUser.UserID, + UseClassicParameterFlow: true, // Required for testing classic parameter flow behavior GroupACL: database.TemplateACL{ firstUser.OrganizationID.String(): db2sdk.TemplateRoleActions(codersdk.TemplateRoleUse), }, @@ -753,7 +754,6 @@ func TestTemplateInsights_Golden(t *testing.T) { Database: db, AppStatBatchSize: workspaceapps.DefaultStatsDBReporterBatchSize, }) - //nolint:gocritic // This is a test. err = reporter.ReportAppStats(dbauthz.AsSystemRestricted(ctx), stats) require.NoError(t, err, "want no error inserting app stats") @@ -1556,10 +1556,11 @@ func TestUserActivityInsights_Golden(t *testing.T) { // where we can control the template ID. // createdTemplate := coderdtest.CreateTemplate(t, client, firstUser.OrganizationID, version.ID) createdTemplate := dbgen.Template(t, db, database.Template{ - ID: template.id, - ActiveVersionID: version.ID, - OrganizationID: firstUser.OrganizationID, - CreatedBy: firstUser.UserID, + ID: template.id, + ActiveVersionID: version.ID, + OrganizationID: firstUser.OrganizationID, + CreatedBy: firstUser.UserID, + UseClassicParameterFlow: true, // Required for parameter usage tracking in this test GroupACL: database.TemplateACL{ firstUser.OrganizationID.String(): db2sdk.TemplateRoleActions(codersdk.TemplateRoleUse), }, @@ -1644,7 +1645,6 @@ func TestUserActivityInsights_Golden(t *testing.T) { Database: db, AppStatBatchSize: workspaceapps.DefaultStatsDBReporterBatchSize, }) - //nolint:gocritic // This is a test. err = reporter.ReportAppStats(dbauthz.AsSystemRestricted(ctx), stats) require.NoError(t, err, "want no error inserting app stats") diff --git a/coderd/mcp/mcp.go b/coderd/mcp/mcp.go index f17ab5ae7cd93..3696beff500a1 100644 --- a/coderd/mcp/mcp.go +++ b/coderd/mcp/mcp.go @@ -67,7 +67,9 @@ func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) { s.streamableServer.ServeHTTP(w, r) } -// RegisterTools registers all available MCP tools with the server +// Register all available MCP tools with the server excluding: +// - ReportTask - which requires dependencies not available in the remote MCP context +// - ChatGPT search and fetch tools, which are redundant with the standard tools. func (s *Server) RegisterTools(client *codersdk.Client) error { if client == nil { return xerrors.New("client cannot be nil: MCP HTTP server requires authenticated client") @@ -79,10 +81,36 @@ func (s *Server) RegisterTools(client *codersdk.Client) error { return xerrors.Errorf("failed to initialize tool dependencies: %w", err) } - // Register all available tools, but exclude tools that require dependencies not available in the - // remote MCP context for _, tool := range toolsdk.All { - if tool.Name == toolsdk.ToolNameReportTask { + // the ReportTask tool requires dependencies not available in the remote MCP context + // the ChatGPT search and fetch tools are redundant with the standard tools. + if tool.Name == toolsdk.ToolNameReportTask || + tool.Name == toolsdk.ToolNameChatGPTSearch || tool.Name == toolsdk.ToolNameChatGPTFetch { + continue + } + + s.mcpServer.AddTools(mcpFromSDK(tool, toolDeps)) + } + return nil +} + +// ChatGPT tools are the search and fetch tools as defined in https://platform.openai.com/docs/mcp. +// We do not expose any extra ones because ChatGPT has an undocumented "Safety Scan" feature. +// In my experiments, if I included extra tools in the MCP server, ChatGPT would often - but not always - +// refuse to add Coder as a connector. +func (s *Server) RegisterChatGPTTools(client *codersdk.Client) error { + if client == nil { + return xerrors.New("client cannot be nil: MCP HTTP server requires authenticated client") + } + + // Create tool dependencies + toolDeps, err := toolsdk.NewDeps(client) + if err != nil { + return xerrors.Errorf("failed to initialize tool dependencies: %w", err) + } + + for _, tool := range toolsdk.All { + if tool.Name != toolsdk.ToolNameChatGPTSearch && tool.Name != toolsdk.ToolNameChatGPTFetch { continue } diff --git a/coderd/mcp/mcp_e2e_test.go b/coderd/mcp/mcp_e2e_test.go index 248786405fda9..b831d150c2c0d 100644 --- a/coderd/mcp/mcp_e2e_test.go +++ b/coderd/mcp/mcp_e2e_test.go @@ -1215,6 +1215,155 @@ func TestMCPHTTP_E2E_OAuth2_EndToEnd(t *testing.T) { }) } +func TestMCPHTTP_E2E_ChatGPTEndpoint(t *testing.T) { + t.Parallel() + + // Setup Coder server with authentication + coderClient, closer, api := coderdtest.NewWithAPI(t, &coderdtest.Options{ + IncludeProvisionerDaemon: true, + }) + defer closer.Close() + + user := coderdtest.CreateFirstUser(t, coderClient) + + // Create template and workspace for testing search functionality + version := coderdtest.CreateTemplateVersion(t, coderClient, user.OrganizationID, nil) + coderdtest.AwaitTemplateVersionJobCompleted(t, coderClient, version.ID) + template := coderdtest.CreateTemplate(t, coderClient, user.OrganizationID, version.ID) + + // Create MCP client pointing to the ChatGPT endpoint + mcpURL := api.AccessURL.String() + "/api/experimental/mcp/http?toolset=chatgpt" + + // Configure client with authentication headers using RFC 6750 Bearer token + mcpClient, err := mcpclient.NewStreamableHttpClient(mcpURL, + transport.WithHTTPHeaders(map[string]string{ + "Authorization": "Bearer " + coderClient.SessionToken(), + })) + require.NoError(t, err) + t.Cleanup(func() { + if closeErr := mcpClient.Close(); closeErr != nil { + t.Logf("Failed to close MCP client: %v", closeErr) + } + }) + + ctx, cancel := context.WithTimeout(t.Context(), testutil.WaitLong) + defer cancel() + + // Start client + err = mcpClient.Start(ctx) + require.NoError(t, err) + + // Initialize connection + initReq := mcp.InitializeRequest{ + Params: mcp.InitializeParams{ + ProtocolVersion: mcp.LATEST_PROTOCOL_VERSION, + ClientInfo: mcp.Implementation{ + Name: "test-chatgpt-client", + Version: "1.0.0", + }, + }, + } + + result, err := mcpClient.Initialize(ctx, initReq) + require.NoError(t, err) + require.Equal(t, mcpserver.MCPServerName, result.ServerInfo.Name) + require.Equal(t, mcp.LATEST_PROTOCOL_VERSION, result.ProtocolVersion) + require.NotNil(t, result.Capabilities) + + // Test tool listing - should only have search and fetch tools for ChatGPT + tools, err := mcpClient.ListTools(ctx, mcp.ListToolsRequest{}) + require.NoError(t, err) + require.NotEmpty(t, tools.Tools) + + // Verify we have exactly the ChatGPT tools and no others + var foundTools []string + for _, tool := range tools.Tools { + foundTools = append(foundTools, tool.Name) + } + + // ChatGPT endpoint should only expose search and fetch tools + assert.Contains(t, foundTools, toolsdk.ToolNameChatGPTSearch, "Should have ChatGPT search tool") + assert.Contains(t, foundTools, toolsdk.ToolNameChatGPTFetch, "Should have ChatGPT fetch tool") + assert.Len(t, foundTools, 2, "ChatGPT endpoint should only expose search and fetch tools") + + // Should NOT have other tools that are available in the standard endpoint + assert.NotContains(t, foundTools, toolsdk.ToolNameGetAuthenticatedUser, "Should not have authenticated user tool") + assert.NotContains(t, foundTools, toolsdk.ToolNameListWorkspaces, "Should not have list workspaces tool") + + t.Logf("ChatGPT endpoint tools: %v", foundTools) + + // Test search tool - search for templates + var searchTool *mcp.Tool + for _, tool := range tools.Tools { + if tool.Name == toolsdk.ToolNameChatGPTSearch { + searchTool = &tool + break + } + } + require.NotNil(t, searchTool, "Expected to find search tool") + + // Execute search for templates + searchReq := mcp.CallToolRequest{ + Params: mcp.CallToolParams{ + Name: searchTool.Name, + Arguments: map[string]any{ + "query": "templates", + }, + }, + } + + searchResult, err := mcpClient.CallTool(ctx, searchReq) + require.NoError(t, err) + require.NotEmpty(t, searchResult.Content) + + // Verify the search result contains our template + assert.Len(t, searchResult.Content, 1) + if textContent, ok := searchResult.Content[0].(mcp.TextContent); ok { + assert.Equal(t, "text", textContent.Type) + assert.Contains(t, textContent.Text, template.ID.String(), "Search result should contain our test template") + t.Logf("Search result: %s", textContent.Text) + } else { + t.Errorf("Expected TextContent type, got %T", searchResult.Content[0]) + } + + // Test fetch tool + var fetchTool *mcp.Tool + for _, tool := range tools.Tools { + if tool.Name == toolsdk.ToolNameChatGPTFetch { + fetchTool = &tool + break + } + } + require.NotNil(t, fetchTool, "Expected to find fetch tool") + + // Execute fetch for the template + fetchReq := mcp.CallToolRequest{ + Params: mcp.CallToolParams{ + Name: fetchTool.Name, + Arguments: map[string]any{ + "id": fmt.Sprintf("template:%s", template.ID.String()), + }, + }, + } + + fetchResult, err := mcpClient.CallTool(ctx, fetchReq) + require.NoError(t, err) + require.NotEmpty(t, fetchResult.Content) + + // Verify the fetch result contains template details + assert.Len(t, fetchResult.Content, 1) + if textContent, ok := fetchResult.Content[0].(mcp.TextContent); ok { + assert.Equal(t, "text", textContent.Type) + assert.Contains(t, textContent.Text, template.Name, "Fetch result should contain template name") + assert.Contains(t, textContent.Text, template.ID.String(), "Fetch result should contain template ID") + t.Logf("Fetch result contains template data") + } else { + t.Errorf("Expected TextContent type, got %T", fetchResult.Content[0]) + } + + t.Logf("ChatGPT endpoint E2E test successful: search and fetch tools working correctly") +} + // Helper function to parse URL safely in tests func mustParseURL(t *testing.T, rawURL string) *url.URL { u, err := url.Parse(rawURL) diff --git a/coderd/mcp_http.go b/coderd/mcp_http.go index 40aaaa1c40dd5..51082858fe55e 100644 --- a/coderd/mcp_http.go +++ b/coderd/mcp_http.go @@ -1,6 +1,7 @@ package coderd import ( + "fmt" "net/http" "cdr.dev/slog" @@ -11,7 +12,15 @@ import ( "github.com/coder/coder/v2/codersdk" ) +type MCPToolset string + +const ( + MCPToolsetStandard MCPToolset = "standard" + MCPToolsetChatGPT MCPToolset = "chatgpt" +) + // mcpHTTPHandler creates the MCP HTTP transport handler +// It supports a "toolset" query parameter to select the set of tools to register. func (api *API) mcpHTTPHandler() http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { // Create MCP server instance for each request @@ -23,14 +32,30 @@ func (api *API) mcpHTTPHandler() http.Handler { }) return } - authenticatedClient := codersdk.New(api.AccessURL) // Extract the original session token from the request authenticatedClient.SetSessionToken(httpmw.APITokenFromRequest(r)) - // Register tools with authenticated client - if err := mcpServer.RegisterTools(authenticatedClient); err != nil { - api.Logger.Warn(r.Context(), "failed to register MCP tools", slog.Error(err)) + toolset := MCPToolset(r.URL.Query().Get("toolset")) + // Default to standard toolset if no toolset is specified. + if toolset == "" { + toolset = MCPToolsetStandard + } + + switch toolset { + case MCPToolsetStandard: + if err := mcpServer.RegisterTools(authenticatedClient); err != nil { + api.Logger.Warn(r.Context(), "failed to register MCP tools", slog.Error(err)) + } + case MCPToolsetChatGPT: + if err := mcpServer.RegisterChatGPTTools(authenticatedClient); err != nil { + api.Logger.Warn(r.Context(), "failed to register MCP tools", slog.Error(err)) + } + default: + httpapi.Write(r.Context(), w, http.StatusBadRequest, codersdk.Response{ + Message: fmt.Sprintf("Invalid toolset: %s", toolset), + }) + return } // Handle the MCP request diff --git a/coderd/notifications/dispatch/webhook.go b/coderd/notifications/dispatch/webhook.go index 65d6ed030af98..7265602e5332d 100644 --- a/coderd/notifications/dispatch/webhook.go +++ b/coderd/notifications/dispatch/webhook.go @@ -5,6 +5,7 @@ import ( "context" "encoding/json" "errors" + "fmt" "io" "net/http" "text/template" @@ -39,7 +40,22 @@ type WebhookPayload struct { } func NewWebhookHandler(cfg codersdk.NotificationsWebhookConfig, log slog.Logger) *WebhookHandler { - return &WebhookHandler{cfg: cfg, log: log, cl: &http.Client{}} + // Create a new transport in favor of reusing the default, since other http clients may interfere. + // http.Transport maintains its own connection pool, and we want to avoid cross-contamination. + var rt http.RoundTripper + + def := http.DefaultTransport + t, ok := def.(*http.Transport) + if !ok { + // The API has changed (very unlikely), so let's use the default transport (previous behavior) and log. + log.Warn(context.Background(), "failed to clone default HTTP transport, unexpected type", slog.F("type", fmt.Sprintf("%T", def))) + rt = def + } else { + // Clone the transport's exported fields, but not its connection pool. + rt = t.Clone() + } + + return &WebhookHandler{cfg: cfg, log: log, cl: &http.Client{Transport: rt}} } func (w *WebhookHandler) Dispatcher(payload types.MessagePayload, titleMarkdown, bodyMarkdown string, _ template.FuncMap) (DeliveryFunc, error) { diff --git a/coderd/notifications/dispatch/webhook_test.go b/coderd/notifications/dispatch/webhook_test.go index 9f898a6fd6efd..35443b9fbb840 100644 --- a/coderd/notifications/dispatch/webhook_test.go +++ b/coderd/notifications/dispatch/webhook_test.go @@ -131,7 +131,7 @@ func TestWebhook(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { tc.serverFn(msgID, w, r) })) - defer server.Close() + t.Cleanup(server.Close) endpoint, err = url.Parse(server.URL) require.NoError(t, err) diff --git a/coderd/notifications/manager.go b/coderd/notifications/manager.go index 11588a09fb797..943306d443265 100644 --- a/coderd/notifications/manager.go +++ b/coderd/notifications/manager.go @@ -11,12 +11,13 @@ import ( "golang.org/x/xerrors" "cdr.dev/slog" - "github.com/coder/quartz" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/pubsub" "github.com/coder/coder/v2/coderd/notifications/dispatch" + "github.com/coder/coder/v2/coderd/pproflabel" "github.com/coder/coder/v2/codersdk" + "github.com/coder/quartz" ) var ErrInvalidDispatchTimeout = xerrors.New("dispatch timeout must be less than lease period") @@ -145,7 +146,7 @@ func (m *Manager) Run(ctx context.Context) { m.runOnce.Do(func() { // Closes when Stop() is called or context is canceled. - go func() { + pproflabel.Go(ctx, pproflabel.Service(pproflabel.ServiceNotifications), func(ctx context.Context) { err := m.loop(ctx) if err != nil { if xerrors.Is(err, ErrManagerAlreadyClosed) { @@ -154,7 +155,7 @@ func (m *Manager) Run(ctx context.Context) { m.log.Error(ctx, "notification manager stopped with error", slog.Error(err)) } } - }() + }) }) } diff --git a/coderd/notifications/manager_test.go b/coderd/notifications/manager_test.go index e9c309f0a09d3..30af0c88b852c 100644 --- a/coderd/notifications/manager_test.go +++ b/coderd/notifications/manager_test.go @@ -31,7 +31,6 @@ func TestBufferedUpdates(t *testing.T) { // setup - // nolint:gocritic // Unit test. ctx := dbauthz.AsSystemRestricted(testutil.Context(t, testutil.WaitSuperLong)) store, ps := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -108,7 +107,6 @@ func TestBuildPayload(t *testing.T) { // SETUP - // nolint:gocritic // Unit test. ctx := dbauthz.AsSystemRestricted(testutil.Context(t, testutil.WaitSuperLong)) store, _ := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -166,7 +164,6 @@ func TestStopBeforeRun(t *testing.T) { // SETUP - // nolint:gocritic // Unit test. ctx := dbauthz.AsSystemRestricted(testutil.Context(t, testutil.WaitSuperLong)) store, ps := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -187,7 +184,6 @@ func TestRunStopRace(t *testing.T) { // SETUP - // nolint:gocritic // Unit test. ctx := dbauthz.AsSystemRestricted(testutil.Context(t, testutil.WaitMedium)) store, ps := dbtestutil.NewDB(t) logger := testutil.Logger(t) diff --git a/coderd/notifications/metrics_test.go b/coderd/notifications/metrics_test.go index 5517f86061cc0..6ba6635a50c4c 100644 --- a/coderd/notifications/metrics_test.go +++ b/coderd/notifications/metrics_test.go @@ -37,7 +37,6 @@ func TestMetrics(t *testing.T) { t.Skip("This test requires postgres; it relies on business-logic only implemented in the database") } - // nolint:gocritic // Unit test. ctx := dbauthz.AsSystemRestricted(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -226,7 +225,6 @@ func TestPendingUpdatesMetric(t *testing.T) { t.Parallel() // SETUP - // nolint:gocritic // Unit test. ctx := dbauthz.AsSystemRestricted(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -320,7 +318,6 @@ func TestInflightDispatchesMetric(t *testing.T) { t.Parallel() // SETUP - // nolint:gocritic // Unit test. ctx := dbauthz.AsSystemRestricted(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -400,7 +397,6 @@ func TestCustomMethodMetricCollection(t *testing.T) { t.Skip("This test requires postgres; it relies on business-logic only implemented in the database") } - // nolint:gocritic // Unit test. ctx := dbauthz.AsSystemRestricted(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) diff --git a/coderd/notifications/notifications_test.go b/coderd/notifications/notifications_test.go index e213a62df9996..f5e72a8327d7e 100644 --- a/coderd/notifications/notifications_test.go +++ b/coderd/notifications/notifications_test.go @@ -70,7 +70,6 @@ func TestBasicNotificationRoundtrip(t *testing.T) { t.Skip("This test requires postgres; it relies on business-logic only implemented in the database") } - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -137,7 +136,6 @@ func TestSMTPDispatch(t *testing.T) { // SETUP - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -203,7 +201,6 @@ func TestWebhookDispatch(t *testing.T) { // SETUP - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -287,7 +284,6 @@ func TestBackpressure(t *testing.T) { store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitShort)) const method = database.NotificationMethodWebhook @@ -416,7 +412,6 @@ func TestRetries(t *testing.T) { } const maxAttempts = 3 - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -516,7 +511,6 @@ func TestExpiredLeaseIsRequeued(t *testing.T) { t.Skip("This test requires postgres; it relies on business-logic only implemented in the database") } - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -536,7 +530,6 @@ func TestExpiredLeaseIsRequeued(t *testing.T) { noopInterceptor := newNoopStoreSyncer(store) - // nolint:gocritic // Unit test. mgrCtx, cancelManagerCtx := context.WithCancel(dbauthz.AsNotifier(context.Background())) t.Cleanup(cancelManagerCtx) @@ -645,7 +638,6 @@ func TestNotifierPaused(t *testing.T) { // Setup. - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -1323,7 +1315,6 @@ func TestNotificationTemplates_Golden(t *testing.T) { return &db, &api.Logger, &user }() - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) _, pubsub := dbtestutil.NewDB(t) @@ -1406,13 +1397,11 @@ func TestNotificationTemplates_Golden(t *testing.T) { // as appearance changes are enterprise features and we do not want to mix those // can't use the api if tc.appName != "" { - // nolint:gocritic // Unit test. err = (*db).UpsertApplicationName(dbauthz.AsSystemRestricted(ctx), "Custom Application") require.NoError(t, err) } if tc.logoURL != "" { - // nolint:gocritic // Unit test. err = (*db).UpsertLogoURL(dbauthz.AsSystemRestricted(ctx), "https://custom.application/logo.png") require.NoError(t, err) } @@ -1510,7 +1499,6 @@ func TestNotificationTemplates_Golden(t *testing.T) { }() _, pubsub := dbtestutil.NewDB(t) - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) // Spin up the mock webhook server @@ -1650,7 +1638,6 @@ func TestDisabledByDefaultBeforeEnqueue(t *testing.T) { t.Skip("This test requires postgres; it is testing business-logic implemented in the database") } - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, _ := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -1676,7 +1663,6 @@ func TestDisabledBeforeEnqueue(t *testing.T) { t.Skip("This test requires postgres; it is testing business-logic implemented in the database") } - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, _ := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -1712,7 +1698,6 @@ func TestDisabledAfterEnqueue(t *testing.T) { t.Skip("This test requires postgres; it is testing business-logic implemented in the database") } - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -1769,7 +1754,6 @@ func TestCustomNotificationMethod(t *testing.T) { t.Skip("This test requires postgres; it relies on business-logic only implemented in the database") } - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -1873,7 +1857,6 @@ func TestNotificationsTemplates(t *testing.T) { t.Skip("This test requires postgres; it relies on business-logic only implemented in the database") } - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) api := coderdtest.New(t, createOpts(t)) @@ -1910,7 +1893,6 @@ func TestNotificationDuplicates(t *testing.T) { t.Skip("This test requires postgres; it is testing the dedupe hash trigger in the database") } - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -2007,7 +1989,6 @@ func TestNotificationTargetMatrix(t *testing.T) { t.Run(tt.name, func(t *testing.T) { t.Parallel() - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, pubsub := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -2051,7 +2032,6 @@ func TestNotificationOneTimePasswordDeliveryTargets(t *testing.T) { t.Run("Inbox", func(t *testing.T) { t.Parallel() - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, _ := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -2076,7 +2056,6 @@ func TestNotificationOneTimePasswordDeliveryTargets(t *testing.T) { t.Run("SMTP", func(t *testing.T) { t.Parallel() - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, _ := dbtestutil.NewDB(t) logger := testutil.Logger(t) @@ -2100,7 +2079,6 @@ func TestNotificationOneTimePasswordDeliveryTargets(t *testing.T) { t.Run("Webhook", func(t *testing.T) { t.Parallel() - // nolint:gocritic // Unit test. ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) store, _ := dbtestutil.NewDB(t) logger := testutil.Logger(t) diff --git a/coderd/notifications/reports/generator_internal_test.go b/coderd/notifications/reports/generator_internal_test.go index f61064c4e0b23..6dcff173118cb 100644 --- a/coderd/notifications/reports/generator_internal_test.go +++ b/coderd/notifications/reports/generator_internal_test.go @@ -505,7 +505,6 @@ func TestReportFailedWorkspaceBuilds(t *testing.T) { func setup(t *testing.T) (context.Context, slog.Logger, database.Store, pubsub.Pubsub, *notificationstest.FakeEnqueuer, *quartz.Mock) { t.Helper() - // nolint:gocritic // reportFailedWorkspaceBuilds is called by system. ctx := dbauthz.AsSystemRestricted(context.Background()) logger := slogtest.Make(t, &slogtest.Options{}) db, ps := dbtestutil.NewDB(t) diff --git a/coderd/parameters_test.go b/coderd/parameters_test.go index c00d6f9224bfb..07c00d2ef23e3 100644 --- a/coderd/parameters_test.go +++ b/coderd/parameters_test.go @@ -3,10 +3,12 @@ package coderd_test import ( "context" "os" + "sync" "testing" "github.com/google/uuid" "github.com/stretchr/testify/require" + "go.uber.org/atomic" "golang.org/x/xerrors" "github.com/coder/coder/v2/coderd" @@ -199,8 +201,15 @@ func TestDynamicParametersWithTerraformValues(t *testing.T) { modulesArchive, err := terraform.GetModulesArchive(os.DirFS("testdata/parameters/modules")) require.NoError(t, err) + c := atomic.NewInt32(0) + reject := &dbRejectGitSSHKey{Store: db, hook: func(d *dbRejectGitSSHKey) { + if c.Add(1) > 1 { + // Second call forward, reject + d.SetReject(true) + } + }} setup := setupDynamicParamsTest(t, setupDynamicParamsTestParams{ - db: &dbRejectGitSSHKey{Store: db}, + db: reject, ps: ps, provisionerDaemonVersion: provProto.CurrentVersion.String(), mainTF: dynamicParametersTerraformSource, @@ -444,8 +453,30 @@ func setupDynamicParamsTest(t *testing.T, args setupDynamicParamsTestParams) dyn // that is generally impossible to force an error. type dbRejectGitSSHKey struct { database.Store + rejectMu sync.RWMutex + reject bool + hook func(d *dbRejectGitSSHKey) +} + +// SetReject toggles whether GetGitSSHKey should return an error or passthrough to the underlying store. +func (d *dbRejectGitSSHKey) SetReject(reject bool) { + d.rejectMu.Lock() + defer d.rejectMu.Unlock() + d.reject = reject } -func (*dbRejectGitSSHKey) GetGitSSHKey(_ context.Context, _ uuid.UUID) (database.GitSSHKey, error) { - return database.GitSSHKey{}, xerrors.New("forcing a fake error") +func (d *dbRejectGitSSHKey) GetGitSSHKey(ctx context.Context, userID uuid.UUID) (database.GitSSHKey, error) { + if d.hook != nil { + d.hook(d) + } + + d.rejectMu.RLock() + reject := d.reject + d.rejectMu.RUnlock() + + if reject { + return database.GitSSHKey{}, xerrors.New("forcing a fake error") + } + + return d.Store.GetGitSSHKey(ctx, userID) } diff --git a/coderd/pproflabel/pproflabel.go b/coderd/pproflabel/pproflabel.go new file mode 100644 index 0000000000000..bde5be1b3630e --- /dev/null +++ b/coderd/pproflabel/pproflabel.go @@ -0,0 +1,43 @@ +package pproflabel + +import ( + "context" + "runtime/pprof" +) + +// Go is just a convince wrapper to set off a labeled goroutine. +func Go(ctx context.Context, labels pprof.LabelSet, f func(context.Context)) { + go pprof.Do(ctx, labels, f) +} + +func Do(ctx context.Context, labels pprof.LabelSet, f func(context.Context)) { + pprof.Do(ctx, labels, f) +} + +const ( + // ServiceTag should not collide with the pyroscope built-in tag "service". + // Use `coder_` to avoid collisions. + ServiceTag = "coder_service" + + ServiceHTTPServer = "http-api" + ServiceLifecycles = "lifecycle-executor" + ServicePrebuildReconciler = "prebuilds-reconciler" + ServiceTerraformProvisioner = "terraform-provisioner" + ServiceDBPurge = "db-purge" + ServiceNotifications = "notifications" + ServiceReplicaSync = "replica-sync" + // ServiceMetricCollector collects metrics from insights in the database and + // exports them in a prometheus collector format. + ServiceMetricCollector = "metrics-collector" + // ServiceAgentMetricAggregator merges agent metrics and exports them in a + // prometheus collector format. + ServiceAgentMetricAggregator = "agent-metrics-aggregator" + // ServiceTallymanPublisher publishes usage events to coder/tallyman. + ServiceTallymanPublisher = "tallyman-publisher" + + RequestTypeTag = "coder_request_type" +) + +func Service(name string, pairs ...string) pprof.LabelSet { + return pprof.Labels(append([]string{ServiceTag, name}, pairs...)...) +} diff --git a/coderd/prebuilds/api.go b/coderd/prebuilds/api.go index 3092d27421d26..1bedeb10130c8 100644 --- a/coderd/prebuilds/api.go +++ b/coderd/prebuilds/api.go @@ -2,6 +2,8 @@ package prebuilds import ( "context" + "database/sql" + "time" "github.com/google/uuid" "golang.org/x/xerrors" @@ -54,6 +56,15 @@ type StateSnapshotter interface { } type Claimer interface { - Claim(ctx context.Context, userID uuid.UUID, name string, presetID uuid.UUID) (*uuid.UUID, error) + Claim( + ctx context.Context, + now time.Time, + userID uuid.UUID, + name string, + presetID uuid.UUID, + autostartSchedule sql.NullString, + nextStartAt sql.NullTime, + ttl sql.NullInt64, + ) (*uuid.UUID, error) Initiator() uuid.UUID } diff --git a/coderd/prebuilds/noop.go b/coderd/prebuilds/noop.go index 3c2dd78a804db..ebb6d6964214e 100644 --- a/coderd/prebuilds/noop.go +++ b/coderd/prebuilds/noop.go @@ -2,6 +2,8 @@ package prebuilds import ( "context" + "database/sql" + "time" "github.com/google/uuid" @@ -28,7 +30,7 @@ var DefaultReconciler ReconciliationOrchestrator = NoopReconciler{} type NoopClaimer struct{} -func (NoopClaimer) Claim(context.Context, uuid.UUID, string, uuid.UUID) (*uuid.UUID, error) { +func (NoopClaimer) Claim(context.Context, time.Time, uuid.UUID, string, uuid.UUID, sql.NullString, sql.NullTime, sql.NullInt64) (*uuid.UUID, error) { // Not entitled to claim prebuilds in AGPL version. return nil, ErrAGPLDoesNotSupportPrebuiltWorkspaces } diff --git a/coderd/prebuilds/parameters.go b/coderd/prebuilds/parameters.go new file mode 100644 index 0000000000000..63a1a7b78bfa7 --- /dev/null +++ b/coderd/prebuilds/parameters.go @@ -0,0 +1,42 @@ +package prebuilds + +import ( + "context" + "database/sql" + "errors" + + "github.com/google/uuid" + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/coderd/database" +) + +// FindMatchingPresetID finds a preset ID that matches the provided parameters. +// It returns the preset ID if a match is found, or uuid.Nil if no match is found. +// The function performs a bidirectional comparison to ensure all parameters match exactly. +func FindMatchingPresetID( + ctx context.Context, + store database.Store, + templateVersionID uuid.UUID, + parameterNames []string, + parameterValues []string, +) (uuid.UUID, error) { + if len(parameterNames) != len(parameterValues) { + return uuid.Nil, xerrors.New("parameter names and values must have the same length") + } + + result, err := store.FindMatchingPresetID(ctx, database.FindMatchingPresetIDParams{ + TemplateVersionID: templateVersionID, + ParameterNames: parameterNames, + ParameterValues: parameterValues, + }) + if err != nil { + // Handle the case where no matching preset is found (no rows returned) + if errors.Is(err, sql.ErrNoRows) { + return uuid.Nil, nil + } + return uuid.Nil, xerrors.Errorf("find matching preset ID: %w", err) + } + + return result, nil +} diff --git a/coderd/prebuilds/parameters_test.go b/coderd/prebuilds/parameters_test.go new file mode 100644 index 0000000000000..e9366bb1da02b --- /dev/null +++ b/coderd/prebuilds/parameters_test.go @@ -0,0 +1,198 @@ +package prebuilds_test + +import ( + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbgen" + "github.com/coder/coder/v2/coderd/database/dbtestutil" + "github.com/coder/coder/v2/coderd/prebuilds" + "github.com/coder/coder/v2/testutil" +) + +func TestFindMatchingPresetID(t *testing.T) { + t.Parallel() + + presetIDs := []uuid.UUID{ + uuid.New(), + uuid.New(), + } + // Give each preset a meaningful name in alphabetical order + presetNames := map[uuid.UUID]string{ + presetIDs[0]: "development", + presetIDs[1]: "production", + } + tests := []struct { + name string + parameterNames []string + parameterValues []string + presetParameters []database.TemplateVersionPresetParameter + expectedPresetID uuid.UUID + expectError bool + errorContains string + }{ + { + name: "exact match", + parameterNames: []string{"region", "instance_type"}, + parameterValues: []string{"us-west-2", "t3.medium"}, + presetParameters: []database.TemplateVersionPresetParameter{ + {TemplateVersionPresetID: presetIDs[0], Name: "region", Value: "us-west-2"}, + {TemplateVersionPresetID: presetIDs[0], Name: "instance_type", Value: "t3.medium"}, + // antagonist: + {TemplateVersionPresetID: presetIDs[1], Name: "region", Value: "us-west-2"}, + {TemplateVersionPresetID: presetIDs[1], Name: "instance_type", Value: "t3.large"}, + }, + expectedPresetID: presetIDs[0], + expectError: false, + }, + { + name: "no match - different values", + parameterNames: []string{"region", "instance_type"}, + parameterValues: []string{"us-east-1", "t3.medium"}, + presetParameters: []database.TemplateVersionPresetParameter{ + {TemplateVersionPresetID: presetIDs[0], Name: "region", Value: "us-west-2"}, + {TemplateVersionPresetID: presetIDs[0], Name: "instance_type", Value: "t3.medium"}, + // antagonist: + {TemplateVersionPresetID: presetIDs[1], Name: "region", Value: "us-west-2"}, + {TemplateVersionPresetID: presetIDs[1], Name: "instance_type", Value: "t3.large"}, + }, + expectedPresetID: uuid.Nil, + expectError: false, + }, + { + name: "no match - fewer provided parameters", + parameterNames: []string{"region"}, + parameterValues: []string{"us-west-2"}, + presetParameters: []database.TemplateVersionPresetParameter{ + {TemplateVersionPresetID: presetIDs[0], Name: "region", Value: "us-west-2"}, + {TemplateVersionPresetID: presetIDs[0], Name: "instance_type", Value: "t3.medium"}, + // antagonist: + {TemplateVersionPresetID: presetIDs[1], Name: "region", Value: "us-west-2"}, + {TemplateVersionPresetID: presetIDs[1], Name: "instance_type", Value: "t3.large"}, + }, + expectedPresetID: uuid.Nil, + expectError: false, + }, + { + name: "subset match - extra provided parameter", + parameterNames: []string{"region", "instance_type", "extra_param"}, + parameterValues: []string{"us-west-2", "t3.medium", "extra_value"}, + presetParameters: []database.TemplateVersionPresetParameter{ + {TemplateVersionPresetID: presetIDs[0], Name: "region", Value: "us-west-2"}, + {TemplateVersionPresetID: presetIDs[0], Name: "instance_type", Value: "t3.medium"}, + // antagonist: + {TemplateVersionPresetID: presetIDs[1], Name: "region", Value: "us-west-2"}, + {TemplateVersionPresetID: presetIDs[1], Name: "instance_type", Value: "t3.large"}, + }, + expectedPresetID: presetIDs[0], // Should match because all preset parameters are present + expectError: false, + }, + { + name: "mismatched parameter names vs values", + parameterNames: []string{"region", "instance_type"}, + parameterValues: []string{"us-west-2"}, + presetParameters: []database.TemplateVersionPresetParameter{}, + expectedPresetID: uuid.Nil, + expectError: true, + errorContains: "parameter names and values must have the same length", + }, + { + name: "multiple presets - match first", + parameterNames: []string{"region", "instance_type"}, + parameterValues: []string{"us-west-2", "t3.medium"}, + presetParameters: []database.TemplateVersionPresetParameter{ + {TemplateVersionPresetID: presetIDs[0], Name: "region", Value: "us-west-2"}, + {TemplateVersionPresetID: presetIDs[0], Name: "instance_type", Value: "t3.medium"}, + {TemplateVersionPresetID: presetIDs[1], Name: "region", Value: "us-east-1"}, + {TemplateVersionPresetID: presetIDs[1], Name: "instance_type", Value: "t3.large"}, + }, + expectedPresetID: presetIDs[0], + expectError: false, + }, + { + name: "largest subset match", + parameterNames: []string{"region", "instance_type", "storage_size"}, + parameterValues: []string{"us-west-2", "t3.medium", "100gb"}, + presetParameters: []database.TemplateVersionPresetParameter{ + {TemplateVersionPresetID: presetIDs[0], Name: "region", Value: "us-west-2"}, + {TemplateVersionPresetID: presetIDs[0], Name: "instance_type", Value: "t3.medium"}, + {TemplateVersionPresetID: presetIDs[1], Name: "region", Value: "us-west-2"}, + }, + expectedPresetID: presetIDs[0], // Should match the larger subset (2 params vs 1 param) + expectError: false, + }, + } + + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitShort) + db, _ := dbtestutil.NewDB(t) + org := dbgen.Organization(t, db, database.Organization{}) + user := dbgen.User(t, db, database.User{}) + templateVersion := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + OrganizationID: org.ID, + CreatedBy: user.ID, + JobID: uuid.New(), + }) + + // Group parameters by preset ID and create presets + presetMap := make(map[uuid.UUID][]database.TemplateVersionPresetParameter) + for _, param := range tt.presetParameters { + presetMap[param.TemplateVersionPresetID] = append(presetMap[param.TemplateVersionPresetID], param) + } + + // Create presets and insert their parameters + for presetID, params := range presetMap { + // Create the preset + _, err := db.InsertPreset(ctx, database.InsertPresetParams{ + ID: presetID, + TemplateVersionID: templateVersion.ID, + Name: presetNames[presetID], + CreatedAt: dbtestutil.NowInDefaultTimezone(), + }) + require.NoError(t, err) + + // Insert parameters for this preset + names := make([]string, len(params)) + values := make([]string, len(params)) + for i, param := range params { + names[i] = param.Name + values[i] = param.Value + } + + _, err = db.InsertPresetParameters(ctx, database.InsertPresetParametersParams{ + TemplateVersionPresetID: presetID, + Names: names, + Values: values, + }) + require.NoError(t, err) + } + + result, err := prebuilds.FindMatchingPresetID( + ctx, + db, + templateVersion.ID, + tt.parameterNames, + tt.parameterValues, + ) + + // Assert results + if tt.expectError { + require.Error(t, err) + if tt.errorContains != "" { + assert.Contains(t, err.Error(), tt.errorContains) + } + } else { + require.NoError(t, err) + assert.Equal(t, tt.expectedPresetID, result) + } + }) + } +} diff --git a/coderd/presets.go b/coderd/presets.go index c8d84baec4bf3..b002d6168f5ba 100644 --- a/coderd/presets.go +++ b/coderd/presets.go @@ -54,6 +54,8 @@ func (api *API) templateVersionPresets(rw http.ResponseWriter, r *http.Request) Name: preset.Name, Default: preset.IsDefault, DesiredPrebuildInstances: convertPrebuildInstances(preset.DesiredInstances), + Description: preset.Description, + Icon: preset.Icon, } for _, presetParam := range presetParams { if presetParam.TemplateVersionPresetID != preset.ID { diff --git a/coderd/prometheusmetrics/aggregator.go b/coderd/prometheusmetrics/aggregator.go index 44ade677d5cff..ad51c3e7fa8a7 100644 --- a/coderd/prometheusmetrics/aggregator.go +++ b/coderd/prometheusmetrics/aggregator.go @@ -11,11 +11,11 @@ import ( "github.com/prometheus/common/model" "golang.org/x/xerrors" - "github.com/coder/coder/v2/coderd/agentmetrics" - "cdr.dev/slog" agentproto "github.com/coder/coder/v2/agent/proto" + "github.com/coder/coder/v2/coderd/agentmetrics" + "github.com/coder/coder/v2/coderd/pproflabel" ) const ( @@ -298,7 +298,7 @@ func (ma *MetricsAggregator) Run(ctx context.Context) func() { done := make(chan struct{}) cleanupTicker := time.NewTicker(ma.metricsCleanupInterval) - go func() { + pproflabel.Go(ctx, pproflabel.Service(pproflabel.ServiceAgentMetricAggregator), func(ctx context.Context) { defer close(done) defer cleanupTicker.Stop() @@ -395,7 +395,7 @@ func (ma *MetricsAggregator) Run(ctx context.Context) func() { return } } - }() + }) return func() { cancelFunc() <-done diff --git a/coderd/prometheusmetrics/insights/metricscollector.go b/coderd/prometheusmetrics/insights/metricscollector.go index 41d3a0220f391..a095968526ca8 100644 --- a/coderd/prometheusmetrics/insights/metricscollector.go +++ b/coderd/prometheusmetrics/insights/metricscollector.go @@ -14,6 +14,7 @@ import ( "cdr.dev/slog" "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/pproflabel" "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/codersdk" ) @@ -158,7 +159,7 @@ func (mc *MetricsCollector) Run(ctx context.Context) (func(), error) { }) } - go func() { + pproflabel.Go(ctx, pproflabel.Service(pproflabel.ServiceMetricCollector), func(ctx context.Context) { defer close(done) defer ticker.Stop() for { @@ -170,7 +171,7 @@ func (mc *MetricsCollector) Run(ctx context.Context) (func(), error) { doTick() } } - }() + }) return func() { closeFunc() <-done diff --git a/coderd/prometheusmetrics/insights/metricscollector_test.go b/coderd/prometheusmetrics/insights/metricscollector_test.go index 9382fa5013525..5c18ec6d1a60f 100644 --- a/coderd/prometheusmetrics/insights/metricscollector_test.go +++ b/coderd/prometheusmetrics/insights/metricscollector_test.go @@ -128,7 +128,6 @@ func TestCollectInsights(t *testing.T) { AppStatBatchSize: workspaceapps.DefaultStatsDBReporterBatchSize, }) refTime := time.Now().Add(-3 * time.Minute).Truncate(time.Minute) - //nolint:gocritic // This is a test. err = reporter.ReportAppStats(dbauthz.AsSystemRestricted(context.Background()), []workspaceapps.StatsReport{ { UserID: user.ID, diff --git a/coderd/prometheusmetrics/prometheusmetrics.go b/coderd/prometheusmetrics/prometheusmetrics.go index 4fd2cfda607ed..6ea8615f3779a 100644 --- a/coderd/prometheusmetrics/prometheusmetrics.go +++ b/coderd/prometheusmetrics/prometheusmetrics.go @@ -150,7 +150,7 @@ func Workspaces(ctx context.Context, logger slog.Logger, registerer prometheus.R Namespace: "coderd", Subsystem: "api", Name: "workspace_latest_build", - Help: "The current number of workspace builds by status.", + Help: "The current number of workspace builds by status for all non-deleted workspaces.", }, []string{"status"}) if err := registerer.Register(workspaceLatestBuildTotals); err != nil { return nil, err @@ -159,7 +159,7 @@ func Workspaces(ctx context.Context, logger slog.Logger, registerer prometheus.R workspaceLatestBuildStatuses := prometheus.NewGaugeVec(prometheus.GaugeOpts{ Namespace: "coderd", Name: "workspace_latest_build_status", - Help: "The current workspace statuses by template, transition, and owner.", + Help: "The current workspace statuses by template, transition, and owner for all non-deleted workspaces.", }, []string{"status", "template_name", "template_version", "workspace_owner", "workspace_transition"}) if err := registerer.Register(workspaceLatestBuildStatuses); err != nil { return nil, err @@ -168,59 +168,37 @@ func Workspaces(ctx context.Context, logger slog.Logger, registerer prometheus.R ctx, cancelFunc := context.WithCancel(ctx) done := make(chan struct{}) - updateWorkspaceTotals := func() { - builds, err := db.GetLatestWorkspaceBuilds(ctx) - if err != nil { - if errors.Is(err, sql.ErrNoRows) { - // clear all series if there are no database entries - workspaceLatestBuildTotals.Reset() - } else { - logger.Warn(ctx, "failed to load latest workspace builds", slog.Error(err)) - } - return - } - jobIDs := make([]uuid.UUID, 0, len(builds)) - for _, build := range builds { - jobIDs = append(jobIDs, build.JobID) - } - jobs, err := db.GetProvisionerJobsByIDs(ctx, jobIDs) - if err != nil { - ids := make([]string, 0, len(jobIDs)) - for _, id := range jobIDs { - ids = append(ids, id.String()) - } - - logger.Warn(ctx, "failed to load provisioner jobs", slog.F("ids", ids), slog.Error(err)) - return - } - - workspaceLatestBuildTotals.Reset() - for _, job := range jobs { - status := codersdk.ProvisionerJobStatus(job.JobStatus) - workspaceLatestBuildTotals.WithLabelValues(string(status)).Add(1) - // TODO: deprecated: remove in the future - workspaceLatestBuildTotalsDeprecated.WithLabelValues(string(status)).Add(1) - } - } - - updateWorkspaceStatuses := func() { + updateWorkspaceMetrics := func() { ws, err := db.GetWorkspaces(ctx, database.GetWorkspacesParams{ Deleted: false, WithSummary: false, }) if err != nil { if errors.Is(err, sql.ErrNoRows) { - // clear all series if there are no database entries + workspaceLatestBuildTotals.Reset() workspaceLatestBuildStatuses.Reset() + } else { + logger.Warn(ctx, "failed to load active workspaces for metrics", slog.Error(err)) } - - logger.Warn(ctx, "failed to load active workspaces", slog.Error(err)) return } + workspaceLatestBuildTotals.Reset() workspaceLatestBuildStatuses.Reset() + for _, w := range ws { - workspaceLatestBuildStatuses.WithLabelValues(string(w.LatestBuildStatus), w.TemplateName, w.TemplateVersionName.String, w.OwnerUsername, string(w.LatestBuildTransition)).Add(1) + status := string(w.LatestBuildStatus) + workspaceLatestBuildTotals.WithLabelValues(status).Add(1) + // TODO: deprecated: remove in the future + workspaceLatestBuildTotalsDeprecated.WithLabelValues(status).Add(1) + + workspaceLatestBuildStatuses.WithLabelValues( + status, + w.TemplateName, + w.TemplateVersionName.String, + w.OwnerUsername, + string(w.LatestBuildTransition), + ).Add(1) } } @@ -230,8 +208,7 @@ func Workspaces(ctx context.Context, logger slog.Logger, registerer prometheus.R doTick := func() { defer ticker.Reset(duration) - updateWorkspaceTotals() - updateWorkspaceStatuses() + updateWorkspaceMetrics() } go func() { @@ -351,29 +328,24 @@ func Agents(ctx context.Context, logger slog.Logger, registerer prometheus.Regis templateVersionName = "unknown" } - user, err := db.GetUserByID(ctx, workspace.OwnerID) - if err != nil { - logger.Error(ctx, "can't get user from the database", slog.F("user_id", workspace.OwnerID), slog.Error(err)) - agentsGauge.WithLabelValues(VectorOperationAdd, 0, user.Username, workspace.Name, templateName, templateVersionName) - continue - } + // username := agents, err := db.GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx, workspace.ID) if err != nil { logger.Error(ctx, "can't get workspace agents", slog.F("workspace_id", workspace.ID), slog.Error(err)) - agentsGauge.WithLabelValues(VectorOperationAdd, 0, user.Username, workspace.Name, templateName, templateVersionName) + agentsGauge.WithLabelValues(VectorOperationAdd, 0, workspace.OwnerUsername, workspace.Name, templateName, templateVersionName) continue } if len(agents) == 0 { logger.Debug(ctx, "workspace agents are unavailable", slog.F("workspace_id", workspace.ID)) - agentsGauge.WithLabelValues(VectorOperationAdd, 0, user.Username, workspace.Name, templateName, templateVersionName) + agentsGauge.WithLabelValues(VectorOperationAdd, 0, workspace.OwnerUsername, workspace.Name, templateName, templateVersionName) continue } for _, agent := range agents { // Collect information about agents - agentsGauge.WithLabelValues(VectorOperationAdd, 1, user.Username, workspace.Name, templateName, templateVersionName) + agentsGauge.WithLabelValues(VectorOperationAdd, 1, workspace.OwnerUsername, workspace.Name, templateName, templateVersionName) connectionStatus := agent.Status(agentInactiveDisconnectTimeout) node := (*coordinator.Load()).Node(agent.ID) @@ -383,7 +355,7 @@ func Agents(ctx context.Context, logger slog.Logger, registerer prometheus.Regis tailnetNode = node.ID.String() } - agentsConnectionsGauge.WithLabelValues(VectorOperationSet, 1, agent.Name, user.Username, workspace.Name, string(connectionStatus.Status), string(agent.LifecycleState), tailnetNode) + agentsConnectionsGauge.WithLabelValues(VectorOperationSet, 1, agent.Name, workspace.OwnerUsername, workspace.Name, string(connectionStatus.Status), string(agent.LifecycleState), tailnetNode) if node == nil { logger.Debug(ctx, "can't read in-memory node for agent", slog.F("agent_id", agent.ID)) @@ -408,7 +380,7 @@ func Agents(ctx context.Context, logger slog.Logger, registerer prometheus.Regis } } - agentsConnectionLatenciesGauge.WithLabelValues(VectorOperationSet, latency, agent.Name, user.Username, workspace.Name, region.RegionName, fmt.Sprintf("%v", node.PreferredDERP == regionID)) + agentsConnectionLatenciesGauge.WithLabelValues(VectorOperationSet, latency, agent.Name, workspace.OwnerUsername, workspace.Name, region.RegionName, fmt.Sprintf("%v", node.PreferredDERP == regionID)) } } @@ -420,7 +392,7 @@ func Agents(ctx context.Context, logger slog.Logger, registerer prometheus.Regis } for _, app := range apps { - agentsAppsGauge.WithLabelValues(VectorOperationAdd, 1, agent.Name, user.Username, workspace.Name, app.DisplayName, string(app.Health)) + agentsAppsGauge.WithLabelValues(VectorOperationAdd, 1, agent.Name, workspace.OwnerUsername, workspace.Name, app.DisplayName, string(app.Health)) } } } diff --git a/coderd/prometheusmetrics/prometheusmetrics_test.go b/coderd/prometheusmetrics/prometheusmetrics_test.go index 1ce6b72347999..28046c1dff3fb 100644 --- a/coderd/prometheusmetrics/prometheusmetrics_test.go +++ b/coderd/prometheusmetrics/prometheusmetrics_test.go @@ -247,6 +247,32 @@ func TestWorkspaceLatestBuildTotals(t *testing.T) { codersdk.ProvisionerJobSucceeded: 3, codersdk.ProvisionerJobRunning: 1, }, + }, { + Name: "MultipleWithDeleted", + Database: func() database.Store { + db, _ := dbtestutil.NewDB(t) + u := dbgen.User(t, db, database.User{}) + org := dbgen.Organization(t, db, database.Organization{}) + insertCanceled(t, db, u, org) + insertFailed(t, db, u, org) + insertSuccess(t, db, u, org) + insertRunning(t, db, u, org) + + // Verify that deleted workspaces/builds are NOT counted in metrics. + n, err := cryptorand.Intn(5) + require.NoError(t, err) + for range 1 + n { + insertDeleted(t, db, u, org) + } + return db + }, + Total: 4, // Only non-deleted workspaces should be counted + Status: map[codersdk.ProvisionerJobStatus]int{ + codersdk.ProvisionerJobCanceled: 1, + codersdk.ProvisionerJobFailed: 1, + codersdk.ProvisionerJobSucceeded: 1, + codersdk.ProvisionerJobRunning: 1, + }, }} { t.Run(tc.Name, func(t *testing.T) { t.Parallel() @@ -323,6 +349,33 @@ func TestWorkspaceLatestBuildStatuses(t *testing.T) { codersdk.ProvisionerJobSucceeded: 3, codersdk.ProvisionerJobRunning: 1, }, + }, { + Name: "MultipleWithDeleted", + Database: func() database.Store { + db, _ := dbtestutil.NewDB(t) + u := dbgen.User(t, db, database.User{}) + org := dbgen.Organization(t, db, database.Organization{}) + insertTemplates(t, db, u, org) + insertCanceled(t, db, u, org) + insertFailed(t, db, u, org) + insertSuccess(t, db, u, org) + insertRunning(t, db, u, org) + + // Verify that deleted workspaces/builds are NOT counted in metrics. + n, err := cryptorand.Intn(5) + require.NoError(t, err) + for range 1 + n { + insertDeleted(t, db, u, org) + } + return db + }, + ExpectedWorkspaces: 4, // Only non-deleted workspaces should be counted + ExpectedStatuses: map[codersdk.ProvisionerJobStatus]int{ + codersdk.ProvisionerJobCanceled: 1, + codersdk.ProvisionerJobFailed: 1, + codersdk.ProvisionerJobSucceeded: 1, + codersdk.ProvisionerJobRunning: 1, + }, }} { t.Run(tc.Name, func(t *testing.T) { t.Parallel() @@ -744,6 +797,7 @@ func insertTemplates(t *testing.T, db database.Store, u database.User, org datab MaxPortSharingLevel: database.AppSharingLevelAuthenticated, CreatedBy: u.ID, OrganizationID: org.ID, + CorsBehavior: database.CorsBehaviorSimple, })) pj := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{}) @@ -763,6 +817,7 @@ func insertTemplates(t *testing.T, db database.Store, u database.User, org datab MaxPortSharingLevel: database.AppSharingLevelAuthenticated, CreatedBy: u.ID, OrganizationID: org.ID, + CorsBehavior: database.CorsBehaviorSimple, })) require.NoError(t, db.InsertTemplateVersion(context.Background(), database.InsertTemplateVersionParams{ @@ -905,3 +960,24 @@ func insertSuccess(t *testing.T, db database.Store, u database.User, org databas }) require.NoError(t, err) } + +func insertDeleted(t *testing.T, db database.Store, u database.User, org database.Organization) { + job := insertRunning(t, db, u, org) + err := db.UpdateProvisionerJobWithCompleteByID(context.Background(), database.UpdateProvisionerJobWithCompleteByIDParams{ + ID: job.ID, + CompletedAt: sql.NullTime{ + Time: dbtime.Now(), + Valid: true, + }, + }) + require.NoError(t, err) + + build, err := db.GetWorkspaceBuildByJobID(context.Background(), job.ID) + require.NoError(t, err) + + err = db.UpdateWorkspaceDeletedByID(context.Background(), database.UpdateWorkspaceDeletedByIDParams{ + ID: build.WorkspaceID, + Deleted: true, + }) + require.NoError(t, err) +} diff --git a/coderd/provisionerdaemons.go b/coderd/provisionerdaemons.go index 332ae3b352e0a..67a40b88f69e9 100644 --- a/coderd/provisionerdaemons.go +++ b/coderd/provisionerdaemons.go @@ -6,6 +6,7 @@ import ( "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/db2sdk" + "github.com/coder/coder/v2/coderd/database/sdk2db" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/provisionerdserver" @@ -45,6 +46,9 @@ func (api *API) provisionerDaemons(rw http.ResponseWriter, r *http.Request) { limit := p.PositiveInt32(qp, 50, "limit") ids := p.UUIDs(qp, nil, "ids") tags := p.JSONStringMap(qp, database.StringMap{}, "tags") + includeOffline := p.NullableBoolean(qp, sql.NullBool{}, "offline") + statuses := p.ProvisionerDaemonStatuses(qp, []codersdk.ProvisionerDaemonStatus{}, "status") + maxAge := p.Duration(qp, 0, "max_age") p.ErrorExcessParams(qp) if len(p.Errors) > 0 { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ @@ -54,12 +58,17 @@ func (api *API) provisionerDaemons(rw http.ResponseWriter, r *http.Request) { return } + dbStatuses := sdk2db.ProvisionerDaemonStatuses(statuses) + daemons, err := api.Database.GetProvisionerDaemonsWithStatusByOrganization( ctx, database.GetProvisionerDaemonsWithStatusByOrganizationParams{ OrganizationID: org.ID, StaleIntervalMS: provisionerdserver.StaleInterval.Milliseconds(), Limit: sql.NullInt32{Int32: limit, Valid: limit > 0}, + Offline: includeOffline, + Statuses: dbStatuses, + MaxAgeMs: sql.NullInt64{Int64: maxAge.Milliseconds(), Valid: maxAge > 0}, IDs: ids, Tags: tags, }, diff --git a/coderd/provisionerdaemons_test.go b/coderd/provisionerdaemons_test.go index 249da9d6bc922..8bbaca551a151 100644 --- a/coderd/provisionerdaemons_test.go +++ b/coderd/provisionerdaemons_test.go @@ -146,7 +146,9 @@ func TestProvisionerDaemons(t *testing.T) { t.Run("Default limit", func(t *testing.T) { t.Parallel() ctx := testutil.Context(t, testutil.WaitMedium) - daemons, err := templateAdminClient.OrganizationProvisionerDaemons(ctx, owner.OrganizationID, nil) + daemons, err := templateAdminClient.OrganizationProvisionerDaemons(ctx, owner.OrganizationID, &codersdk.OrganizationProvisionerDaemonsOptions{ + Offline: true, + }) require.NoError(t, err) require.Len(t, daemons, 50) }) @@ -155,7 +157,8 @@ func TestProvisionerDaemons(t *testing.T) { t.Parallel() ctx := testutil.Context(t, testutil.WaitMedium) daemons, err := templateAdminClient.OrganizationProvisionerDaemons(ctx, owner.OrganizationID, &codersdk.OrganizationProvisionerDaemonsOptions{ - IDs: []uuid.UUID{pd1.ID, pd2.ID}, + IDs: []uuid.UUID{pd1.ID, pd2.ID}, + Offline: true, }) require.NoError(t, err) require.Len(t, daemons, 2) @@ -167,7 +170,8 @@ func TestProvisionerDaemons(t *testing.T) { t.Parallel() ctx := testutil.Context(t, testutil.WaitMedium) daemons, err := templateAdminClient.OrganizationProvisionerDaemons(ctx, owner.OrganizationID, &codersdk.OrganizationProvisionerDaemonsOptions{ - Tags: map[string]string{"count": "1"}, + Tags: map[string]string{"count": "1"}, + Offline: true, }) require.NoError(t, err) require.Len(t, daemons, 1) @@ -209,7 +213,8 @@ func TestProvisionerDaemons(t *testing.T) { t.Parallel() ctx := testutil.Context(t, testutil.WaitMedium) daemons, err := templateAdminClient.OrganizationProvisionerDaemons(ctx, owner.OrganizationID, &codersdk.OrganizationProvisionerDaemonsOptions{ - IDs: []uuid.UUID{pd2.ID}, + IDs: []uuid.UUID{pd2.ID}, + Offline: true, }) require.NoError(t, err) require.Len(t, daemons, 1) diff --git a/coderd/provisionerdserver/provisionerdserver.go b/coderd/provisionerdserver/provisionerdserver.go index f545169c93b31..d7bc29aca3044 100644 --- a/coderd/provisionerdserver/provisionerdserver.go +++ b/coderd/provisionerdserver/provisionerdserver.go @@ -28,13 +28,6 @@ import ( protobuf "google.golang.org/protobuf/proto" "cdr.dev/slog" - - "github.com/coder/coder/v2/coderd/util/slice" - - "github.com/coder/coder/v2/codersdk/drpcsdk" - - "github.com/coder/quartz" - "github.com/coder/coder/v2/coderd/apikey" "github.com/coder/coder/v2/coderd/audit" "github.com/coder/coder/v2/coderd/database" @@ -48,13 +41,18 @@ import ( "github.com/coder/coder/v2/coderd/schedule" "github.com/coder/coder/v2/coderd/telemetry" "github.com/coder/coder/v2/coderd/tracing" + "github.com/coder/coder/v2/coderd/usage" + "github.com/coder/coder/v2/coderd/usage/usagetypes" + "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/coderd/wspubsub" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/agentsdk" + "github.com/coder/coder/v2/codersdk/drpcsdk" "github.com/coder/coder/v2/provisioner" "github.com/coder/coder/v2/provisionerd/proto" "github.com/coder/coder/v2/provisionersdk" sdkproto "github.com/coder/coder/v2/provisionersdk/proto" + "github.com/coder/quartz" ) const ( @@ -121,6 +119,7 @@ type server struct { DeploymentValues *codersdk.DeploymentValues NotificationsEnqueuer notifications.Enqueuer PrebuildsOrchestrator *atomic.Pointer[prebuilds.ReconciliationOrchestrator] + UsageInserter *atomic.Pointer[usage.Inserter] OIDCConfig promoauth.OAuth2Config @@ -174,6 +173,7 @@ func NewServer( auditor *atomic.Pointer[audit.Auditor], templateScheduleStore *atomic.Pointer[schedule.TemplateScheduleStore], userQuietHoursScheduleStore *atomic.Pointer[schedule.UserQuietHoursScheduleStore], + usageInserter *atomic.Pointer[usage.Inserter], deploymentValues *codersdk.DeploymentValues, options Options, enqueuer notifications.Enqueuer, @@ -195,6 +195,9 @@ func NewServer( if userQuietHoursScheduleStore == nil { return nil, xerrors.New("userQuietHoursScheduleStore is nil") } + if usageInserter == nil { + return nil, xerrors.New("usageCollector is nil") + } if deploymentValues == nil { return nil, xerrors.New("deploymentValues is nil") } @@ -244,6 +247,7 @@ func NewServer( heartbeatInterval: options.HeartbeatInterval, heartbeatFn: options.HeartbeatFn, PrebuildsOrchestrator: prebuildsOrchestrator, + UsageInserter: usageInserter, } if s.heartbeatFn == nil { @@ -902,29 +906,93 @@ func (s *server) UpdateJob(ctx context.Context, request *proto.UpdateJobRequest) return nil, xerrors.Errorf("update job: %w", err) } - if len(request.Logs) > 0 { + if len(request.Logs) > 0 && !job.LogsOverflowed { //nolint:exhaustruct // We append to the additional fields below. insertParams := database.InsertProvisionerJobLogsParams{ JobID: parsedID, } + + newLogSize := 0 + overflowedErrorMsg := "Provisioner logs exceeded the max size of 1MB. Will not continue to write provisioner logs for workspace build." + lenErrMsg := len(overflowedErrorMsg) + + var ( + createdAt time.Time + level database.LogLevel + stage string + source database.LogSource + output string + ) + for _, log := range request.Logs { - logLevel, err := convertLogLevel(log.Level) + // Build our log params + level, err = convertLogLevel(log.Level) if err != nil { return nil, xerrors.Errorf("convert log level: %w", err) } - logSource, err := convertLogSource(log.Source) + source, err = convertLogSource(log.Source) if err != nil { return nil, xerrors.Errorf("convert log source: %w", err) } - insertParams.CreatedAt = append(insertParams.CreatedAt, time.UnixMilli(log.CreatedAt)) - insertParams.Level = append(insertParams.Level, logLevel) - insertParams.Stage = append(insertParams.Stage, log.Stage) - insertParams.Source = append(insertParams.Source, logSource) - insertParams.Output = append(insertParams.Output, log.Output) + createdAt = time.UnixMilli(log.CreatedAt) + stage = log.Stage + output = log.Output + + // Check if we would overflow the job logs (not leaving enough room for the error message) + willOverflow := int64(job.LogsLength)+int64(newLogSize)+int64(lenErrMsg)+int64(len(output)) > 1048576 + if willOverflow { + s.Logger.Debug(ctx, "provisioner job logs overflowed 1MB size limit in database", slog.F("job_id", parsedID)) + err = s.Database.UpdateProvisionerJobLogsOverflowed(ctx, database.UpdateProvisionerJobLogsOverflowedParams{ + ID: parsedID, + LogsOverflowed: true, + }) + if err != nil { + s.Logger.Error(ctx, "failed to set logs overflowed flag", slog.F("job_id", parsedID), slog.Error(err)) + } + + level = database.LogLevelWarn + output = overflowedErrorMsg + } + + newLogSize += len(output) + + insertParams.CreatedAt = append(insertParams.CreatedAt, createdAt) + insertParams.Level = append(insertParams.Level, level) + insertParams.Stage = append(insertParams.Stage, stage) + insertParams.Source = append(insertParams.Source, source) + insertParams.Output = append(insertParams.Output, output) s.Logger.Debug(ctx, "job log", slog.F("job_id", parsedID), - slog.F("stage", log.Stage), - slog.F("output", log.Output)) + slog.F("stage", stage), + slog.F("output", output)) + + // Don't write any more logs because there's no room. + if willOverflow { + break + } + } + + err = s.Database.UpdateProvisionerJobLogsLength(ctx, database.UpdateProvisionerJobLogsLengthParams{ + ID: parsedID, + LogsLength: int32(newLogSize), // #nosec G115 - Log output length is limited to 1MB (2^20) which fits in an int32. + }) + if err != nil { + // Even though we do the runtime check for the overflow, we still check for the database error + // as well. + if database.IsProvisionerJobLogsLimitError(err) { + err = s.Database.UpdateProvisionerJobLogsOverflowed(ctx, database.UpdateProvisionerJobLogsOverflowedParams{ + ID: parsedID, + LogsOverflowed: true, + }) + if err != nil { + s.Logger.Error(ctx, "failed to set logs overflowed flag", slog.F("job_id", parsedID), slog.Error(err)) + } + return &proto.UpdateJobResponse{ + Canceled: job.CanceledAt.Valid, + }, nil + } + s.Logger.Error(ctx, "failed to update logs length", slog.F("job_id", parsedID), slog.Error(err)) + return nil, xerrors.Errorf("update logs length: %w", err) } logs, err := s.Database.InsertProvisionerJobLogs(ctx, insertParams) @@ -932,6 +1000,7 @@ func (s *server) UpdateJob(ctx context.Context, request *proto.UpdateJobRequest) s.Logger.Error(ctx, "failed to insert job logs", slog.F("job_id", parsedID), slog.Error(err)) return nil, xerrors.Errorf("insert job logs: %w", err) } + // Publish by the lowest log ID inserted so the log stream will fetch // everything from that point. lowestID := logs[0].ID @@ -1118,11 +1187,18 @@ func (s *server) FailJob(ctx context.Context, failJob *proto.FailedJob) (*proto. if err != nil { return xerrors.Errorf("update workspace build state: %w", err) } + + deadline := build.Deadline + maxDeadline := build.MaxDeadline + if workspace.IsPrebuild() { + deadline = time.Time{} + maxDeadline = time.Time{} + } err = db.UpdateWorkspaceBuildDeadlineByID(ctx, database.UpdateWorkspaceBuildDeadlineByIDParams{ ID: input.WorkspaceBuildID, UpdatedAt: s.timeNow(), - Deadline: build.Deadline, - MaxDeadline: build.MaxDeadline, + Deadline: deadline, + MaxDeadline: maxDeadline, }) if err != nil { return xerrors.Errorf("update workspace build deadline: %w", err) @@ -1655,16 +1731,20 @@ func (s *server) completeTemplateImportJob(ctx context.Context, job database.Pro if err != nil { return xerrors.Errorf("update template version external auth providers: %w", err) } - err = db.UpdateTemplateVersionAITaskByJobID(ctx, database.UpdateTemplateVersionAITaskByJobIDParams{ + err = db.UpdateTemplateVersionFlagsByJobID(ctx, database.UpdateTemplateVersionFlagsByJobIDParams{ JobID: jobID, HasAITask: sql.NullBool{ Bool: jobType.TemplateImport.HasAiTasks, Valid: true, }, + HasExternalAgent: sql.NullBool{ + Bool: jobType.TemplateImport.HasExternalAgents, + Valid: true, + }, UpdatedAt: now, }) if err != nil { - return xerrors.Errorf("update template version external auth providers: %w", err) + return xerrors.Errorf("update template version ai task and external agent: %w", err) } // Process terraform values @@ -1795,37 +1875,47 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro return getWorkspaceError } - templateScheduleStore := *s.TemplateScheduleStore.Load() + // Prebuilt workspaces must not have Deadline or MaxDeadline set, + // as they are managed by the prebuild reconciliation loop, not the lifecycle executor + deadline := time.Time{} + maxDeadline := time.Time{} - autoStop, err := schedule.CalculateAutostop(ctx, schedule.CalculateAutostopParams{ - Database: db, - TemplateScheduleStore: templateScheduleStore, - UserQuietHoursScheduleStore: *s.UserQuietHoursScheduleStore.Load(), - Now: now, - Workspace: workspace.WorkspaceTable(), - // Allowed to be the empty string. - WorkspaceAutostart: workspace.AutostartSchedule.String, - }) - if err != nil { - return xerrors.Errorf("calculate auto stop: %w", err) - } + if !workspace.IsPrebuild() { + templateScheduleStore := *s.TemplateScheduleStore.Load() - if workspace.AutostartSchedule.Valid { - templateScheduleOptions, err := templateScheduleStore.Get(ctx, db, workspace.TemplateID) + autoStop, err := schedule.CalculateAutostop(ctx, schedule.CalculateAutostopParams{ + Database: db, + TemplateScheduleStore: templateScheduleStore, + UserQuietHoursScheduleStore: *s.UserQuietHoursScheduleStore.Load(), + // `now` is used below to set the build completion time. + WorkspaceBuildCompletedAt: now, + Workspace: workspace.WorkspaceTable(), + // Allowed to be the empty string. + WorkspaceAutostart: workspace.AutostartSchedule.String, + }) if err != nil { - return xerrors.Errorf("get template schedule options: %w", err) + return xerrors.Errorf("calculate auto stop: %w", err) } - nextStartAt, err := schedule.NextAllowedAutostart(now, workspace.AutostartSchedule.String, templateScheduleOptions) - if err == nil { - err = db.UpdateWorkspaceNextStartAt(ctx, database.UpdateWorkspaceNextStartAtParams{ - ID: workspace.ID, - NextStartAt: sql.NullTime{Valid: true, Time: nextStartAt.UTC()}, - }) + if workspace.AutostartSchedule.Valid { + templateScheduleOptions, err := templateScheduleStore.Get(ctx, db, workspace.TemplateID) if err != nil { - return xerrors.Errorf("update workspace next start at: %w", err) + return xerrors.Errorf("get template schedule options: %w", err) + } + + nextStartAt, err := schedule.NextAllowedAutostart(now, workspace.AutostartSchedule.String, templateScheduleOptions) + if err == nil { + err = db.UpdateWorkspaceNextStartAt(ctx, database.UpdateWorkspaceNextStartAtParams{ + ID: workspace.ID, + NextStartAt: sql.NullTime{Valid: true, Time: nextStartAt.UTC()}, + }) + if err != nil { + return xerrors.Errorf("update workspace next start at: %w", err) + } } } + deadline = autoStop.Deadline + maxDeadline = autoStop.MaxDeadline } err = db.UpdateProvisionerJobWithCompleteByID(ctx, database.UpdateProvisionerJobWithCompleteByIDParams{ @@ -1851,20 +1941,24 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro } err = db.UpdateWorkspaceBuildDeadlineByID(ctx, database.UpdateWorkspaceBuildDeadlineByIDParams{ ID: workspaceBuild.ID, - Deadline: autoStop.Deadline, - MaxDeadline: autoStop.MaxDeadline, + Deadline: deadline, + MaxDeadline: maxDeadline, UpdatedAt: now, }) if err != nil { return xerrors.Errorf("update workspace build deadline: %w", err) } + appIDs := make([]string, 0) agentTimeouts := make(map[time.Duration]bool) // A set of agent timeouts. // This could be a bulk insert to improve performance. for _, protoResource := range jobType.WorkspaceBuild.Resources { for _, protoAgent := range protoResource.Agents { dur := time.Duration(protoAgent.GetConnectionTimeoutSeconds()) * time.Second agentTimeouts[dur] = true + for _, app := range protoAgent.GetApps() { + appIDs = append(appIDs, app.GetId()) + } } err = InsertWorkspaceResource(ctx, db, job.ID, workspaceBuild.Transition, protoResource, telemetrySnapshot) @@ -1879,14 +1973,21 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro } var sidebarAppID uuid.NullUUID - hasAITask := len(jobType.WorkspaceBuild.AiTasks) == 1 - if hasAITask { - task := jobType.WorkspaceBuild.AiTasks[0] - if task.SidebarApp == nil { - return xerrors.Errorf("update ai task: sidebar app is nil") + var hasAITask bool + var warnUnknownSidebarAppID bool + if tasks := jobType.WorkspaceBuild.GetAiTasks(); len(tasks) > 0 { + hasAITask = true + task := tasks[0] + if task == nil || task.GetSidebarApp() == nil || len(task.GetSidebarApp().GetId()) == 0 { + return xerrors.Errorf("update ai task: sidebar app is nil or empty") + } + + sidebarTaskID := task.GetSidebarApp().GetId() + if !slices.Contains(appIDs, sidebarTaskID) { + warnUnknownSidebarAppID = true } - id, err := uuid.Parse(task.SidebarApp.Id) + id, err := uuid.Parse(task.GetSidebarApp().GetId()) if err != nil { return xerrors.Errorf("parse sidebar app id: %w", err) } @@ -1894,19 +1995,83 @@ func (s *server) completeWorkspaceBuildJob(ctx context.Context, job database.Pro sidebarAppID = uuid.NullUUID{UUID: id, Valid: true} } + if warnUnknownSidebarAppID { + // Ref: https://github.com/coder/coder/issues/18776 + // This can happen for a number of reasons: + // 1. Misconfigured template + // 2. Count=0 on the agent due to stop transition, meaning the associated coder_app was not inserted. + // Failing the build at this point is not ideal, so log a warning instead. + s.Logger.Warn(ctx, "unknown ai_task_sidebar_app_id", + slog.F("ai_task_sidebar_app_id", sidebarAppID.UUID.String()), + slog.F("job_id", job.ID.String()), + slog.F("workspace_id", workspace.ID), + slog.F("workspace_build_id", workspaceBuild.ID), + slog.F("transition", string(workspaceBuild.Transition)), + ) + // In order to surface this to the user, we will also insert a warning into the build logs. + if _, err := db.InsertProvisionerJobLogs(ctx, database.InsertProvisionerJobLogsParams{ + JobID: jobID, + CreatedAt: []time.Time{now, now, now, now}, + Source: []database.LogSource{database.LogSourceProvisionerDaemon, database.LogSourceProvisionerDaemon, database.LogSourceProvisionerDaemon, database.LogSourceProvisionerDaemon}, + Level: []database.LogLevel{database.LogLevelWarn, database.LogLevelWarn, database.LogLevelWarn, database.LogLevelWarn}, + Stage: []string{"Cleaning Up", "Cleaning Up", "Cleaning Up", "Cleaning Up"}, + Output: []string{ + fmt.Sprintf("Unknown ai_task_sidebar_app_id %q. This workspace will be unable to run AI tasks. This may be due to a template configuration issue, please check with the template author.", sidebarAppID.UUID.String()), + "Template author: double-check the following:", + " - You have associated the coder_ai_task with a valid coder_app in your template (ref: https://registry.terraform.io/providers/coder/coder/latest/docs/resources/ai_task).", + " - You have associated the coder_agent with at least one other compute resource. Agents with no other associated resources are not inserted into the database.", + }, + }); err != nil { + s.Logger.Error(ctx, "insert provisioner job log for ai task sidebar app id warning", + slog.F("job_id", jobID), + slog.F("workspace_id", workspace.ID), + slog.F("workspace_build_id", workspaceBuild.ID), + slog.F("transition", string(workspaceBuild.Transition)), + ) + } + // Important: reset hasAITask and sidebarAppID so that we don't run into a fk constraint violation. + hasAITask = false + sidebarAppID = uuid.NullUUID{} + } + + if hasAITask && workspaceBuild.Transition == database.WorkspaceTransitionStart { + // Insert usage event for managed agents. + usageInserter := s.UsageInserter.Load() + if usageInserter != nil { + event := usagetypes.DCManagedAgentsV1{ + Count: 1, + } + err = (*usageInserter).InsertDiscreteUsageEvent(ctx, db, event) + if err != nil { + return xerrors.Errorf("insert %q event: %w", event.EventType(), err) + } + } + } + + hasExternalAgent := false + for _, resource := range jobType.WorkspaceBuild.Resources { + if resource.Type == "coder_external_agent" { + hasExternalAgent = true + break + } + } + // Regardless of whether there is an AI task or not, update the field to indicate one way or the other since it // always defaults to nil. ONLY if has_ai_task=true MUST ai_task_sidebar_app_id be set. - err = db.UpdateWorkspaceBuildAITaskByID(ctx, database.UpdateWorkspaceBuildAITaskByIDParams{ + if err := db.UpdateWorkspaceBuildFlagsByID(ctx, database.UpdateWorkspaceBuildFlagsByIDParams{ ID: workspaceBuild.ID, HasAITask: sql.NullBool{ Bool: hasAITask, Valid: true, }, + HasExternalAgent: sql.NullBool{ + Bool: hasExternalAgent, + Valid: true, + }, SidebarAppID: sidebarAppID, UpdatedAt: now, - }) - if err != nil { - return xerrors.Errorf("update workspace build ai tasks flag: %w", err) + }); err != nil { + return xerrors.Errorf("update workspace build ai tasks and external agent flag: %w", err) } // Insert timings inside the transaction now @@ -2264,6 +2429,7 @@ func InsertWorkspacePresetAndParameters(ctx context.Context, db database.Store, prebuildSchedules = protoPreset.Prebuild.Scheduling.Schedule } } + dbPreset, err := tx.InsertPreset(ctx, database.InsertPresetParams{ ID: uuid.New(), TemplateVersionID: templateVersionID, @@ -2273,6 +2439,8 @@ func InsertWorkspacePresetAndParameters(ctx context.Context, db database.Store, InvalidateAfterSecs: ttl, SchedulingTimezone: schedulingTimezone, IsDefault: protoPreset.GetDefault(), + Description: protoPreset.Description, + Icon: protoPreset.Icon, }) if err != nil { return xerrors.Errorf("insert preset: %w", err) diff --git a/coderd/provisionerdserver/provisionerdserver_test.go b/coderd/provisionerdserver/provisionerdserver_test.go index 66684835650a8..8baa7c99c30b9 100644 --- a/coderd/provisionerdserver/provisionerdserver_test.go +++ b/coderd/provisionerdserver/provisionerdserver_test.go @@ -16,6 +16,7 @@ import ( "time" "github.com/google/uuid" + "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "go.opentelemetry.io/otel/trace" @@ -30,7 +31,9 @@ import ( "github.com/coder/coder/v2/buildinfo" "github.com/coder/coder/v2/coderd/audit" + "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbgen" "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/database/dbtime" @@ -44,6 +47,8 @@ import ( "github.com/coder/coder/v2/coderd/schedule" "github.com/coder/coder/v2/coderd/schedule/cron" "github.com/coder/coder/v2/coderd/telemetry" + "github.com/coder/coder/v2/coderd/usage" + "github.com/coder/coder/v2/coderd/usage/usagetypes" "github.com/coder/coder/v2/coderd/wspubsub" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/agentsdk" @@ -67,6 +72,13 @@ func testUserQuietHoursScheduleStore() *atomic.Pointer[schedule.UserQuietHoursSc return ptr } +func testUsageInserter() *atomic.Pointer[usage.Inserter] { + ptr := &atomic.Pointer[usage.Inserter]{} + inserter := usage.NewAGPLInserter() + ptr.Store(&inserter) + return ptr +} + func TestAcquireJob_LongPoll(t *testing.T) { t.Parallel() //nolint:dogsled @@ -535,7 +547,10 @@ func TestAcquireJob(t *testing.T) { ctx := context.Background() user := dbgen.User(t, db, database.User{}) - version := dbgen.TemplateVersion(t, db, database.TemplateVersion{}) + version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + CreatedBy: user.ID, + OrganizationID: pd.OrganizationID, + }) file := dbgen.File(t, db, database.File{CreatedBy: user.ID}) _ = dbgen.ProvisionerJob(t, db, ps, database.ProvisionerJob{ InitiatorID: user.ID, @@ -613,7 +628,10 @@ func TestAcquireJob(t *testing.T) { srv, db, ps, pd := setup(t, false, nil) user := dbgen.User(t, db, database.User{}) - version := dbgen.TemplateVersion(t, db, database.TemplateVersion{}) + version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + CreatedBy: user.ID, + OrganizationID: pd.OrganizationID, + }) file := dbgen.File(t, db, database.File{CreatedBy: user.ID}) _ = dbgen.ProvisionerJob(t, db, ps, database.ProvisionerJob{ FileID: file.ID, @@ -675,13 +693,22 @@ func TestUpdateJob(t *testing.T) { t.Run("NotRunning", func(t *testing.T) { t.Parallel() srv, db, _, pd := setup(t, false, nil) + user := dbgen.User(t, db, database.User{}) + version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + CreatedBy: user.ID, + OrganizationID: pd.OrganizationID, + JobID: uuid.New(), + }) job, err := db.InsertProvisionerJob(ctx, database.InsertProvisionerJobParams{ - ID: uuid.New(), + ID: version.JobID, Provisioner: database.ProvisionerTypeEcho, StorageMethod: database.ProvisionerStorageMethodFile, Type: database.ProvisionerJobTypeTemplateVersionDryRun, - Input: json.RawMessage("{}"), - Tags: pd.Tags, + Input: must(json.Marshal(provisionerdserver.TemplateVersionDryRunJob{ + TemplateVersionID: version.ID, + })), + OrganizationID: pd.OrganizationID, + Tags: pd.Tags, }) require.NoError(t, err) _, err = srv.UpdateJob(ctx, &proto.UpdateJobRequest{ @@ -693,13 +720,22 @@ func TestUpdateJob(t *testing.T) { t.Run("NotOwner", func(t *testing.T) { t.Parallel() srv, db, _, pd := setup(t, false, nil) + user := dbgen.User(t, db, database.User{}) + version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + CreatedBy: user.ID, + OrganizationID: pd.OrganizationID, + JobID: uuid.New(), + }) job, err := db.InsertProvisionerJob(ctx, database.InsertProvisionerJobParams{ - ID: uuid.New(), + ID: version.JobID, Provisioner: database.ProvisionerTypeEcho, StorageMethod: database.ProvisionerStorageMethodFile, Type: database.ProvisionerJobTypeTemplateVersionDryRun, - Input: json.RawMessage("{}"), - Tags: pd.Tags, + Input: must(json.Marshal(provisionerdserver.TemplateVersionDryRunJob{ + TemplateVersionID: version.ID, + })), + OrganizationID: pd.OrganizationID, + Tags: pd.Tags, }) require.NoError(t, err) _, err = db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ @@ -712,6 +748,7 @@ func TestUpdateJob(t *testing.T) { Time: dbtime.Now(), Valid: true, }, + OrganizationID: pd.OrganizationID, ProvisionerTags: must(json.Marshal(job.Tags)), }) require.NoError(t, err) @@ -721,36 +758,57 @@ func TestUpdateJob(t *testing.T) { require.ErrorContains(t, err, "you don't own this job") }) - setupJob := func(t *testing.T, db database.Store, srvID uuid.UUID, tags database.StringMap) uuid.UUID { - job, err := db.InsertProvisionerJob(ctx, database.InsertProvisionerJobParams{ - ID: uuid.New(), - Provisioner: database.ProvisionerTypeEcho, - Type: database.ProvisionerJobTypeTemplateVersionImport, - StorageMethod: database.ProvisionerStorageMethodFile, - Input: json.RawMessage("{}"), - Tags: tags, - }) - require.NoError(t, err) - _, err = db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ - WorkerID: uuid.NullUUID{ - UUID: srvID, - Valid: true, - }, - Types: []database.ProvisionerType{database.ProvisionerTypeEcho}, - StartedAt: sql.NullTime{ - Time: dbtime.Now(), - Valid: true, - }, - ProvisionerTags: must(json.Marshal(job.Tags)), - }) + setupJob := func(t *testing.T, db database.Store, srvID, orgID uuid.UUID, tags database.StringMap) (templateVersionID, jobID uuid.UUID) { + templateVersionID = uuid.New() + jobID = uuid.New() + err := db.InTx(func(db database.Store) error { + user := dbgen.User(t, db, database.User{}) + version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + ID: templateVersionID, + CreatedBy: user.ID, + OrganizationID: orgID, + JobID: jobID, + }) + job, err := db.InsertProvisionerJob(ctx, database.InsertProvisionerJobParams{ + ID: version.JobID, + OrganizationID: orgID, + Provisioner: database.ProvisionerTypeEcho, + Type: database.ProvisionerJobTypeTemplateVersionImport, + StorageMethod: database.ProvisionerStorageMethodFile, + Input: must(json.Marshal(provisionerdserver.TemplateVersionDryRunJob{ + TemplateVersionID: version.ID, + })), + Tags: tags, + }) + if err != nil { + return xerrors.Errorf("insert provisioner job: %w", err) + } + _, err = db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ + WorkerID: uuid.NullUUID{ + UUID: srvID, + Valid: true, + }, + Types: []database.ProvisionerType{database.ProvisionerTypeEcho}, + StartedAt: sql.NullTime{ + Time: dbtime.Now(), + Valid: true, + }, + OrganizationID: orgID, + ProvisionerTags: must(json.Marshal(job.Tags)), + }) + if err != nil { + return xerrors.Errorf("acquire provisioner job: %w", err) + } + return nil + }, nil) require.NoError(t, err) - return job.ID + return templateVersionID, jobID } t.Run("Success", func(t *testing.T) { t.Parallel() srv, db, _, pd := setup(t, false, &overrides{}) - job := setupJob(t, db, pd.ID, pd.Tags) + _, job := setupJob(t, db, pd.ID, pd.OrganizationID, pd.Tags) _, err := srv.UpdateJob(ctx, &proto.UpdateJobRequest{ JobId: job.String(), }) @@ -760,7 +818,7 @@ func TestUpdateJob(t *testing.T) { t.Run("Logs", func(t *testing.T) { t.Parallel() srv, db, ps, pd := setup(t, false, &overrides{}) - job := setupJob(t, db, pd.ID, pd.Tags) + _, job := setupJob(t, db, pd.ID, pd.OrganizationID, pd.Tags) published := make(chan struct{}) @@ -785,20 +843,14 @@ func TestUpdateJob(t *testing.T) { t.Run("Readme", func(t *testing.T) { t.Parallel() srv, db, _, pd := setup(t, false, &overrides{}) - job := setupJob(t, db, pd.ID, pd.Tags) - versionID := uuid.New() - err := db.InsertTemplateVersion(ctx, database.InsertTemplateVersionParams{ - ID: versionID, - JobID: job, - }) - require.NoError(t, err) - _, err = srv.UpdateJob(ctx, &proto.UpdateJobRequest{ + templateVersionID, job := setupJob(t, db, pd.ID, pd.OrganizationID, pd.Tags) + _, err := srv.UpdateJob(ctx, &proto.UpdateJobRequest{ JobId: job.String(), Readme: []byte("# hello world"), }) require.NoError(t, err) - version, err := db.GetTemplateVersionByID(ctx, versionID) + version, err := db.GetTemplateVersionByID(ctx, templateVersionID) require.NoError(t, err) require.Equal(t, "# hello world", version.Readme) }) @@ -811,13 +863,7 @@ func TestUpdateJob(t *testing.T) { defer cancel() srv, db, _, pd := setup(t, false, &overrides{}) - job := setupJob(t, db, pd.ID, pd.Tags) - versionID := uuid.New() - err := db.InsertTemplateVersion(ctx, database.InsertTemplateVersionParams{ - ID: versionID, - JobID: job, - }) - require.NoError(t, err) + templateVersionID, job := setupJob(t, db, pd.ID, pd.OrganizationID, pd.Tags) firstTemplateVariable := &sdkproto.TemplateVariable{ Name: "first", Type: "string", @@ -846,7 +892,7 @@ func TestUpdateJob(t *testing.T) { require.NoError(t, err) require.Len(t, response.VariableValues, 2) - templateVariables, err := db.GetTemplateVersionVariables(ctx, versionID) + templateVariables, err := db.GetTemplateVersionVariables(ctx, templateVersionID) require.NoError(t, err) require.Len(t, templateVariables, 2) require.Equal(t, templateVariables[0].Value, firstTemplateVariable.DefaultValue) @@ -858,13 +904,7 @@ func TestUpdateJob(t *testing.T) { defer cancel() srv, db, _, pd := setup(t, false, &overrides{}) - job := setupJob(t, db, pd.ID, pd.Tags) - versionID := uuid.New() - err := db.InsertTemplateVersion(ctx, database.InsertTemplateVersionParams{ - ID: versionID, - JobID: job, - }) - require.NoError(t, err) + templateVersionID, job := setupJob(t, db, pd.ID, pd.OrganizationID, pd.Tags) firstTemplateVariable := &sdkproto.TemplateVariable{ Name: "first", Type: "string", @@ -889,7 +929,7 @@ func TestUpdateJob(t *testing.T) { // Even though there is an error returned, variables are stored in the database // to show the schema in the site UI. - templateVariables, err := db.GetTemplateVersionVariables(ctx, versionID) + templateVariables, err := db.GetTemplateVersionVariables(ctx, templateVersionID) require.NoError(t, err) require.Len(t, templateVariables, 2) require.Equal(t, templateVariables[0].Value, firstTemplateVariable.DefaultValue) @@ -903,15 +943,9 @@ func TestUpdateJob(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) defer cancel() - srv, db, _, pd := setup(t, false, &overrides{}) - job := setupJob(t, db, pd.ID, pd.Tags) - versionID := uuid.New() - err := db.InsertTemplateVersion(ctx, database.InsertTemplateVersionParams{ - ID: versionID, - JobID: job, - }) - require.NoError(t, err) - _, err = srv.UpdateJob(ctx, &proto.UpdateJobRequest{ + srv, db, _, pd := setup(t, false, nil) + templateVersionID, job := setupJob(t, db, pd.ID, pd.OrganizationID, pd.Tags) + _, err := srv.UpdateJob(ctx, &proto.UpdateJobRequest{ JobId: job.String(), WorkspaceTags: map[string]string{ "bird": "tweety", @@ -920,7 +954,7 @@ func TestUpdateJob(t *testing.T) { }) require.NoError(t, err) - workspaceTags, err := db.GetTemplateVersionWorkspaceTags(ctx, versionID) + workspaceTags, err := db.GetTemplateVersionWorkspaceTags(ctx, templateVersionID) require.NoError(t, err) require.Len(t, workspaceTags, 2) require.Equal(t, workspaceTags[0].Key, "bird") @@ -928,6 +962,141 @@ func TestUpdateJob(t *testing.T) { require.Equal(t, workspaceTags[1].Key, "cat") require.Equal(t, workspaceTags[1].Value, "jinx") }) + + t.Run("LogSizeLimit", func(t *testing.T) { + t.Parallel() + srv, db, _, pd := setup(t, false, &overrides{}) + _, job := setupJob(t, db, pd.ID, pd.OrganizationID, pd.Tags) + + // Create a log message that exceeds the 1MB limit + largeOutput := strings.Repeat("a", 1048577) // 1MB + 1 byte + + _, err := srv.UpdateJob(ctx, &proto.UpdateJobRequest{ + JobId: job.String(), + Logs: []*proto.Log{{ + Source: proto.LogSource_PROVISIONER, + Level: sdkproto.LogLevel_INFO, + Output: largeOutput, + }}, + }) + require.NoError(t, err) // Should succeed but trigger overflow + + // Verify the overflow flag is set + jobResult, err := db.GetProvisionerJobByID(ctx, job) + require.NoError(t, err) + require.True(t, jobResult.LogsOverflowed) + }) + + t.Run("IncrementalLogSizeOverflow", func(t *testing.T) { + t.Parallel() + srv, db, _, pd := setup(t, false, &overrides{}) + _, job := setupJob(t, db, pd.ID, pd.OrganizationID, pd.Tags) + + // Send logs that together exceed the limit + mediumOutput := strings.Repeat("b", 524289) // Half a MB + 1 byte + + // First log - should succeed + _, err := srv.UpdateJob(ctx, &proto.UpdateJobRequest{ + JobId: job.String(), + Logs: []*proto.Log{{ + Source: proto.LogSource_PROVISIONER, + Level: sdkproto.LogLevel_INFO, + Output: mediumOutput, + }}, + }) + require.NoError(t, err) + + // Verify overflow flag not yet set + jobResult, err := db.GetProvisionerJobByID(ctx, job) + require.NoError(t, err) + require.False(t, jobResult.LogsOverflowed) + + // Second log - should trigger overflow + _, err = srv.UpdateJob(ctx, &proto.UpdateJobRequest{ + JobId: job.String(), + Logs: []*proto.Log{{ + Source: proto.LogSource_PROVISIONER, + Level: sdkproto.LogLevel_INFO, + Output: mediumOutput, + }}, + }) + require.NoError(t, err) + + // Verify overflow flag is set + jobResult, err = db.GetProvisionerJobByID(ctx, job) + require.NoError(t, err) + require.True(t, jobResult.LogsOverflowed) + }) + + t.Run("LogSizeTracking", func(t *testing.T) { + t.Parallel() + srv, db, _, pd := setup(t, false, &overrides{}) + _, job := setupJob(t, db, pd.ID, pd.OrganizationID, pd.Tags) + + logOutput := "test log message" + expectedSize := int32(len(logOutput)) // #nosec G115 - Log length is 16. + + _, err := srv.UpdateJob(ctx, &proto.UpdateJobRequest{ + JobId: job.String(), + Logs: []*proto.Log{{ + Source: proto.LogSource_PROVISIONER, + Level: sdkproto.LogLevel_INFO, + Output: logOutput, + }}, + }) + require.NoError(t, err) + + // Verify the logs_length is correctly tracked + jobResult, err := db.GetProvisionerJobByID(ctx, job) + require.NoError(t, err) + require.Equal(t, expectedSize, jobResult.LogsLength) + require.False(t, jobResult.LogsOverflowed) + }) + + t.Run("LogOverflowStopsProcessing", func(t *testing.T) { + t.Parallel() + srv, db, _, pd := setup(t, false, &overrides{}) + _, job := setupJob(t, db, pd.ID, pd.OrganizationID, pd.Tags) + + // First: trigger overflow + largeOutput := strings.Repeat("a", 1048577) // 1MB + 1 byte + _, err := srv.UpdateJob(ctx, &proto.UpdateJobRequest{ + JobId: job.String(), + Logs: []*proto.Log{{ + Source: proto.LogSource_PROVISIONER, + Level: sdkproto.LogLevel_INFO, + Output: largeOutput, + }}, + }) + require.NoError(t, err) + + // Get the initial log count + initialLogs, err := db.GetProvisionerLogsAfterID(ctx, database.GetProvisionerLogsAfterIDParams{ + JobID: job, + CreatedAfter: -1, + }) + require.NoError(t, err) + initialCount := len(initialLogs) + + // Second: try to send more logs - should be ignored + _, err = srv.UpdateJob(ctx, &proto.UpdateJobRequest{ + JobId: job.String(), + Logs: []*proto.Log{{ + Source: proto.LogSource_PROVISIONER, + Level: sdkproto.LogLevel_INFO, + Output: "this should be ignored", + }}, + }) + require.NoError(t, err) + + // Verify no new logs were added + finalLogs, err := db.GetProvisionerLogsAfterID(ctx, database.GetProvisionerLogsAfterIDParams{ + JobID: job, + CreatedAfter: -1, + }) + require.NoError(t, err) + require.Equal(t, initialCount, len(finalLogs)) + }) } func TestFailJob(t *testing.T) { @@ -950,13 +1119,22 @@ func TestFailJob(t *testing.T) { t.Run("NotOwner", func(t *testing.T) { t.Parallel() srv, db, _, pd := setup(t, false, nil) + user := dbgen.User(t, db, database.User{}) + version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + CreatedBy: user.ID, + OrganizationID: pd.OrganizationID, + JobID: uuid.New(), + }) job, err := db.InsertProvisionerJob(ctx, database.InsertProvisionerJobParams{ - ID: uuid.New(), + ID: version.JobID, Provisioner: database.ProvisionerTypeEcho, StorageMethod: database.ProvisionerStorageMethodFile, Type: database.ProvisionerJobTypeTemplateVersionImport, - Input: json.RawMessage("{}"), - Tags: pd.Tags, + Input: must(json.Marshal(provisionerdserver.TemplateVersionImportJob{ + TemplateVersionID: version.ID, + })), + OrganizationID: pd.OrganizationID, + Tags: pd.Tags, }) require.NoError(t, err) _, err = db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ @@ -969,6 +1147,7 @@ func TestFailJob(t *testing.T) { Time: dbtime.Now(), Valid: true, }, + OrganizationID: pd.OrganizationID, ProvisionerTags: must(json.Marshal(job.Tags)), }) require.NoError(t, err) @@ -979,14 +1158,23 @@ func TestFailJob(t *testing.T) { }) t.Run("AlreadyCompleted", func(t *testing.T) { t.Parallel() - srv, db, _, pd := setup(t, false, &overrides{}) + srv, db, _, pd := setup(t, false, nil) + user := dbgen.User(t, db, database.User{}) + version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + CreatedBy: user.ID, + OrganizationID: pd.OrganizationID, + JobID: uuid.New(), + }) job, err := db.InsertProvisionerJob(ctx, database.InsertProvisionerJobParams{ - ID: uuid.New(), + ID: version.JobID, Provisioner: database.ProvisionerTypeEcho, Type: database.ProvisionerJobTypeTemplateVersionImport, StorageMethod: database.ProvisionerStorageMethodFile, - Input: json.RawMessage("{}"), - Tags: pd.Tags, + Input: must(json.Marshal(provisionerdserver.TemplateVersionImportJob{ + TemplateVersionID: version.ID, + })), + OrganizationID: pd.OrganizationID, + Tags: pd.Tags, }) require.NoError(t, err) _, err = db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ @@ -999,6 +1187,7 @@ func TestFailJob(t *testing.T) { Time: dbtime.Now(), Valid: true, }, + OrganizationID: pd.OrganizationID, ProvisionerTags: must(json.Marshal(job.Tags)), }) require.NoError(t, err) @@ -1017,19 +1206,20 @@ func TestFailJob(t *testing.T) { }) t.Run("WorkspaceBuild", func(t *testing.T) { t.Parallel() - // Ignore log errors because we get: - // - // (*Server).FailJob audit log - get build {"error": "sql: no rows in result set"} - ignoreLogErrors := true auditor := audit.NewMock() - srv, db, ps, pd := setup(t, ignoreLogErrors, &overrides{ + srv, db, ps, pd := setup(t, false, &overrides{ auditor: auditor, }) org := dbgen.Organization(t, db, database.Organization{}) u := dbgen.User(t, db, database.User{}) - tpl := dbgen.Template(t, db, database.Template{ - OrganizationID: org.ID, + tv := dbgen.TemplateVersion(t, db, database.TemplateVersion{ CreatedBy: u.ID, + OrganizationID: org.ID, + }) + tpl := dbgen.Template(t, db, database.Template{ + OrganizationID: org.ID, + CreatedBy: u.ID, + ActiveVersionID: tv.ID, }) workspace, err := db.InsertWorkspace(ctx, database.InsertWorkspaceParams{ ID: uuid.New(), @@ -1046,22 +1236,24 @@ func TestFailJob(t *testing.T) { require.NoError(t, err) job, err := db.InsertProvisionerJob(ctx, database.InsertProvisionerJobParams{ - ID: uuid.New(), - Input: input, - InitiatorID: workspace.OwnerID, - Provisioner: database.ProvisionerTypeEcho, - Type: database.ProvisionerJobTypeWorkspaceBuild, - StorageMethod: database.ProvisionerStorageMethodFile, - Tags: pd.Tags, + ID: uuid.New(), + Input: input, + InitiatorID: workspace.OwnerID, + OrganizationID: pd.OrganizationID, + Provisioner: database.ProvisionerTypeEcho, + Type: database.ProvisionerJobTypeWorkspaceBuild, + StorageMethod: database.ProvisionerStorageMethodFile, + Tags: pd.Tags, }) require.NoError(t, err) err = db.InsertWorkspaceBuild(ctx, database.InsertWorkspaceBuildParams{ - ID: buildID, - WorkspaceID: workspace.ID, - InitiatorID: workspace.OwnerID, - Transition: database.WorkspaceTransitionStart, - Reason: database.BuildReasonInitiator, - JobID: job.ID, + ID: buildID, + WorkspaceID: workspace.ID, + InitiatorID: workspace.OwnerID, + TemplateVersionID: tpl.ActiveVersionID, + Transition: database.WorkspaceTransitionStart, + Reason: database.BuildReasonInitiator, + JobID: job.ID, }) require.NoError(t, err) @@ -1075,6 +1267,7 @@ func TestFailJob(t *testing.T) { Time: dbtime.Now(), Valid: true, }, + OrganizationID: pd.OrganizationID, ProvisionerTags: must(json.Marshal(job.Tags)), }) require.NoError(t, err) @@ -1144,14 +1337,22 @@ func TestCompleteJob(t *testing.T) { t.Run("NotOwner", func(t *testing.T) { t.Parallel() srv, db, _, pd := setup(t, false, nil) + user := dbgen.User(t, db, database.User{}) + version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + CreatedBy: user.ID, + OrganizationID: pd.OrganizationID, + JobID: uuid.New(), + }) job, err := db.InsertProvisionerJob(ctx, database.InsertProvisionerJobParams{ - ID: uuid.New(), + ID: version.JobID, Provisioner: database.ProvisionerTypeEcho, StorageMethod: database.ProvisionerStorageMethodFile, - Type: database.ProvisionerJobTypeWorkspaceBuild, + Type: database.ProvisionerJobTypeTemplateVersionImport, OrganizationID: pd.OrganizationID, - Input: json.RawMessage("{}"), - Tags: pd.Tags, + Input: must(json.Marshal(provisionerdserver.TemplateVersionDryRunJob{ + TemplateVersionID: version.ID, + })), + Tags: pd.Tags, }) require.NoError(t, err) _, err = db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ @@ -1183,7 +1384,9 @@ func TestCompleteJob(t *testing.T) { srv, db, _, pd := setup(t, false, &overrides{}) jobID := uuid.New() versionID := uuid.New() + user := dbgen.User(t, db, database.User{}) err := db.InsertTemplateVersion(ctx, database.InsertTemplateVersionParams{ + CreatedBy: user.ID, ID: versionID, JobID: jobID, OrganizationID: pd.OrganizationID, @@ -1193,10 +1396,12 @@ func TestCompleteJob(t *testing.T) { OrganizationID: pd.OrganizationID, ID: jobID, Provisioner: database.ProvisionerTypeEcho, - Input: []byte(`{"template_version_id": "` + versionID.String() + `"}`), - StorageMethod: database.ProvisionerStorageMethodFile, - Type: database.ProvisionerJobTypeTemplateVersionImport, - Tags: pd.Tags, + Input: must(json.Marshal(provisionerdserver.TemplateVersionImportJob{ + TemplateVersionID: versionID, + })), + StorageMethod: database.ProvisionerStorageMethodFile, + Type: database.ProvisionerJobTypeTemplateVersionImport, + Tags: pd.Tags, }) require.NoError(t, err) _, err = db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ @@ -1241,13 +1446,23 @@ func TestCompleteJob(t *testing.T) { t.Run("TemplateDryRunTransaction", func(t *testing.T) { t.Parallel() srv, db, _, pd := setup(t, false, &overrides{}) + org := dbgen.Organization(t, db, database.Organization{}) + user := dbgen.User(t, db, database.User{}) + version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + CreatedBy: user.ID, + OrganizationID: org.ID, + JobID: uuid.New(), + }) job, err := db.InsertProvisionerJob(ctx, database.InsertProvisionerJobParams{ - ID: uuid.New(), - Provisioner: database.ProvisionerTypeEcho, - Type: database.ProvisionerJobTypeTemplateVersionDryRun, - StorageMethod: database.ProvisionerStorageMethodFile, - Input: json.RawMessage("{}"), - Tags: pd.Tags, + ID: uuid.New(), + OrganizationID: org.ID, + Provisioner: database.ProvisionerTypeEcho, + Type: database.ProvisionerJobTypeTemplateVersionDryRun, + StorageMethod: database.ProvisionerStorageMethodFile, + Input: must(json.Marshal(provisionerdserver.TemplateVersionDryRunJob{ + TemplateVersionID: version.ID, + })), + Tags: pd.Tags, }) require.NoError(t, err) _, err = db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ @@ -1255,6 +1470,7 @@ func TestCompleteJob(t *testing.T) { UUID: pd.ID, Valid: true, }, + OrganizationID: org.ID, Types: []database.ProvisionerType{database.ProvisionerTypeEcho}, ProvisionerTags: must(json.Marshal(job.Tags)), StartedAt: sql.NullTime{Time: job.CreatedAt, Valid: true}, @@ -1295,6 +1511,7 @@ func TestCompleteJob(t *testing.T) { user := dbgen.User(t, db, database.User{}) template := dbgen.Template(t, db, database.Template{ Name: "template", + CreatedBy: user.ID, Provisioner: database.ProvisionerTypeEcho, OrganizationID: pd.OrganizationID, }) @@ -1306,27 +1523,32 @@ func TestCompleteJob(t *testing.T) { }) version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ OrganizationID: pd.OrganizationID, + CreatedBy: user.ID, TemplateID: uuid.NullUUID{ UUID: template.ID, Valid: true, }, JobID: uuid.New(), }) - build := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ - WorkspaceID: workspaceTable.ID, - TemplateVersionID: version.ID, - Transition: database.WorkspaceTransitionStart, - Reason: database.BuildReasonInitiator, - }) + wsBuildID := uuid.New() job := dbgen.ProvisionerJob(t, db, ps, database.ProvisionerJob{ + ID: uuid.New(), FileID: file.ID, InitiatorID: user.ID, Type: database.ProvisionerJobTypeWorkspaceBuild, Input: must(json.Marshal(provisionerdserver.WorkspaceProvisionJob{ - WorkspaceBuildID: build.ID, + WorkspaceBuildID: wsBuildID, })), OrganizationID: pd.OrganizationID, }) + _ = dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ + ID: wsBuildID, + JobID: job.ID, + WorkspaceID: workspaceTable.ID, + TemplateVersionID: version.ID, + Transition: database.WorkspaceTransitionStart, + Reason: database.BuildReasonInitiator, + }) _, err := db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ OrganizationID: pd.OrganizationID, WorkerID: uuid.NullUUID{ @@ -1451,23 +1673,49 @@ func TestCompleteJob(t *testing.T) { t.Parallel() srv, db, _, pd := setup(t, false, &overrides{}) jobID := uuid.New() - versionID := uuid.New() - err := db.InsertTemplateVersion(ctx, database.InsertTemplateVersionParams{ - ID: versionID, - JobID: jobID, + user := dbgen.User(t, db, database.User{}) + tv := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + CreatedBy: user.ID, OrganizationID: pd.OrganizationID, + JobID: jobID, + }) + template := dbgen.Template(t, db, database.Template{ + CreatedBy: user.ID, + OrganizationID: pd.OrganizationID, + ActiveVersionID: tv.ID, + }) + err := db.UpdateTemplateVersionByID(ctx, database.UpdateTemplateVersionByIDParams{ + ID: tv.ID, + TemplateID: uuid.NullUUID{ + UUID: template.ID, + Valid: true, + }, + UpdatedAt: dbtime.Now(), + Name: tv.Name, + Message: tv.Message, }) require.NoError(t, err) + workspace := dbgen.Workspace(t, db, database.WorkspaceTable{ + OwnerID: user.ID, + OrganizationID: pd.OrganizationID, + TemplateID: template.ID, + }) job, err := db.InsertProvisionerJob(ctx, database.InsertProvisionerJobParams{ ID: jobID, Provisioner: database.ProvisionerTypeEcho, - Input: []byte(`{"template_version_id": "` + versionID.String() + `"}`), + Input: json.RawMessage("{}"), StorageMethod: database.ProvisionerStorageMethodFile, Type: database.ProvisionerJobTypeWorkspaceBuild, OrganizationID: pd.OrganizationID, Tags: pd.Tags, }) require.NoError(t, err) + _ = dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ + WorkspaceID: workspace.ID, + TemplateVersionID: tv.ID, + InitiatorID: user.ID, + JobID: jobID, + }) _, err = db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ OrganizationID: pd.OrganizationID, WorkerID: uuid.NullUUID{ @@ -1509,18 +1757,22 @@ func TestCompleteJob(t *testing.T) { srv, db, _, pd := setup(t, false, &overrides{}) jobID := uuid.New() versionID := uuid.New() + user := dbgen.User(t, db, database.User{}) err := db.InsertTemplateVersion(ctx, database.InsertTemplateVersionParams{ + CreatedBy: user.ID, ID: versionID, JobID: jobID, OrganizationID: pd.OrganizationID, }) require.NoError(t, err) job, err := db.InsertProvisionerJob(ctx, database.InsertProvisionerJobParams{ - ID: jobID, - Provisioner: database.ProvisionerTypeEcho, - Input: []byte(`{"template_version_id": "` + versionID.String() + `"}`), + ID: jobID, + Provisioner: database.ProvisionerTypeEcho, + Input: must(json.Marshal(provisionerdserver.TemplateVersionImportJob{ + TemplateVersionID: versionID, + })), StorageMethod: database.ProvisionerStorageMethodFile, - Type: database.ProvisionerJobTypeWorkspaceBuild, + Type: database.ProvisionerJobTypeTemplateVersionImport, OrganizationID: pd.OrganizationID, Tags: pd.Tags, }) @@ -1573,8 +1825,10 @@ func TestCompleteJob(t *testing.T) { }) jobID := uuid.New() versionID := uuid.New() + user := dbgen.User(t, db, database.User{}) err := db.InsertTemplateVersion(ctx, database.InsertTemplateVersionParams{ ID: versionID, + CreatedBy: user.ID, JobID: jobID, OrganizationID: pd.OrganizationID, }) @@ -1583,10 +1837,12 @@ func TestCompleteJob(t *testing.T) { OrganizationID: pd.OrganizationID, ID: jobID, Provisioner: database.ProvisionerTypeEcho, - Input: []byte(`{"template_version_id": "` + versionID.String() + `"}`), - StorageMethod: database.ProvisionerStorageMethodFile, - Type: database.ProvisionerJobTypeWorkspaceBuild, - Tags: pd.Tags, + Input: must(json.Marshal(provisionerdserver.TemplateVersionImportJob{ + TemplateVersionID: versionID, + })), + StorageMethod: database.ProvisionerStorageMethodFile, + Type: database.ProvisionerJobTypeTemplateVersionImport, + Tags: pd.Tags, }) require.NoError(t, err) _, err = db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ @@ -1761,6 +2017,7 @@ func TestCompleteJob(t *testing.T) { QuietHoursSchedule: c.userQuietHoursSchedule, }) template := dbgen.Template(t, db, database.Template{ + CreatedBy: user.ID, Name: "template", Provisioner: database.ProvisionerTypeEcho, OrganizationID: pd.OrganizationID, @@ -1792,6 +2049,7 @@ func TestCompleteJob(t *testing.T) { OrganizationID: pd.OrganizationID, }) version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + CreatedBy: user.ID, OrganizationID: pd.OrganizationID, TemplateID: uuid.NullUUID{ UUID: template.ID, @@ -1799,22 +2057,25 @@ func TestCompleteJob(t *testing.T) { }, JobID: uuid.New(), }) - build := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ - WorkspaceID: workspaceTable.ID, - InitiatorID: user.ID, - TemplateVersionID: version.ID, - Transition: c.transition, - Reason: database.BuildReasonInitiator, - }) + buildID := uuid.New() job := dbgen.ProvisionerJob(t, db, ps, database.ProvisionerJob{ FileID: file.ID, InitiatorID: user.ID, Type: database.ProvisionerJobTypeWorkspaceBuild, Input: must(json.Marshal(provisionerdserver.WorkspaceProvisionJob{ - WorkspaceBuildID: build.ID, + WorkspaceBuildID: buildID, })), OrganizationID: pd.OrganizationID, }) + build := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ + ID: buildID, + JobID: job.ID, + WorkspaceID: workspaceTable.ID, + InitiatorID: user.ID, + TemplateVersionID: version.ID, + Transition: c.transition, + Reason: database.BuildReasonInitiator, + }) _, err = db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ OrganizationID: pd.OrganizationID, WorkerID: uuid.NullUUID{ @@ -1903,13 +2164,22 @@ func TestCompleteJob(t *testing.T) { t.Run("TemplateDryRun", func(t *testing.T) { t.Parallel() srv, db, _, pd := setup(t, false, &overrides{}) + user := dbgen.User(t, db, database.User{}) + version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + CreatedBy: user.ID, + OrganizationID: pd.OrganizationID, + JobID: uuid.New(), + }) job, err := db.InsertProvisionerJob(ctx, database.InsertProvisionerJobParams{ - ID: uuid.New(), + ID: version.JobID, Provisioner: database.ProvisionerTypeEcho, Type: database.ProvisionerJobTypeTemplateVersionDryRun, StorageMethod: database.ProvisionerStorageMethodFile, - Input: json.RawMessage("{}"), - Tags: pd.Tags, + Input: must(json.Marshal(provisionerdserver.TemplateVersionDryRunJob{ + TemplateVersionID: version.ID, + })), + OrganizationID: pd.OrganizationID, + Tags: pd.Tags, }) require.NoError(t, err) _, err = db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ @@ -1922,6 +2192,7 @@ func TestCompleteJob(t *testing.T) { Time: dbtime.Now(), Valid: true, }, + OrganizationID: pd.OrganizationID, ProvisionerTags: must(json.Marshal(job.Tags)), }) require.NoError(t, err) @@ -2001,8 +2272,10 @@ func TestCompleteJob(t *testing.T) { Transition: database.WorkspaceTransitionStart, }}, provisionerJobParams: database.InsertProvisionerJobParams{ - Type: database.ProvisionerJobTypeTemplateVersionDryRun, - Input: json.RawMessage("{}"), + Type: database.ProvisionerJobTypeTemplateVersionDryRun, + Input: must(json.Marshal(provisionerdserver.TemplateVersionDryRunJob{ + TemplateVersionID: templateVersionID, + })), }, }, { @@ -2147,34 +2420,46 @@ func TestCompleteJob(t *testing.T) { if jobParams.Tags == nil { jobParams.Tags = pd.Tags } + if jobParams.OrganizationID == uuid.Nil { + jobParams.OrganizationID = pd.OrganizationID + } user := dbgen.User(t, db, database.User{}) job, err := db.InsertProvisionerJob(ctx, jobParams) + require.NoError(t, err) tpl := dbgen.Template(t, db, database.Template{ + CreatedBy: user.ID, OrganizationID: pd.OrganizationID, }) tv := dbgen.TemplateVersion(t, db, database.TemplateVersion{ - TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true}, - JobID: job.ID, - }) - workspace := dbgen.Workspace(t, db, database.WorkspaceTable{ - TemplateID: tpl.ID, + ID: templateVersionID, + CreatedBy: user.ID, OrganizationID: pd.OrganizationID, - OwnerID: user.ID, - }) - _ = dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ - ID: workspaceBuildID, - JobID: job.ID, - WorkspaceID: workspace.ID, - TemplateVersionID: tv.ID, + TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true}, + JobID: job.ID, }) + if jobParams.Type == database.ProvisionerJobTypeWorkspaceBuild { + workspace := dbgen.Workspace(t, db, database.WorkspaceTable{ + TemplateID: tpl.ID, + OrganizationID: pd.OrganizationID, + OwnerID: user.ID, + }) + _ = dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ + ID: workspaceBuildID, + JobID: job.ID, + WorkspaceID: workspace.ID, + TemplateVersionID: tv.ID, + }) + } + require.NoError(t, err) _, err = db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ WorkerID: uuid.NullUUID{ UUID: pd.ID, Valid: true, }, + OrganizationID: pd.OrganizationID, Types: []database.ProvisionerType{jobParams.Provisioner}, ProvisionerTags: must(json.Marshal(job.Tags)), StartedAt: sql.NullTime{Time: job.CreatedAt, Valid: true}, @@ -2364,6 +2649,7 @@ func TestCompleteJob(t *testing.T) { // Given: a workspace build which simulates claiming a prebuild. user := dbgen.User(t, db, database.User{}) template := dbgen.Template(t, db, database.Template{ + CreatedBy: user.ID, Name: "template", Provisioner: database.ProvisionerTypeEcho, OrganizationID: pd.OrganizationID, @@ -2375,6 +2661,7 @@ func TestCompleteJob(t *testing.T) { OrganizationID: pd.OrganizationID, }) version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + CreatedBy: user.ID, OrganizationID: pd.OrganizationID, TemplateID: uuid.NullUUID{ UUID: template.ID, @@ -2382,23 +2669,26 @@ func TestCompleteJob(t *testing.T) { }, JobID: uuid.New(), }) - build := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ - WorkspaceID: workspaceTable.ID, - InitiatorID: user.ID, - TemplateVersionID: version.ID, - Transition: database.WorkspaceTransitionStart, - Reason: database.BuildReasonInitiator, - }) + buildID := uuid.New() job := dbgen.ProvisionerJob(t, db, ps, database.ProvisionerJob{ FileID: file.ID, InitiatorID: user.ID, Type: database.ProvisionerJobTypeWorkspaceBuild, Input: must(json.Marshal(provisionerdserver.WorkspaceProvisionJob{ - WorkspaceBuildID: build.ID, + WorkspaceBuildID: buildID, PrebuiltWorkspaceBuildStage: sdkproto.PrebuiltWorkspaceBuildStage_CLAIM, })), OrganizationID: pd.OrganizationID, }) + _ = dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ + ID: buildID, + JobID: job.ID, + WorkspaceID: workspaceTable.ID, + InitiatorID: user.ID, + TemplateVersionID: version.ID, + Transition: database.WorkspaceTransitionStart, + Reason: database.BuildReasonInitiator, + }) _, err := db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ OrganizationID: pd.OrganizationID, WorkerID: uuid.NullUUID{ @@ -2469,17 +2759,23 @@ func TestCompleteJob(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() - srv, db, _, pd := setup(t, false, &overrides{}) + fakeUsageInserter, usageInserterPtr := newFakeUsageInserter() + srv, db, _, pd := setup(t, false, &overrides{ + usageInserter: usageInserterPtr, + }) importJobID := uuid.New() tvID := uuid.New() + templateAdminUser := dbgen.User(t, db, database.User{RBACRoles: []string{codersdk.RoleTemplateAdmin}}) template := dbgen.Template(t, db, database.Template{ Name: "template", + CreatedBy: templateAdminUser.ID, Provisioner: database.ProvisionerTypeEcho, OrganizationID: pd.OrganizationID, }) version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ ID: tvID, + CreatedBy: templateAdminUser.ID, OrganizationID: pd.OrganizationID, TemplateID: uuid.NullUUID{ UUID: template.ID, @@ -2535,6 +2831,10 @@ func TestCompleteJob(t *testing.T) { require.NoError(t, err) require.True(t, version.HasAITask.Valid) // We ALWAYS expect a value to be set, therefore not nil, i.e. valid = true. require.Equal(t, tc.expected, version.HasAITask.Bool) + + // We never expect a usage event to be collected for + // template imports. + require.Empty(t, fakeUsageInserter.collectedEvents) }) } }) @@ -2544,22 +2844,27 @@ func TestCompleteJob(t *testing.T) { // will be set as well in that case. t.Run("WorkspaceBuild", func(t *testing.T) { type testcase struct { - name string - input *proto.CompletedJob_WorkspaceBuild - expected bool + name string + transition database.WorkspaceTransition + input *proto.CompletedJob_WorkspaceBuild + expectHasAiTask bool + expectUsageEvent bool } sidebarAppID := uuid.NewString() for _, tc := range []testcase{ { - name: "has_ai_task is false by default", - input: &proto.CompletedJob_WorkspaceBuild{ + name: "has_ai_task is false by default", + transition: database.WorkspaceTransitionStart, + input: &proto.CompletedJob_WorkspaceBuild{ // No AiTasks defined. }, - expected: false, + expectHasAiTask: false, + expectUsageEvent: false, }, { - name: "has_ai_task is set to true", + name: "has_ai_task is set to true", + transition: database.WorkspaceTransitionStart, input: &proto.CompletedJob_WorkspaceBuild{ AiTasks: []*sdkproto.AITask{ { @@ -2569,24 +2874,97 @@ func TestCompleteJob(t *testing.T) { }, }, }, + Resources: []*sdkproto.Resource{ + { + Agents: []*sdkproto.Agent{ + { + Id: uuid.NewString(), + Name: "a", + Apps: []*sdkproto.App{ + { + Id: sidebarAppID, + Slug: "test-app", + }, + }, + }, + }, + }, + }, }, - expected: true, + expectHasAiTask: true, + expectUsageEvent: true, + }, + // Checks regression for https://github.com/coder/coder/issues/18776 + { + name: "non-existing app", + transition: database.WorkspaceTransitionStart, + input: &proto.CompletedJob_WorkspaceBuild{ + AiTasks: []*sdkproto.AITask{ + { + Id: uuid.NewString(), + SidebarApp: &sdkproto.AITaskSidebarApp{ + // Non-existing app ID would previously trigger a FK violation. + Id: uuid.NewString(), + }, + }, + }, + }, + expectHasAiTask: false, + expectUsageEvent: false, + }, + { + name: "has_ai_task is set to true, but transition is not start", + transition: database.WorkspaceTransitionStop, + input: &proto.CompletedJob_WorkspaceBuild{ + AiTasks: []*sdkproto.AITask{ + { + Id: uuid.NewString(), + SidebarApp: &sdkproto.AITaskSidebarApp{ + Id: sidebarAppID, + }, + }, + }, + Resources: []*sdkproto.Resource{ + { + Agents: []*sdkproto.Agent{ + { + Id: uuid.NewString(), + Name: "a", + Apps: []*sdkproto.App{ + { + Id: sidebarAppID, + Slug: "test-app", + }, + }, + }, + }, + }, + }, + }, + expectHasAiTask: true, + expectUsageEvent: false, }, } { t.Run(tc.name, func(t *testing.T) { t.Parallel() - srv, db, _, pd := setup(t, false, &overrides{}) + fakeUsageInserter, usageInserterPtr := newFakeUsageInserter() + srv, db, _, pd := setup(t, false, &overrides{ + usageInserter: usageInserterPtr, + }) importJobID := uuid.New() tvID := uuid.New() + templateUser := dbgen.User(t, db, database.User{RBACRoles: []string{codersdk.RoleTemplateAdmin}}) template := dbgen.Template(t, db, database.Template{ Name: "template", + CreatedBy: templateUser.ID, Provisioner: database.ProvisionerTypeEcho, OrganizationID: pd.OrganizationID, }) version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ ID: tvID, + CreatedBy: templateUser.ID, OrganizationID: pd.OrganizationID, TemplateID: uuid.NullUUID{ UUID: template.ID, @@ -2600,22 +2978,19 @@ func TestCompleteJob(t *testing.T) { OwnerID: user.ID, OrganizationID: pd.OrganizationID, }) - build := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ - WorkspaceID: workspaceTable.ID, - TemplateVersionID: version.ID, - InitiatorID: user.ID, - Transition: database.WorkspaceTransitionStart, - }) ctx := testutil.Context(t, testutil.WaitShort) + + buildJobID := uuid.New() + wsBuildID := uuid.New() job, err := db.InsertProvisionerJob(ctx, database.InsertProvisionerJobParams{ - ID: importJobID, + ID: buildJobID, CreatedAt: dbtime.Now(), UpdatedAt: dbtime.Now(), OrganizationID: pd.OrganizationID, - InitiatorID: uuid.New(), + InitiatorID: user.ID, Input: must(json.Marshal(provisionerdserver.WorkspaceProvisionJob{ - WorkspaceBuildID: build.ID, + WorkspaceBuildID: wsBuildID, LogLevel: "DEBUG", })), Provisioner: database.ProvisionerTypeEcho, @@ -2624,6 +2999,14 @@ func TestCompleteJob(t *testing.T) { Tags: pd.Tags, }) require.NoError(t, err) + build := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ + ID: wsBuildID, + JobID: buildJobID, + WorkspaceID: workspaceTable.ID, + TemplateVersionID: version.ID, + InitiatorID: user.ID, + Transition: tc.transition, + }) _, err = db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ OrganizationID: pd.OrganizationID, @@ -2653,11 +3036,22 @@ func TestCompleteJob(t *testing.T) { build, err = db.GetWorkspaceBuildByID(ctx, build.ID) require.NoError(t, err) require.True(t, build.HasAITask.Valid) // We ALWAYS expect a value to be set, therefore not nil, i.e. valid = true. - require.Equal(t, tc.expected, build.HasAITask.Bool) + require.Equal(t, tc.expectHasAiTask, build.HasAITask.Bool) - if tc.expected { + if tc.expectHasAiTask { require.Equal(t, sidebarAppID, build.AITaskSidebarAppID.UUID.String()) } + + if tc.expectUsageEvent { + // Check that a usage event was collected. + require.Len(t, fakeUsageInserter.collectedEvents, 1) + require.Equal(t, usagetypes.DCManagedAgentsV1{ + Count: 1, + }, fakeUsageInserter.collectedEvents[0]) + } else { + // Check that no usage event was collected. + require.Empty(t, fakeUsageInserter.collectedEvents) + } }) } }) @@ -2881,22 +3275,21 @@ func TestInsertWorkspaceResource(t *testing.T) { t.Run("NoAgents", func(t *testing.T) { t.Parallel() db, _ := dbtestutil.NewDB(t) - dbtestutil.DisableForeignKeysAndTriggers(t, db) - job := uuid.New() - err := insert(db, job, &sdkproto.Resource{ + job := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{}) + err := insert(db, job.ID, &sdkproto.Resource{ Name: "something", Type: "aws_instance", }) require.NoError(t, err) - resources, err := db.GetWorkspaceResourcesByJobID(ctx, job) + resources, err := db.GetWorkspaceResourcesByJobID(ctx, job.ID) require.NoError(t, err) require.Len(t, resources, 1) }) t.Run("InvalidAgentToken", func(t *testing.T) { t.Parallel() db, _ := dbtestutil.NewDB(t) - dbtestutil.DisableForeignKeysAndTriggers(t, db) - err := insert(db, uuid.New(), &sdkproto.Resource{ + job := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{}) + err := insert(db, job.ID, &sdkproto.Resource{ Name: "something", Type: "aws_instance", Agents: []*sdkproto.Agent{{ @@ -2911,8 +3304,8 @@ func TestInsertWorkspaceResource(t *testing.T) { t.Run("DuplicateApps", func(t *testing.T) { t.Parallel() db, _ := dbtestutil.NewDB(t) - dbtestutil.DisableForeignKeysAndTriggers(t, db) - err := insert(db, uuid.New(), &sdkproto.Resource{ + job := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{}) + err := insert(db, job.ID, &sdkproto.Resource{ Name: "something", Type: "aws_instance", Agents: []*sdkproto.Agent{{ @@ -2927,8 +3320,8 @@ func TestInsertWorkspaceResource(t *testing.T) { require.ErrorContains(t, err, `duplicate app slug, must be unique per template: "a"`) db, _ = dbtestutil.NewDB(t) - dbtestutil.DisableForeignKeysAndTriggers(t, db) - err = insert(db, uuid.New(), &sdkproto.Resource{ + job = dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{}) + err = insert(db, job.ID, &sdkproto.Resource{ Name: "something", Type: "aws_instance", Agents: []*sdkproto.Agent{{ @@ -2948,9 +3341,8 @@ func TestInsertWorkspaceResource(t *testing.T) { t.Run("AppSlugInvalid", func(t *testing.T) { t.Parallel() db, _ := dbtestutil.NewDB(t) - dbtestutil.DisableForeignKeysAndTriggers(t, db) - job := uuid.New() - err := insert(db, job, &sdkproto.Resource{ + job := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{}) + err := insert(db, job.ID, &sdkproto.Resource{ Name: "something", Type: "aws_instance", Agents: []*sdkproto.Agent{{ @@ -2961,7 +3353,7 @@ func TestInsertWorkspaceResource(t *testing.T) { }}, }) require.ErrorContains(t, err, `app slug "dev_1" does not match regex`) - err = insert(db, job, &sdkproto.Resource{ + err = insert(db, job.ID, &sdkproto.Resource{ Name: "something", Type: "aws_instance", Agents: []*sdkproto.Agent{{ @@ -2972,7 +3364,7 @@ func TestInsertWorkspaceResource(t *testing.T) { }}, }) require.ErrorContains(t, err, `app slug "dev--1" does not match regex`) - err = insert(db, job, &sdkproto.Resource{ + err = insert(db, job.ID, &sdkproto.Resource{ Name: "something", Type: "aws_instance", Agents: []*sdkproto.Agent{{ @@ -2987,10 +3379,9 @@ func TestInsertWorkspaceResource(t *testing.T) { t.Run("DuplicateAgentNames", func(t *testing.T) { t.Parallel() db, _ := dbtestutil.NewDB(t) - dbtestutil.DisableForeignKeysAndTriggers(t, db) - job := uuid.New() + job := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{}) // case-insensitive-unique - err := insert(db, job, &sdkproto.Resource{ + err := insert(db, job.ID, &sdkproto.Resource{ Name: "something", Type: "aws_instance", Agents: []*sdkproto.Agent{{ @@ -3000,7 +3391,7 @@ func TestInsertWorkspaceResource(t *testing.T) { }}, }) require.ErrorContains(t, err, "duplicate agent name") - err = insert(db, job, &sdkproto.Resource{ + err = insert(db, job.ID, &sdkproto.Resource{ Name: "something", Type: "aws_instance", Agents: []*sdkproto.Agent{{ @@ -3014,9 +3405,8 @@ func TestInsertWorkspaceResource(t *testing.T) { t.Run("AgentNameInvalid", func(t *testing.T) { t.Parallel() db, _ := dbtestutil.NewDB(t) - dbtestutil.DisableForeignKeysAndTriggers(t, db) - job := uuid.New() - err := insert(db, job, &sdkproto.Resource{ + job := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{}) + err := insert(db, job.ID, &sdkproto.Resource{ Name: "something", Type: "aws_instance", Agents: []*sdkproto.Agent{{ @@ -3024,7 +3414,7 @@ func TestInsertWorkspaceResource(t *testing.T) { }}, }) require.NoError(t, err) // uppercase is still allowed - err = insert(db, job, &sdkproto.Resource{ + err = insert(db, job.ID, &sdkproto.Resource{ Name: "something", Type: "aws_instance", Agents: []*sdkproto.Agent{{ @@ -3032,7 +3422,7 @@ func TestInsertWorkspaceResource(t *testing.T) { }}, }) require.ErrorContains(t, err, `agent name "dev_1" contains underscores`) // custom error for underscores - err = insert(db, job, &sdkproto.Resource{ + err = insert(db, job.ID, &sdkproto.Resource{ Name: "something", Type: "aws_instance", Agents: []*sdkproto.Agent{{ @@ -3044,9 +3434,8 @@ func TestInsertWorkspaceResource(t *testing.T) { t.Run("Success", func(t *testing.T) { t.Parallel() db, _ := dbtestutil.NewDB(t) - dbtestutil.DisableForeignKeysAndTriggers(t, db) - job := uuid.New() - err := insert(db, job, &sdkproto.Resource{ + job := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{}) + err := insert(db, job.ID, &sdkproto.Resource{ Name: "something", Type: "aws_instance", DailyCost: 10, @@ -3085,7 +3474,7 @@ func TestInsertWorkspaceResource(t *testing.T) { }}, }) require.NoError(t, err) - resources, err := db.GetWorkspaceResourcesByJobID(ctx, job) + resources, err := db.GetWorkspaceResourcesByJobID(ctx, job.ID) require.NoError(t, err) require.Len(t, resources, 1) require.EqualValues(t, 10, resources[0].DailyCost) @@ -3114,9 +3503,8 @@ func TestInsertWorkspaceResource(t *testing.T) { t.Run("AllDisplayApps", func(t *testing.T) { t.Parallel() db, _ := dbtestutil.NewDB(t) - dbtestutil.DisableForeignKeysAndTriggers(t, db) - job := uuid.New() - err := insert(db, job, &sdkproto.Resource{ + job := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{}) + err := insert(db, job.ID, &sdkproto.Resource{ Name: "something", Type: "aws_instance", Agents: []*sdkproto.Agent{{ @@ -3131,7 +3519,7 @@ func TestInsertWorkspaceResource(t *testing.T) { }}, }) require.NoError(t, err) - resources, err := db.GetWorkspaceResourcesByJobID(ctx, job) + resources, err := db.GetWorkspaceResourcesByJobID(ctx, job.ID) require.NoError(t, err) require.Len(t, resources, 1) agents, err := db.GetWorkspaceAgentsByResourceIDs(ctx, []uuid.UUID{resources[0].ID}) @@ -3144,9 +3532,8 @@ func TestInsertWorkspaceResource(t *testing.T) { t.Run("DisableDefaultApps", func(t *testing.T) { t.Parallel() db, _ := dbtestutil.NewDB(t) - dbtestutil.DisableForeignKeysAndTriggers(t, db) - job := uuid.New() - err := insert(db, job, &sdkproto.Resource{ + job := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{}) + err := insert(db, job.ID, &sdkproto.Resource{ Name: "something", Type: "aws_instance", Agents: []*sdkproto.Agent{{ @@ -3155,7 +3542,7 @@ func TestInsertWorkspaceResource(t *testing.T) { }}, }) require.NoError(t, err) - resources, err := db.GetWorkspaceResourcesByJobID(ctx, job) + resources, err := db.GetWorkspaceResourcesByJobID(ctx, job.ID) require.NoError(t, err) require.Len(t, resources, 1) agents, err := db.GetWorkspaceAgentsByResourceIDs(ctx, []uuid.UUID{resources[0].ID}) @@ -3170,9 +3557,8 @@ func TestInsertWorkspaceResource(t *testing.T) { t.Run("ResourcesMonitoring", func(t *testing.T) { t.Parallel() db, _ := dbtestutil.NewDB(t) - dbtestutil.DisableForeignKeysAndTriggers(t, db) - job := uuid.New() - err := insert(db, job, &sdkproto.Resource{ + job := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{}) + err := insert(db, job.ID, &sdkproto.Resource{ Name: "something", Type: "aws_instance", Agents: []*sdkproto.Agent{{ @@ -3199,7 +3585,7 @@ func TestInsertWorkspaceResource(t *testing.T) { }}, }) require.NoError(t, err) - resources, err := db.GetWorkspaceResourcesByJobID(ctx, job) + resources, err := db.GetWorkspaceResourcesByJobID(ctx, job.ID) require.NoError(t, err) require.Len(t, resources, 1) agents, err := db.GetWorkspaceAgentsByResourceIDs(ctx, []uuid.UUID{resources[0].ID}) @@ -3223,9 +3609,8 @@ func TestInsertWorkspaceResource(t *testing.T) { t.Run("Devcontainers", func(t *testing.T) { t.Parallel() db, _ := dbtestutil.NewDB(t) - dbtestutil.DisableForeignKeysAndTriggers(t, db) - job := uuid.New() - err := insert(db, job, &sdkproto.Resource{ + job := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{}) + err := insert(db, job.ID, &sdkproto.Resource{ Name: "something", Type: "aws_instance", Agents: []*sdkproto.Agent{{ @@ -3237,7 +3622,7 @@ func TestInsertWorkspaceResource(t *testing.T) { }}, }) require.NoError(t, err) - resources, err := db.GetWorkspaceResourcesByJobID(ctx, job) + resources, err := db.GetWorkspaceResourcesByJobID(ctx, job.ID) require.NoError(t, err) require.Len(t, resources, 1) agents, err := db.GetWorkspaceAgentsByResourceIDs(ctx, []uuid.UUID{resources[0].ID}) @@ -3308,6 +3693,7 @@ func TestNotifications(t *testing.T) { } template := dbgen.Template(t, db, database.Template{ + CreatedBy: user.ID, Name: "template", Provisioner: database.ProvisionerTypeEcho, OrganizationID: pd.OrganizationID, @@ -3321,6 +3707,7 @@ func TestNotifications(t *testing.T) { OrganizationID: pd.OrganizationID, }) version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + CreatedBy: user.ID, OrganizationID: pd.OrganizationID, TemplateID: uuid.NullUUID{ UUID: template.ID, @@ -3328,24 +3715,27 @@ func TestNotifications(t *testing.T) { }, JobID: uuid.New(), }) - build := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ - WorkspaceID: workspaceTable.ID, - TemplateVersionID: version.ID, - InitiatorID: initiator.ID, - Transition: database.WorkspaceTransitionDelete, - Reason: tc.deletionReason, - }) + wsBuildID := uuid.New() job := dbgen.ProvisionerJob(t, db, ps, database.ProvisionerJob{ FileID: file.ID, InitiatorID: initiator.ID, Type: database.ProvisionerJobTypeWorkspaceBuild, Input: must(json.Marshal(provisionerdserver.WorkspaceProvisionJob{ - WorkspaceBuildID: build.ID, + WorkspaceBuildID: wsBuildID, })), OrganizationID: pd.OrganizationID, CreatedAt: time.Now(), UpdatedAt: time.Now(), }) + _ = dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ + ID: wsBuildID, + JobID: job.ID, + WorkspaceID: workspaceTable.ID, + TemplateVersionID: version.ID, + InitiatorID: initiator.ID, + Transition: database.WorkspaceTransitionDelete, + Reason: tc.deletionReason, + }) _, err = db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ OrganizationID: pd.OrganizationID, WorkerID: uuid.NullUUID{ @@ -3434,6 +3824,7 @@ func TestNotifications(t *testing.T) { initiator := user template := dbgen.Template(t, db, database.Template{ + CreatedBy: user.ID, Name: "template", Provisioner: database.ProvisionerTypeEcho, OrganizationID: pd.OrganizationID, @@ -3445,6 +3836,7 @@ func TestNotifications(t *testing.T) { OrganizationID: pd.OrganizationID, }) version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + CreatedBy: user.ID, OrganizationID: pd.OrganizationID, TemplateID: uuid.NullUUID{ UUID: template.ID, @@ -3452,24 +3844,28 @@ func TestNotifications(t *testing.T) { }, JobID: uuid.New(), }) - build := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ - WorkspaceID: workspace.ID, - TemplateVersionID: version.ID, - InitiatorID: initiator.ID, - Transition: database.WorkspaceTransitionDelete, - Reason: tc.buildReason, - }) + wsBuildID := uuid.New() job := dbgen.ProvisionerJob(t, db, ps, database.ProvisionerJob{ + ID: uuid.New(), FileID: file.ID, InitiatorID: initiator.ID, Type: database.ProvisionerJobTypeWorkspaceBuild, Input: must(json.Marshal(provisionerdserver.WorkspaceProvisionJob{ - WorkspaceBuildID: build.ID, + WorkspaceBuildID: wsBuildID, })), OrganizationID: pd.OrganizationID, CreatedAt: time.Now(), UpdatedAt: time.Now(), }) + _ = dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ + ID: wsBuildID, + JobID: job.ID, + WorkspaceID: workspace.ID, + TemplateVersionID: version.ID, + InitiatorID: initiator.ID, + Transition: database.WorkspaceTransitionDelete, + Reason: tc.buildReason, + }) _, err := db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ OrganizationID: pd.OrganizationID, WorkerID: uuid.NullUUID{ @@ -3525,24 +3921,29 @@ func TestNotifications(t *testing.T) { _ = dbgen.OrganizationMember(t, db, database.OrganizationMember{UserID: user.ID, OrganizationID: pd.OrganizationID}) template := dbgen.Template(t, db, database.Template{ - Name: "template", DisplayName: "William's Template", Provisioner: database.ProvisionerTypeEcho, OrganizationID: pd.OrganizationID, + CreatedBy: user.ID, + Name: "template", DisplayName: "William's Template", Provisioner: database.ProvisionerTypeEcho, OrganizationID: pd.OrganizationID, }) workspace := dbgen.Workspace(t, db, database.WorkspaceTable{ TemplateID: template.ID, OwnerID: user.ID, OrganizationID: pd.OrganizationID, }) version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + CreatedBy: user.ID, OrganizationID: pd.OrganizationID, TemplateID: uuid.NullUUID{UUID: template.ID, Valid: true}, JobID: uuid.New(), }) - build := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ - WorkspaceID: workspace.ID, TemplateVersionID: version.ID, InitiatorID: user.ID, Transition: database.WorkspaceTransitionDelete, Reason: database.BuildReasonInitiator, - }) + wsBuildID := uuid.New() job := dbgen.ProvisionerJob(t, db, ps, database.ProvisionerJob{ FileID: dbgen.File(t, db, database.File{CreatedBy: user.ID}).ID, InitiatorID: user.ID, Type: database.ProvisionerJobTypeWorkspaceBuild, - Input: must(json.Marshal(provisionerdserver.WorkspaceProvisionJob{WorkspaceBuildID: build.ID})), + Input: must(json.Marshal(provisionerdserver.WorkspaceProvisionJob{WorkspaceBuildID: wsBuildID})), OrganizationID: pd.OrganizationID, }) + build := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ + ID: wsBuildID, + JobID: job.ID, + WorkspaceID: workspace.ID, TemplateVersionID: version.ID, InitiatorID: user.ID, Transition: database.WorkspaceTransitionDelete, Reason: database.BuildReasonInitiator, + }) _, err := db.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ OrganizationID: pd.OrganizationID, WorkerID: uuid.NullUUID{UUID: pd.ID, Valid: true}, @@ -3582,6 +3983,7 @@ type overrides struct { externalAuthConfigs []*externalauth.Config templateScheduleStore *atomic.Pointer[schedule.TemplateScheduleStore] userQuietHoursScheduleStore *atomic.Pointer[schedule.UserQuietHoursScheduleStore] + usageInserter *atomic.Pointer[usage.Inserter] clock *quartz.Mock acquireJobLongPollDuration time.Duration heartbeatFn func(ctx context.Context) error @@ -3595,7 +3997,6 @@ func setup(t *testing.T, ignoreLogErrors bool, ov *overrides) (proto.DRPCProvisi t.Helper() logger := testutil.Logger(t) db, ps := dbtestutil.NewDB(t) - dbtestutil.DisableForeignKeysAndTriggers(t, db) defOrg, err := db.GetDefaultOrganization(context.Background()) require.NoError(t, err, "default org not found") @@ -3603,13 +4004,14 @@ func setup(t *testing.T, ignoreLogErrors bool, ov *overrides) (proto.DRPCProvisi var externalAuthConfigs []*externalauth.Config tss := testTemplateScheduleStore() uqhss := testUserQuietHoursScheduleStore() + usageInserter := testUsageInserter() clock := quartz.NewReal() pollDur := time.Duration(0) if ov == nil { ov = &overrides{} } if ov.ctx == nil { - ctx, cancel := context.WithCancel(context.Background()) + ctx, cancel := context.WithCancel(dbauthz.AsProvisionerd(context.Background())) t.Cleanup(cancel) ov.ctx = ctx } @@ -3640,6 +4042,15 @@ func setup(t *testing.T, ignoreLogErrors bool, ov *overrides) (proto.DRPCProvisi require.True(t, swapped) } } + if ov.usageInserter != nil { + tUsageInserter := usageInserter.Load() + // keep the initial test value if the override hasn't set the atomic pointer. + usageInserter = ov.usageInserter + if usageInserter.Load() == nil { + swapped := usageInserter.CompareAndSwap(nil, tUsageInserter) + require.True(t, swapped) + } + } if ov.clock != nil { clock = ov.clock } @@ -3677,6 +4088,10 @@ func setup(t *testing.T, ignoreLogErrors bool, ov *overrides) (proto.DRPCProvisi var op atomic.Pointer[agplprebuilds.ReconciliationOrchestrator] op.Store(&prebuildsOrchestrator) + // Use an authz wrapped database for the server to ensure permission checks + // work. + authorizer := rbac.NewStrictCachingAuthorizer(prometheus.NewRegistry()) + serverDB := dbauthz.New(db, authorizer, logger, coderdtest.AccessControlStorePointer()) srv, err := provisionerdserver.NewServer( ov.ctx, proto.CurrentVersion.String(), @@ -3686,7 +4101,7 @@ func setup(t *testing.T, ignoreLogErrors bool, ov *overrides) (proto.DRPCProvisi slogtest.Make(t, &slogtest.Options{IgnoreErrors: ignoreLogErrors}), []database.ProvisionerType{database.ProvisionerTypeEcho}, provisionerdserver.Tags(daemon.Tags), - db, + serverDB, ps, provisionerdserver.NewAcquirer(ov.ctx, logger.Named("acquirer"), db, ps), telemetry.NewNoop(), @@ -3695,6 +4110,7 @@ func setup(t *testing.T, ignoreLogErrors bool, ov *overrides) (proto.DRPCProvisi auditPtr, tss, uqhss, + usageInserter, deploymentValues, provisionerdserver.Options{ ExternalAuthConfigs: externalAuthConfigs, @@ -3809,3 +4225,22 @@ func (s *fakeStream) cancel() { s.canceled = true s.c.Broadcast() } + +type fakeUsageInserter struct { + collectedEvents []usagetypes.Event +} + +var _ usage.Inserter = &fakeUsageInserter{} + +func newFakeUsageInserter() (*fakeUsageInserter, *atomic.Pointer[usage.Inserter]) { + ptr := &atomic.Pointer[usage.Inserter]{} + fake := &fakeUsageInserter{} + var inserter usage.Inserter = fake + ptr.Store(&inserter) + return fake, ptr +} + +func (f *fakeUsageInserter) InsertDiscreteUsageEvent(_ context.Context, _ database.Store, event usagetypes.DiscreteEvent) error { + f.collectedEvents = append(f.collectedEvents, event) + return nil +} diff --git a/coderd/provisionerjobs.go b/coderd/provisionerjobs.go index 800b2916efef3..e9ab5260988d4 100644 --- a/coderd/provisionerjobs.go +++ b/coderd/provisionerjobs.go @@ -363,6 +363,7 @@ func convertProvisionerJob(pj database.GetProvisionerJobsByIDsWithQueuePositionR Tags: provisionerJob.Tags, QueuePosition: int(pj.QueuePosition), QueueSize: int(pj.QueueSize), + LogsOverflowed: provisionerJob.LogsOverflowed, } // Applying values optional to the struct. if provisionerJob.StartedAt.Valid { diff --git a/coderd/rbac/acl/updatevalidator.go b/coderd/rbac/acl/updatevalidator.go new file mode 100644 index 0000000000000..9785609f2e33a --- /dev/null +++ b/coderd/rbac/acl/updatevalidator.go @@ -0,0 +1,130 @@ +package acl + +import ( + "context" + "fmt" + + "github.com/google/uuid" + + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbauthz" + "github.com/coder/coder/v2/codersdk" +) + +type UpdateValidator[Role codersdk.WorkspaceRole | codersdk.TemplateRole] interface { + // Users should return a map from user UUIDs (as strings) to the role they + // are being assigned. Additionally, it should return a string that will be + // used as the field name for the ValidationErrors returned from Validate. + Users() (map[string]Role, string) + // Groups should return a map from group UUIDs (as strings) to the role they + // are being assigned. Additionally, it should return a string that will be + // used as the field name for the ValidationErrors returned from Validate. + Groups() (map[string]Role, string) + // ValidateRole should return an error that will be used in the + // ValidationError if the role is invalid for the corresponding resource type. + ValidateRole(role Role) error +} + +func Validate[Role codersdk.WorkspaceRole | codersdk.TemplateRole]( + ctx context.Context, + db database.Store, + v UpdateValidator[Role], +) []codersdk.ValidationError { + // nolint:gocritic // Validate requires full read access to users and groups + ctx = dbauthz.AsSystemRestricted(ctx) + var validErrs []codersdk.ValidationError + + groupRoles, groupsField := v.Groups() + groupIDs := make([]uuid.UUID, 0, len(groupRoles)) + for idStr, role := range groupRoles { + // Validate the provided role names + if err := v.ValidateRole(role); err != nil { + validErrs = append(validErrs, codersdk.ValidationError{ + Field: groupsField, + Detail: err.Error(), + }) + } + // Validate that the IDs are UUIDs + id, err := uuid.Parse(idStr) + if err != nil { + validErrs = append(validErrs, codersdk.ValidationError{ + Field: groupsField, + Detail: fmt.Sprintf("%v is not a valid UUID.", idStr), + }) + continue + } + // Don't check if the ID exists when setting the role to + // WorkspaceRoleDeleted or TemplateRoleDeleted. They might've existing at + // some point and got deleted. If we report that as an error here then they + // can't be removed. + if string(role) == "" { + continue + } + groupIDs = append(groupIDs, id) + } + + // Validate that the groups exist + groupValidation, err := db.ValidateGroupIDs(ctx, groupIDs) + if err != nil { + validErrs = append(validErrs, codersdk.ValidationError{ + Field: groupsField, + Detail: fmt.Sprintf("failed to validate group IDs: %v", err.Error()), + }) + } + if !groupValidation.Ok { + for _, id := range groupValidation.InvalidGroupIds { + validErrs = append(validErrs, codersdk.ValidationError{ + Field: groupsField, + Detail: fmt.Sprintf("group with ID %v does not exist", id), + }) + } + } + + userRoles, usersField := v.Users() + userIDs := make([]uuid.UUID, 0, len(userRoles)) + for idStr, role := range userRoles { + // Validate the provided role names + if err := v.ValidateRole(role); err != nil { + validErrs = append(validErrs, codersdk.ValidationError{ + Field: usersField, + Detail: err.Error(), + }) + } + // Validate that the IDs are UUIDs + id, err := uuid.Parse(idStr) + if err != nil { + validErrs = append(validErrs, codersdk.ValidationError{ + Field: usersField, + Detail: fmt.Sprintf("%v is not a valid UUID.", idStr), + }) + continue + } + // Don't check if the ID exists when setting the role to + // WorkspaceRoleDeleted or TemplateRoleDeleted. They might've existing at + // some point and got deleted. If we report that as an error here then they + // can't be removed. + if string(role) == "" { + continue + } + userIDs = append(userIDs, id) + } + + // Validate that the groups exist + userValidation, err := db.ValidateUserIDs(ctx, userIDs) + if err != nil { + validErrs = append(validErrs, codersdk.ValidationError{ + Field: usersField, + Detail: fmt.Sprintf("failed to validate user IDs: %v", err.Error()), + }) + } + if !userValidation.Ok { + for _, id := range userValidation.InvalidUserIds { + validErrs = append(validErrs, codersdk.ValidationError{ + Field: usersField, + Detail: fmt.Sprintf("user with ID %v does not exist", id), + }) + } + } + + return validErrs +} diff --git a/coderd/rbac/acl/updatevalidator_test.go b/coderd/rbac/acl/updatevalidator_test.go new file mode 100644 index 0000000000000..0e394370b1356 --- /dev/null +++ b/coderd/rbac/acl/updatevalidator_test.go @@ -0,0 +1,91 @@ +package acl_test + +import ( + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/coderd" + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbgen" + "github.com/coder/coder/v2/coderd/database/dbtestutil" + "github.com/coder/coder/v2/coderd/rbac/acl" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/testutil" +) + +func TestOK(t *testing.T) { + t.Parallel() + + db, _ := dbtestutil.NewDB(t) + o := dbgen.Organization(t, db, database.Organization{}) + g := dbgen.Group(t, db, database.Group{OrganizationID: o.ID}) + u := dbgen.User(t, db, database.User{}) + ctx := testutil.Context(t, testutil.WaitShort) + + update := codersdk.UpdateWorkspaceACL{ + UserRoles: map[string]codersdk.WorkspaceRole{ + u.ID.String(): codersdk.WorkspaceRoleAdmin, + // An unknown ID is allowed if and only if the specified role is either + // codersdk.WorkspaceRoleDeleted or codersdk.TemplateRoleDeleted. + uuid.NewString(): codersdk.WorkspaceRoleDeleted, + }, + GroupRoles: map[string]codersdk.WorkspaceRole{ + g.ID.String(): codersdk.WorkspaceRoleAdmin, + // An unknown ID is allowed if and only if the specified role is either + // codersdk.WorkspaceRoleDeleted or codersdk.TemplateRoleDeleted. + uuid.NewString(): codersdk.WorkspaceRoleDeleted, + }, + } + errors := acl.Validate(ctx, db, coderd.WorkspaceACLUpdateValidator(update)) + require.Empty(t, errors) +} + +func TestDeniesUnknownIDs(t *testing.T) { + t.Parallel() + + db, _ := dbtestutil.NewDB(t) + ctx := testutil.Context(t, testutil.WaitShort) + + update := codersdk.UpdateWorkspaceACL{ + UserRoles: map[string]codersdk.WorkspaceRole{ + uuid.NewString(): codersdk.WorkspaceRoleAdmin, + }, + GroupRoles: map[string]codersdk.WorkspaceRole{ + uuid.NewString(): codersdk.WorkspaceRoleAdmin, + }, + } + errors := acl.Validate(ctx, db, coderd.WorkspaceACLUpdateValidator(update)) + require.Len(t, errors, 2) + require.Equal(t, errors[0].Field, "group_roles") + require.ErrorContains(t, errors[0], "does not exist") + require.Equal(t, errors[1].Field, "user_roles") + require.ErrorContains(t, errors[1], "does not exist") +} + +func TestDeniesUnknownRolesAndInvalidIDs(t *testing.T) { + t.Parallel() + + db, _ := dbtestutil.NewDB(t) + ctx := testutil.Context(t, testutil.WaitShort) + + update := codersdk.UpdateWorkspaceACL{ + UserRoles: map[string]codersdk.WorkspaceRole{ + "Quifrey": "level 5", + }, + GroupRoles: map[string]codersdk.WorkspaceRole{ + "apprentices": "level 2", + }, + } + errors := acl.Validate(ctx, db, coderd.WorkspaceACLUpdateValidator(update)) + require.Len(t, errors, 4) + require.Equal(t, errors[0].Field, "group_roles") + require.ErrorContains(t, errors[0], "role \"level 2\" is not a valid workspace role") + require.Equal(t, errors[1].Field, "group_roles") + require.ErrorContains(t, errors[1], "not a valid UUID") + require.Equal(t, errors[2].Field, "user_roles") + require.ErrorContains(t, errors[2], "role \"level 5\" is not a valid workspace role") + require.Equal(t, errors[3].Field, "user_roles") + require.ErrorContains(t, errors[3], "not a valid UUID") +} diff --git a/coderd/rbac/authz.go b/coderd/rbac/authz.go index fcb6621a34cee..0b48a24aebe83 100644 --- a/coderd/rbac/authz.go +++ b/coderd/rbac/authz.go @@ -76,6 +76,7 @@ const ( SubjectTypeNotifier SubjectType = "notifier" SubjectTypeSubAgentAPI SubjectType = "sub_agent_api" SubjectTypeFileReader SubjectType = "file_reader" + SubjectTypeUsagePublisher SubjectType = "usage_publisher" ) const ( diff --git a/coderd/rbac/object_gen.go b/coderd/rbac/object_gen.go index 5fb3cc2bd8a3b..de05dced2693d 100644 --- a/coderd/rbac/object_gen.go +++ b/coderd/rbac/object_gen.go @@ -289,6 +289,15 @@ var ( Type: "template", } + // ResourceUsageEvent + // Valid Actions + // - "ActionCreate" :: create a usage event + // - "ActionRead" :: read usage events + // - "ActionUpdate" :: update usage events + ResourceUsageEvent = Object{ + Type: "usage_event", + } + // ResourceUser // Valid Actions // - "ActionCreate" :: create a new user @@ -301,6 +310,16 @@ var ( Type: "user", } + // ResourceUserSecret + // Valid Actions + // - "ActionCreate" :: create a user secret + // - "ActionDelete" :: delete a user secret + // - "ActionRead" :: read user secret metadata and value + // - "ActionUpdate" :: update user secret metadata and value + ResourceUserSecret = Object{ + Type: "user_secret", + } + // ResourceWebpushSubscription // Valid Actions // - "ActionCreate" :: create webpush subscriptions @@ -402,7 +421,9 @@ func AllResources() []Objecter { ResourceSystem, ResourceTailnetCoordinator, ResourceTemplate, + ResourceUsageEvent, ResourceUser, + ResourceUserSecret, ResourceWebpushSubscription, ResourceWorkspace, ResourceWorkspaceAgentDevcontainers, diff --git a/coderd/rbac/policy/policy.go b/coderd/rbac/policy/policy.go index 8f05bbdbe544f..25fb87bfc2d94 100644 --- a/coderd/rbac/policy/policy.go +++ b/coderd/rbac/policy/policy.go @@ -343,4 +343,19 @@ var RBACPermissions = map[string]PermissionDefinition{ ActionCreate: "create workspace agent devcontainers", }, }, + "user_secret": { + Actions: map[Action]ActionDefinition{ + ActionCreate: "create a user secret", + ActionRead: "read user secret metadata and value", + ActionUpdate: "update user secret metadata and value", + ActionDelete: "delete a user secret", + }, + }, + "usage_event": { + Actions: map[Action]ActionDefinition{ + ActionCreate: "create a usage event", + ActionRead: "read usage events", + ActionUpdate: "update usage events", + }, + }, } diff --git a/coderd/rbac/regosql/acl_group_var.go b/coderd/rbac/regosql/acl_group_var.go deleted file mode 100644 index 328dfbcd48d0a..0000000000000 --- a/coderd/rbac/regosql/acl_group_var.go +++ /dev/null @@ -1,104 +0,0 @@ -package regosql - -import ( - "fmt" - - "golang.org/x/xerrors" - - "github.com/open-policy-agent/opa/ast" - - "github.com/coder/coder/v2/coderd/rbac/regosql/sqltypes" -) - -var ( - _ sqltypes.VariableMatcher = ACLGroupVar{} - _ sqltypes.Node = ACLGroupVar{} -) - -// ACLGroupVar is a variable matcher that handles group_acl and user_acl. -// The sql type is a jsonb object with the following structure: -// -// "group_acl": { -// "": [""] -// } -// -// This is a custom variable matcher as json objects have arbitrary complexity. -type ACLGroupVar struct { - StructSQL string - // input.object.group_acl -> ["input", "object", "group_acl"] - StructPath []string - - // FieldReference handles referencing the subfields, which could be - // more variables. We pass one in as the global one might not be correctly - // scoped. - FieldReference sqltypes.VariableMatcher - - // Instance fields - Source sqltypes.RegoSource - GroupNode sqltypes.Node -} - -func ACLGroupMatcher(fieldReference sqltypes.VariableMatcher, structSQL string, structPath []string) ACLGroupVar { - return ACLGroupVar{StructSQL: structSQL, StructPath: structPath, FieldReference: fieldReference} -} - -func (ACLGroupVar) UseAs() sqltypes.Node { return ACLGroupVar{} } - -func (g ACLGroupVar) ConvertVariable(rego ast.Ref) (sqltypes.Node, bool) { - // "left" will be a map of group names to actions in rego. - // { - // "all_users": ["read"] - // } - left, err := sqltypes.RegoVarPath(g.StructPath, rego) - if err != nil { - return nil, false - } - - aclGrp := ACLGroupVar{ - StructSQL: g.StructSQL, - StructPath: g.StructPath, - FieldReference: g.FieldReference, - - Source: sqltypes.RegoSource(rego.String()), - } - - // We expect 1 more term. Either a ref or a string. - if len(left) != 1 { - return nil, false - } - - // If the remaining is a variable, then we need to convert it. - // Assuming we support variable fields. - ref, ok := left[0].Value.(ast.Ref) - if ok && g.FieldReference != nil { - groupNode, ok := g.FieldReference.ConvertVariable(ref) - if ok { - aclGrp.GroupNode = groupNode - return aclGrp, true - } - } - - // If it is a string, we assume it is a literal - groupName, ok := left[0].Value.(ast.String) - if ok { - aclGrp.GroupNode = sqltypes.String(string(groupName)) - return aclGrp, true - } - - // If we have not matched it yet, then it is something we do not recognize. - return nil, false -} - -func (g ACLGroupVar) SQLString(cfg *sqltypes.SQLGenerator) string { - return fmt.Sprintf("%s->%s", g.StructSQL, g.GroupNode.SQLString(cfg)) -} - -func (g ACLGroupVar) ContainsSQL(cfg *sqltypes.SQLGenerator, other sqltypes.Node) (string, error) { - switch other.UseAs().(type) { - // Only supports containing other strings. - case sqltypes.AstString: - return fmt.Sprintf("%s ? %s", g.SQLString(cfg), other.SQLString(cfg)), nil - default: - return "", xerrors.Errorf("unsupported acl group contains %T", other) - } -} diff --git a/coderd/rbac/regosql/acl_mapping_var.go b/coderd/rbac/regosql/acl_mapping_var.go new file mode 100644 index 0000000000000..301da929adfbd --- /dev/null +++ b/coderd/rbac/regosql/acl_mapping_var.go @@ -0,0 +1,131 @@ +package regosql + +import ( + "fmt" + + "golang.org/x/xerrors" + + "github.com/open-policy-agent/opa/ast" + + "github.com/coder/coder/v2/coderd/rbac/regosql/sqltypes" +) + +var ( + _ sqltypes.VariableMatcher = ACLMappingVar{} + _ sqltypes.Node = ACLMappingVar{} +) + +// ACLMappingVar is a variable matcher that matches ACL map variables to their +// SQL storage. Usually the actual backing implementation is a pair of `jsonb` +// columns named `group_acl` and `user_acl`. Each column contains an object that +// looks like... +// +// ```json +// +// { +// "": ["", ""] +// } +// +// ``` +type ACLMappingVar struct { + // SelectSQL is used to `SELECT` the ACL mapping from the table for the + // given resource. ie. if the full query might look like `SELECT group_acl + // FROM things;` then you would want this to be `"group_acl"`. + SelectSQL string + // IndexMatcher handles variable references when indexing into the mapping. + // (ie. `input.object.acl_group_list[input.object.org_owner]`). We need one + // from the local context because the global one might not be correctly + // scoped. + IndexMatcher sqltypes.VariableMatcher + // Used if the action list isn't directly in the ACL entry. For example, in + // the `workspaces.group_acl` and `workspaces.user_acl` columns they're stored + // under a `"permissions"` key. + Subfield string + + // StructPath represents the path of the value in rego + // ie. input.object.group_acl -> ["input", "object", "group_acl"] + StructPath []string + + // Instance fields + Source sqltypes.RegoSource + GroupNode sqltypes.Node +} + +func ACLMappingMatcher(indexMatcher sqltypes.VariableMatcher, selectSQL string, structPath []string) ACLMappingVar { + return ACLMappingVar{IndexMatcher: indexMatcher, SelectSQL: selectSQL, StructPath: structPath} +} + +func (g ACLMappingVar) UsingSubfield(subfield string) ACLMappingVar { + g.Subfield = subfield + return g +} + +func (ACLMappingVar) UseAs() sqltypes.Node { return ACLMappingVar{} } + +func (g ACLMappingVar) ConvertVariable(rego ast.Ref) (sqltypes.Node, bool) { + // left is the rego variable that maps the actor's id to the actions they + // are allowed to take. + // { + // "": ["", ""] + // } + left, err := sqltypes.RegoVarPath(g.StructPath, rego) + if err != nil { + return nil, false + } + + aclGrp := ACLMappingVar{ + SelectSQL: g.SelectSQL, + IndexMatcher: g.IndexMatcher, + Subfield: g.Subfield, + + StructPath: g.StructPath, + + Source: sqltypes.RegoSource(rego.String()), + } + + // We expect 1 more term. Either a ref or a string. + if len(left) != 1 { + return nil, false + } + + // If the remaining is a variable, then we need to convert it. + // Assuming we support variable fields. + ref, ok := left[0].Value.(ast.Ref) + if ok && g.IndexMatcher != nil { + groupNode, ok := g.IndexMatcher.ConvertVariable(ref) + if ok { + aclGrp.GroupNode = groupNode + return aclGrp, true + } + } + + // If it is a string, we assume it is a literal + groupName, ok := left[0].Value.(ast.String) + if ok { + aclGrp.GroupNode = sqltypes.String(string(groupName)) + return aclGrp, true + } + + // If we have not matched it yet, then it is something we do not recognize. + return nil, false +} + +func (g ACLMappingVar) SQLString(cfg *sqltypes.SQLGenerator) string { + if g.Subfield != "" { + // We can't use subsequent -> operators because the first one might return + // NULL, which would result in an error like "column does not exist"' from + // the second. + return fmt.Sprintf("%s#>array[%s, '%s']", g.SelectSQL, g.GroupNode.SQLString(cfg), g.Subfield) + } + return fmt.Sprintf("%s->%s", g.SelectSQL, g.GroupNode.SQLString(cfg)) +} + +func (g ACLMappingVar) ContainsSQL(cfg *sqltypes.SQLGenerator, other sqltypes.Node) (string, error) { + switch other.UseAs().(type) { + // Only supports containing other strings. + case sqltypes.AstString: + return fmt.Sprintf("%s ? %s", g.SQLString(cfg), other.SQLString(cfg)), nil + default: + return "", xerrors.Errorf("unsupported acl group contains %T", other) + } +} diff --git a/coderd/rbac/regosql/compile_test.go b/coderd/rbac/regosql/compile_test.go index 07e8e7245a53e..7bea7f76fd485 100644 --- a/coderd/rbac/regosql/compile_test.go +++ b/coderd/rbac/regosql/compile_test.go @@ -193,10 +193,30 @@ func TestRegoQueries(t *testing.T) { `"read" in input.object.acl_user_list["d5389ccc-57a4-4b13-8c3f-31747bcdc9f1"]`, `"*" in input.object.acl_user_list["d5389ccc-57a4-4b13-8c3f-31747bcdc9f1"]`, }, - ExpectedSQL: "((user_acl->'d5389ccc-57a4-4b13-8c3f-31747bcdc9f1' ? 'read') OR " + - "(user_acl->'d5389ccc-57a4-4b13-8c3f-31747bcdc9f1' ? '*'))", + ExpectedSQL: "((user_acl->'d5389ccc-57a4-4b13-8c3f-31747bcdc9f1' ? 'read')" + + " OR (user_acl->'d5389ccc-57a4-4b13-8c3f-31747bcdc9f1' ? '*'))", VariableConverter: regosql.DefaultVariableConverter(), }, + { + Name: "UserWorkspaceACLAllow", + Queries: []string{ + `"read" in input.object.acl_user_list["d5389ccc-57a4-4b13-8c3f-31747bcdc9f1"]`, + `"*" in input.object.acl_user_list["d5389ccc-57a4-4b13-8c3f-31747bcdc9f1"]`, + }, + ExpectedSQL: "((workspaces.user_acl#>array['d5389ccc-57a4-4b13-8c3f-31747bcdc9f1', 'permissions'] ? 'read')" + + " OR (workspaces.user_acl#>array['d5389ccc-57a4-4b13-8c3f-31747bcdc9f1', 'permissions'] ? '*'))", + VariableConverter: regosql.WorkspaceConverter(), + }, + { + Name: "GroupWorkspaceACLAllow", + Queries: []string{ + `"read" in input.object.acl_group_list["96c55a0e-73b4-44fc-abac-70d53c35c04c"]`, + `"*" in input.object.acl_group_list["96c55a0e-73b4-44fc-abac-70d53c35c04c"]`, + }, + ExpectedSQL: "((workspaces.group_acl#>array['96c55a0e-73b4-44fc-abac-70d53c35c04c', 'permissions'] ? 'read')" + + " OR (workspaces.group_acl#>array['96c55a0e-73b4-44fc-abac-70d53c35c04c', 'permissions'] ? '*'))", + VariableConverter: regosql.WorkspaceConverter(), + }, { Name: "NoACLConfig", Queries: []string{ diff --git a/coderd/rbac/regosql/configs.go b/coderd/rbac/regosql/configs.go index 69d425d9dba2f..1c1e126ff692e 100644 --- a/coderd/rbac/regosql/configs.go +++ b/coderd/rbac/regosql/configs.go @@ -14,12 +14,12 @@ func userOwnerMatcher() sqltypes.VariableMatcher { return sqltypes.StringVarMatcher("owner_id :: text", []string{"input", "object", "owner"}) } -func groupACLMatcher(m sqltypes.VariableMatcher) sqltypes.VariableMatcher { - return ACLGroupMatcher(m, "group_acl", []string{"input", "object", "acl_group_list"}) +func groupACLMatcher(m sqltypes.VariableMatcher) ACLMappingVar { + return ACLMappingMatcher(m, "group_acl", []string{"input", "object", "acl_group_list"}) } -func userACLMatcher(m sqltypes.VariableMatcher) sqltypes.VariableMatcher { - return ACLGroupMatcher(m, "user_acl", []string{"input", "object", "acl_user_list"}) +func userACLMatcher(m sqltypes.VariableMatcher) ACLMappingVar { + return ACLMappingMatcher(m, "user_acl", []string{"input", "object", "acl_user_list"}) } func TemplateConverter() *sqltypes.VariableConverter { @@ -36,6 +36,20 @@ func TemplateConverter() *sqltypes.VariableConverter { return matcher } +func WorkspaceConverter() *sqltypes.VariableConverter { + matcher := sqltypes.NewVariableConverter().RegisterMatcher( + resourceIDMatcher(), + sqltypes.StringVarMatcher("workspaces.organization_id :: text", []string{"input", "object", "org_owner"}), + userOwnerMatcher(), + ) + matcher.RegisterMatcher( + ACLMappingMatcher(matcher, "workspaces.group_acl", []string{"input", "object", "acl_group_list"}).UsingSubfield("permissions"), + ACLMappingMatcher(matcher, "workspaces.user_acl", []string{"input", "object", "acl_user_list"}).UsingSubfield("permissions"), + ) + + return matcher +} + func AuditLogConverter() *sqltypes.VariableConverter { matcher := sqltypes.NewVariableConverter().RegisterMatcher( resourceIDMatcher(), @@ -81,20 +95,6 @@ func UserConverter() *sqltypes.VariableConverter { return matcher } -func WorkspaceConverter() *sqltypes.VariableConverter { - matcher := sqltypes.NewVariableConverter().RegisterMatcher( - resourceIDMatcher(), - sqltypes.StringVarMatcher("workspaces.organization_id :: text", []string{"input", "object", "org_owner"}), - userOwnerMatcher(), - ) - matcher.RegisterMatcher( - sqltypes.AlwaysFalse(groupACLMatcher(matcher)), - sqltypes.AlwaysFalse(userACLMatcher(matcher)), - ) - - return matcher -} - // NoACLConverter should be used when the target SQL table does not contain // group or user ACL columns. func NoACLConverter() *sqltypes.VariableConverter { diff --git a/coderd/rbac/roles.go b/coderd/rbac/roles.go index b8d3f959ce477..c6770f31b0320 100644 --- a/coderd/rbac/roles.go +++ b/coderd/rbac/roles.go @@ -269,8 +269,9 @@ func ReloadBuiltinRoles(opts *RoleOptions) { DisplayName: "Owner", Site: append( // Workspace dormancy and workspace are omitted. - // Workspace is specifically handled based on the opts.NoOwnerWorkspaceExec - allPermsExcept(ResourceWorkspaceDormant, ResourcePrebuiltWorkspace, ResourceWorkspace), + // Workspace is specifically handled based on the opts.NoOwnerWorkspaceExec. + // Owners cannot access other users' secrets. + allPermsExcept(ResourceWorkspaceDormant, ResourcePrebuiltWorkspace, ResourceWorkspace, ResourceUserSecret, ResourceUsageEvent), // This adds back in the Workspace permissions. Permissions(map[string][]policy.Action{ ResourceWorkspace.Type: ownerWorkspaceActions, @@ -417,7 +418,7 @@ func ReloadBuiltinRoles(opts *RoleOptions) { }), Org: map[string][]Permission{ // Org admins should not have workspace exec perms. - organizationID.String(): append(allPermsExcept(ResourceWorkspace, ResourceWorkspaceDormant, ResourcePrebuiltWorkspace, ResourceAssignRole), Permissions(map[string][]policy.Action{ + organizationID.String(): append(allPermsExcept(ResourceWorkspace, ResourceWorkspaceDormant, ResourcePrebuiltWorkspace, ResourceAssignRole, ResourceUserSecret), Permissions(map[string][]policy.Action{ ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop, policy.ActionCreateAgent, policy.ActionDeleteAgent}, ResourceWorkspace.Type: slice.Omit(ResourceWorkspace.AvailableActions(), policy.ActionApplicationConnect, policy.ActionSSH), // PrebuiltWorkspaces are a subset of Workspaces. diff --git a/coderd/rbac/roles_test.go b/coderd/rbac/roles_test.go index 267a99993e642..57a5022392b51 100644 --- a/coderd/rbac/roles_test.go +++ b/coderd/rbac/roles_test.go @@ -858,6 +858,36 @@ func TestRolePermissions(t *testing.T) { false: {setOtherOrg, setOrgNotMe, memberMe, orgMemberMe, templateAdmin, userAdmin}, }, }, + // Only the user themselves can access their own secrets — no one else. + { + Name: "UserSecrets", + Actions: []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + Resource: rbac.ResourceUserSecret.WithOwner(currentUser.String()), + AuthorizeMap: map[bool][]hasAuthSubjects{ + true: {memberMe, orgMemberMe}, + false: { + owner, orgAdmin, + otherOrgAdmin, otherOrgMember, orgAuditor, orgUserAdmin, orgTemplateAdmin, + templateAdmin, userAdmin, otherOrgAuditor, otherOrgUserAdmin, otherOrgTemplateAdmin, + }, + }, + }, + { + Name: "UsageEvents", + Actions: []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate}, + Resource: rbac.ResourceUsageEvent, + AuthorizeMap: map[bool][]hasAuthSubjects{ + true: {}, + false: { + owner, + memberMe, orgMemberMe, otherOrgMember, + orgAdmin, otherOrgAdmin, + orgAuditor, otherOrgAuditor, + templateAdmin, orgTemplateAdmin, otherOrgTemplateAdmin, + userAdmin, orgUserAdmin, otherOrgUserAdmin, + }, + }, + }, } // We expect every permission to be tested above. diff --git a/coderd/schedule/autostop.go b/coderd/schedule/autostop.go index f6a01633f3179..25bd043c60975 100644 --- a/coderd/schedule/autostop.go +++ b/coderd/schedule/autostop.go @@ -50,8 +50,19 @@ type CalculateAutostopParams struct { // by autobuild.NextAutostart WorkspaceAutostart string - Now time.Time - Workspace database.WorkspaceTable + // WorkspaceBuildCompletedAt is the time when the workspace build was + // completed. + // + // We always want to calculate using the build completion time, and not just + // the current time, to avoid forcing a workspace build's max_deadline being + // pushed to the next potential cron instance. + // + // E.g. if this function is called for an existing workspace build, which + // currently has a max_deadline within the next 2 hours (see leeway + // above), and the current time is passed into this function, the + // max_deadline will be updated to be much later than expected. + WorkspaceBuildCompletedAt time.Time + Workspace database.WorkspaceTable } type AutostopTime struct { @@ -68,8 +79,8 @@ type AutostopTime struct { // Deadline is the time when the workspace will be stopped, as long as it // doesn't see any new activity (such as SSH, app requests, etc.). When activity // is detected the deadline is bumped by the workspace's TTL (this only happens -// when activity is detected and more than 20% of the TTL has passed to save -// database queries). +// when activity is detected and more than 5% of the TTL has passed to save +// database queries, see the ActivityBumpWorkspace query). // // MaxDeadline is the maximum value for deadline. The deadline cannot be bumped // past this value, so it denotes the absolute deadline that the workspace build @@ -77,55 +88,45 @@ type AutostopTime struct { // requirement" settings and the user's "quiet hours" settings to pick a time // outside of working hours. // -// Deadline is a cost saving measure, while max deadline is a -// compliance/updating measure. +// Note that the deadline is checked at the database level: +// +// (deadline IS NOT zero AND deadline <= max_deadline) UNLESS max_deadline is zero. +// +// Deadline is intended as a cost saving measure, not as a hard policy. It is +// derived from either the workspace's TTL or the template's TTL, depending on +// the template's policy, to ensure workspaces are stopped when they are idle. +// +// MaxDeadline is intended as a compliance policy. It is derived from the +// template's autostop requirement to cap workspace uptime and effectively force +// people to update often. +// +// Note that only the build's CURRENT deadline property influences automation in +// the autobuild package. As stated above, the MaxDeadline property is only used +// to cap the value of a build's deadline. func CalculateAutostop(ctx context.Context, params CalculateAutostopParams) (AutostopTime, error) { ctx, span := tracing.StartSpan(ctx, trace.WithAttributes(attribute.String("coder.workspace_id", params.Workspace.ID.String())), trace.WithAttributes(attribute.String("coder.template_id", params.Workspace.TemplateID.String())), ) defer span.End() - defer span.End() var ( - db = params.Database - workspace = params.Workspace - now = params.Now + db = params.Database + workspace = params.Workspace + buildCompletedAt = params.WorkspaceBuildCompletedAt autostop AutostopTime ) - var ttl time.Duration - if workspace.Ttl.Valid { - // When the workspace is made it copies the template's TTL, and the user - // can unset it to disable it (unless the template has - // UserAutoStopEnabled set to false, see below). - ttl = time.Duration(workspace.Ttl.Int64) - } - - if workspace.Ttl.Valid { - // When the workspace is made it copies the template's TTL, and the user - // can unset it to disable it (unless the template has - // UserAutoStopEnabled set to false, see below). - autostop.Deadline = now.Add(time.Duration(workspace.Ttl.Int64)) - } - templateSchedule, err := params.TemplateScheduleStore.Get(ctx, db, workspace.TemplateID) if err != nil { return autostop, xerrors.Errorf("get template schedule options: %w", err) } - if !templateSchedule.UserAutostopEnabled { - // The user is not permitted to set their own TTL, so use the template - // default. - ttl = 0 - if templateSchedule.DefaultTTL > 0 { - ttl = templateSchedule.DefaultTTL - } - } + ttl := workspaceTTL(workspace, templateSchedule) if ttl > 0 { // Only apply non-zero TTLs. - autostop.Deadline = now.Add(ttl) + autostop.Deadline = buildCompletedAt.Add(ttl) if params.WorkspaceAutostart != "" { // If the deadline passes the next autostart, we need to extend the deadline to // autostart + deadline. ActivityBumpWorkspace already covers this case @@ -137,14 +138,14 @@ func CalculateAutostop(ctx context.Context, params CalculateAutostopParams) (Aut // 3. User starts workspace at 9:45pm. // - The initial deadline is calculated to be 9:45am // - This crosses the autostart deadline, so the deadline is extended to 9pm - nextAutostart, ok := NextAutostart(params.Now, params.WorkspaceAutostart, templateSchedule) + nextAutostart, ok := NextAutostart(params.WorkspaceBuildCompletedAt, params.WorkspaceAutostart, templateSchedule) if ok && autostop.Deadline.After(nextAutostart) { autostop.Deadline = nextAutostart.Add(ttl) } } } - // Otherwise, use the autostop_requirement algorithm. + // Enforce the template autostop requirement if it's configured correctly. if templateSchedule.AutostopRequirement.DaysOfWeek != 0 { // The template has a autostop requirement, so determine the max deadline // of this workspace build. @@ -161,10 +162,10 @@ func CalculateAutostop(ctx context.Context, params CalculateAutostopParams) (Aut // workspace. if userQuietHoursSchedule.Schedule != nil { loc := userQuietHoursSchedule.Schedule.Location() - now := now.In(loc) + buildCompletedAtInLoc := buildCompletedAt.In(loc) // Add the leeway here so we avoid checking today's quiet hours if // the workspace was started <1h before midnight. - startOfStopDay := truncateMidnight(now.Add(autostopRequirementLeeway)) + startOfStopDay := truncateMidnight(buildCompletedAtInLoc.Add(autostopRequirementLeeway)) // If the template schedule wants to only autostop on n-th weeks // then change the startOfDay to be the Monday of the next @@ -183,7 +184,7 @@ func CalculateAutostop(ctx context.Context, params CalculateAutostopParams) (Aut // hour of the scheduled stop time will always bounce to the next // stop window). checkSchedule := userQuietHoursSchedule.Schedule.Next(startOfStopDay.Add(autostopRequirementBuffer)) - if checkSchedule.Before(now.Add(autostopRequirementLeeway)) { + if checkSchedule.Before(buildCompletedAtInLoc.Add(autostopRequirementLeeway)) { // Set the first stop day we try to tomorrow because today's // schedule is too close to now or has already passed. startOfStopDay = nextDayMidnight(startOfStopDay) @@ -213,14 +214,17 @@ func CalculateAutostop(ctx context.Context, params CalculateAutostopParams) (Aut startOfStopDay = nextDayMidnight(startOfStopDay) } - // If the startOfDay is within an hour of now, then we add an hour. + // If the startOfDay is within an hour of the build completion time, + // then we add an hour. checkTime := startOfStopDay - if checkTime.Before(now.Add(time.Hour)) { - checkTime = now.Add(time.Hour) + if checkTime.Before(buildCompletedAtInLoc.Add(time.Hour)) { + checkTime = buildCompletedAtInLoc.Add(time.Hour) } else { - // If it's not within an hour of now, subtract 15 minutes to - // give a little leeway. This prevents skipped stop events - // because autostart perfectly lines up with autostop. + // If it's not within an hour of the build completion time, + // subtract 15 minutes to give a little leeway. This prevents + // skipped stop events because the build time (e.g. autostart + // time) perfectly lines up with the max_deadline minus the + // leeway. checkTime = checkTime.Add(autostopRequirementBuffer) } @@ -238,15 +242,35 @@ func CalculateAutostop(ctx context.Context, params CalculateAutostopParams) (Aut autostop.Deadline = autostop.MaxDeadline } - if (!autostop.Deadline.IsZero() && autostop.Deadline.Before(now)) || (!autostop.MaxDeadline.IsZero() && autostop.MaxDeadline.Before(now)) { + if (!autostop.Deadline.IsZero() && autostop.Deadline.Before(buildCompletedAt)) || (!autostop.MaxDeadline.IsZero() && autostop.MaxDeadline.Before(buildCompletedAt)) { // Something went wrong with the deadline calculation, so we should // bail. - return autostop, xerrors.Errorf("deadline calculation error, computed deadline or max deadline is in the past for workspace build: deadline=%q maxDeadline=%q now=%q", autostop.Deadline, autostop.MaxDeadline, now) + return autostop, xerrors.Errorf("deadline calculation error, computed deadline or max deadline is in the past for workspace build: deadline=%q maxDeadline=%q now=%q", autostop.Deadline, autostop.MaxDeadline, buildCompletedAt) } return autostop, nil } +// workspaceTTL returns the TTL to use for a workspace. +// +// If the template forbids custom workspace TTLs, then we always use the +// template's configured TTL (or 0 if the template has no TTL configured). +func workspaceTTL(workspace database.WorkspaceTable, templateSchedule TemplateScheduleOptions) time.Duration { + // If the template forbids custom workspace TTLs, then we always use the + // template's configured TTL (or 0 if the template has no TTL configured). + if !templateSchedule.UserAutostopEnabled { + // This is intentionally a nested if statement because of the else if. + if templateSchedule.DefaultTTL > 0 { + return templateSchedule.DefaultTTL + } + return 0 + } + if workspace.Ttl.Valid { + return time.Duration(workspace.Ttl.Int64) + } + return 0 +} + // truncateMidnight truncates a time to midnight in the time object's timezone. // t.Truncate(24 * time.Hour) truncates based on the internal time and doesn't // factor daylight savings properly. diff --git a/coderd/schedule/autostop_test.go b/coderd/schedule/autostop_test.go index 85cc7b533a6ea..812f549f34dd2 100644 --- a/coderd/schedule/autostop_test.go +++ b/coderd/schedule/autostop_test.go @@ -76,8 +76,8 @@ func TestCalculateAutoStop(t *testing.T) { t.Log("saturdayMidnightAfterDstOut", saturdayMidnightAfterDstOut) cases := []struct { - name string - now time.Time + name string + buildCompletedAt time.Time wsAutostart string templateAutoStart schedule.TemplateAutostartRequirement @@ -98,7 +98,7 @@ func TestCalculateAutoStop(t *testing.T) { }{ { name: "OK", - now: now, + buildCompletedAt: now, templateAllowAutostop: true, templateDefaultTTL: 0, templateAutostopRequirement: schedule.TemplateAutostopRequirement{}, @@ -108,7 +108,7 @@ func TestCalculateAutoStop(t *testing.T) { }, { name: "Delete", - now: now, + buildCompletedAt: now, templateAllowAutostop: true, templateDefaultTTL: 0, templateAutostopRequirement: schedule.TemplateAutostopRequirement{}, @@ -118,7 +118,7 @@ func TestCalculateAutoStop(t *testing.T) { }, { name: "WorkspaceTTL", - now: now, + buildCompletedAt: now, templateAllowAutostop: true, templateDefaultTTL: 0, templateAutostopRequirement: schedule.TemplateAutostopRequirement{}, @@ -128,7 +128,7 @@ func TestCalculateAutoStop(t *testing.T) { }, { name: "TemplateDefaultTTLIgnored", - now: now, + buildCompletedAt: now, templateAllowAutostop: true, templateDefaultTTL: time.Hour, templateAutostopRequirement: schedule.TemplateAutostopRequirement{}, @@ -138,7 +138,7 @@ func TestCalculateAutoStop(t *testing.T) { }, { name: "WorkspaceTTLOverridesTemplateDefaultTTL", - now: now, + buildCompletedAt: now, templateAllowAutostop: true, templateDefaultTTL: 2 * time.Hour, templateAutostopRequirement: schedule.TemplateAutostopRequirement{}, @@ -148,7 +148,7 @@ func TestCalculateAutoStop(t *testing.T) { }, { name: "TemplateBlockWorkspaceTTL", - now: now, + buildCompletedAt: now, templateAllowAutostop: false, templateDefaultTTL: 3 * time.Hour, templateAutostopRequirement: schedule.TemplateAutostopRequirement{}, @@ -158,7 +158,7 @@ func TestCalculateAutoStop(t *testing.T) { }, { name: "TemplateAutostopRequirement", - now: wednesdayMidnightUTC, + buildCompletedAt: wednesdayMidnightUTC, templateAllowAutostop: true, templateDefaultTTL: 0, userQuietHoursSchedule: sydneyQuietHours, @@ -172,7 +172,7 @@ func TestCalculateAutoStop(t *testing.T) { }, { name: "TemplateAutostopRequirement1HourSkip", - now: saturdayMidnightSydney.Add(-59 * time.Minute), + buildCompletedAt: saturdayMidnightSydney.Add(-59 * time.Minute), templateAllowAutostop: true, templateDefaultTTL: 0, userQuietHoursSchedule: sydneyQuietHours, @@ -188,7 +188,7 @@ func TestCalculateAutoStop(t *testing.T) { // The next autostop requirement should be skipped if the // workspace is started within 1 hour of it. name: "TemplateAutostopRequirementDaily", - now: fridayEveningSydney, + buildCompletedAt: fridayEveningSydney, templateAllowAutostop: true, templateDefaultTTL: 0, userQuietHoursSchedule: sydneyQuietHours, @@ -202,7 +202,7 @@ func TestCalculateAutoStop(t *testing.T) { }, { name: "TemplateAutostopRequirementFortnightly/Skip", - now: wednesdayMidnightUTC, + buildCompletedAt: wednesdayMidnightUTC, templateAllowAutostop: true, templateDefaultTTL: 0, userQuietHoursSchedule: sydneyQuietHours, @@ -216,7 +216,7 @@ func TestCalculateAutoStop(t *testing.T) { }, { name: "TemplateAutostopRequirementFortnightly/NoSkip", - now: wednesdayMidnightUTC.AddDate(0, 0, 7), + buildCompletedAt: wednesdayMidnightUTC.AddDate(0, 0, 7), templateAllowAutostop: true, templateDefaultTTL: 0, userQuietHoursSchedule: sydneyQuietHours, @@ -230,7 +230,7 @@ func TestCalculateAutoStop(t *testing.T) { }, { name: "TemplateAutostopRequirementTriweekly/Skip", - now: wednesdayMidnightUTC, + buildCompletedAt: wednesdayMidnightUTC, templateAllowAutostop: true, templateDefaultTTL: 0, userQuietHoursSchedule: sydneyQuietHours, @@ -246,7 +246,7 @@ func TestCalculateAutoStop(t *testing.T) { }, { name: "TemplateAutostopRequirementTriweekly/NoSkip", - now: wednesdayMidnightUTC.AddDate(0, 0, 7), + buildCompletedAt: wednesdayMidnightUTC.AddDate(0, 0, 7), templateAllowAutostop: true, templateDefaultTTL: 0, userQuietHoursSchedule: sydneyQuietHours, @@ -262,7 +262,7 @@ func TestCalculateAutoStop(t *testing.T) { name: "TemplateAutostopRequirementOverridesWorkspaceTTL", // now doesn't have to be UTC, but it helps us ensure that // timezones are compared correctly in this test. - now: fridayEveningSydney.In(time.UTC), + buildCompletedAt: fridayEveningSydney.In(time.UTC), templateAllowAutostop: true, templateDefaultTTL: 0, userQuietHoursSchedule: sydneyQuietHours, @@ -276,7 +276,7 @@ func TestCalculateAutoStop(t *testing.T) { }, { name: "TemplateAutostopRequirementOverridesTemplateDefaultTTL", - now: fridayEveningSydney.In(time.UTC), + buildCompletedAt: fridayEveningSydney.In(time.UTC), templateAllowAutostop: true, templateDefaultTTL: 3 * time.Hour, userQuietHoursSchedule: sydneyQuietHours, @@ -293,7 +293,7 @@ func TestCalculateAutoStop(t *testing.T) { // The epoch is 2023-01-02 in each timezone. We set the time to // 1 second before 11pm the previous day, as this is the latest time // we allow due to our 2h leeway logic. - now: time.Date(2023, 1, 1, 21, 59, 59, 0, sydneyLoc), + buildCompletedAt: time.Date(2023, 1, 1, 21, 59, 59, 0, sydneyLoc), templateAllowAutostop: true, templateDefaultTTL: 0, userQuietHoursSchedule: sydneyQuietHours, @@ -306,7 +306,7 @@ func TestCalculateAutoStop(t *testing.T) { }, { name: "DaylightSavings/OK", - now: duringDst, + buildCompletedAt: duringDst, templateAllowAutostop: true, templateDefaultTTL: 0, userQuietHoursSchedule: sydneyQuietHours, @@ -320,7 +320,7 @@ func TestCalculateAutoStop(t *testing.T) { }, { name: "DaylightSavings/SwitchMidWeek/In", - now: beforeDstIn, + buildCompletedAt: beforeDstIn, templateAllowAutostop: true, templateDefaultTTL: 0, userQuietHoursSchedule: sydneyQuietHours, @@ -334,7 +334,7 @@ func TestCalculateAutoStop(t *testing.T) { }, { name: "DaylightSavings/SwitchMidWeek/Out", - now: beforeDstOut, + buildCompletedAt: beforeDstOut, templateAllowAutostop: true, templateDefaultTTL: 0, userQuietHoursSchedule: sydneyQuietHours, @@ -348,7 +348,7 @@ func TestCalculateAutoStop(t *testing.T) { }, { name: "DaylightSavings/QuietHoursFallsOnDstSwitch/In", - now: beforeDstIn.Add(-24 * time.Hour), + buildCompletedAt: beforeDstIn.Add(-24 * time.Hour), templateAllowAutostop: true, templateDefaultTTL: 0, userQuietHoursSchedule: dstInQuietHours, @@ -362,7 +362,7 @@ func TestCalculateAutoStop(t *testing.T) { }, { name: "DaylightSavings/QuietHoursFallsOnDstSwitch/Out", - now: beforeDstOut.Add(-24 * time.Hour), + buildCompletedAt: beforeDstOut.Add(-24 * time.Hour), templateAllowAutostop: true, templateDefaultTTL: 0, userQuietHoursSchedule: dstOutQuietHours, @@ -382,7 +382,7 @@ func TestCalculateAutoStop(t *testing.T) { // activity on the workspace. name: "AutostopCrossAutostartBorder", // Starting at 9:45pm, with the autostart at 9am. - now: pastDateNight, + buildCompletedAt: pastDateNight, templateAllowAutostop: false, templateDefaultTTL: time.Hour * 12, workspaceTTL: time.Hour * 12, @@ -405,7 +405,7 @@ func TestCalculateAutoStop(t *testing.T) { // Same as AutostopCrossAutostartBorder, but just misses the autostart. name: "AutostopCrossMissAutostartBorder", // Starting at 8:45pm, with the autostart at 9am. - now: time.Date(pastDateNight.Year(), pastDateNight.Month(), pastDateNight.Day(), 20, 30, 0, 0, chicago), + buildCompletedAt: time.Date(pastDateNight.Year(), pastDateNight.Month(), pastDateNight.Day(), 20, 30, 0, 0, chicago), templateAllowAutostop: false, templateDefaultTTL: time.Hour * 12, workspaceTTL: time.Hour * 12, @@ -429,7 +429,7 @@ func TestCalculateAutoStop(t *testing.T) { // The autostop deadline is before the autostart threshold. name: "AutostopCrossAutostartBorderMaxEarlyDeadline", // Starting at 9:45pm, with the autostart at 9am. - now: pastDateNight, + buildCompletedAt: pastDateNight, templateAllowAutostop: false, templateDefaultTTL: time.Hour * 12, workspaceTTL: time.Hour * 12, @@ -459,7 +459,7 @@ func TestCalculateAutoStop(t *testing.T) { // So the deadline is > 12 hours, but stops at the max deadline. name: "AutostopCrossAutostartBorderMaxDeadline", // Starting at 9:45pm, with the autostart at 9am. - now: pastDateNight, + buildCompletedAt: pastDateNight, templateAllowAutostop: false, templateDefaultTTL: time.Hour * 12, workspaceTTL: time.Hour * 12, @@ -571,7 +571,7 @@ func TestCalculateAutoStop(t *testing.T) { Database: db, TemplateScheduleStore: templateScheduleStore, UserQuietHoursScheduleStore: userQuietHoursScheduleStore, - Now: c.now, + WorkspaceBuildCompletedAt: c.buildCompletedAt, Workspace: workspace, WorkspaceAutostart: c.wsAutostart, }) diff --git a/coderd/searchquery/search.go b/coderd/searchquery/search.go index d35f3c94b5ff7..974872973606c 100644 --- a/coderd/searchquery/search.go +++ b/coderd/searchquery/search.go @@ -223,6 +223,7 @@ func Workspaces(ctx context.Context, db database.Store, query string, page coder Valid: values.Has("outdated"), } filter.HasAITask = parser.NullableBoolean(values, sql.NullBool{}, "has-ai-task") + filter.HasExternalAgent = parser.NullableBoolean(values, sql.NullBool{}, "has_external_agent") filter.OrganizationID = parseOrganization(ctx, db, parser, values, "organization") type paramMatch struct { @@ -263,7 +264,7 @@ func Workspaces(ctx context.Context, db database.Store, query string, page coder return filter, parser.Errors } -func Templates(ctx context.Context, db database.Store, query string) (database.GetTemplatesWithFilterParams, []codersdk.ValidationError) { +func Templates(ctx context.Context, db database.Store, actorID uuid.UUID, query string) (database.GetTemplatesWithFilterParams, []codersdk.ValidationError) { // Always lowercase for all searches. query = strings.ToLower(query) values, errors := searchTerms(query, func(term string, values url.Values) error { @@ -277,13 +278,21 @@ func Templates(ctx context.Context, db database.Store, query string) (database.G parser := httpapi.NewQueryParamParser() filter := database.GetTemplatesWithFilterParams{ - Deleted: parser.Boolean(values, false, "deleted"), - ExactName: parser.String(values, "", "exact_name"), - FuzzyName: parser.String(values, "", "name"), - IDs: parser.UUIDs(values, []uuid.UUID{}, "ids"), - Deprecated: parser.NullableBoolean(values, sql.NullBool{}, "deprecated"), - OrganizationID: parseOrganization(ctx, db, parser, values, "organization"), - HasAITask: parser.NullableBoolean(values, sql.NullBool{}, "has-ai-task"), + Deleted: parser.Boolean(values, false, "deleted"), + OrganizationID: parseOrganization(ctx, db, parser, values, "organization"), + ExactName: parser.String(values, "", "exact_name"), + FuzzyName: parser.String(values, "", "name"), + IDs: parser.UUIDs(values, []uuid.UUID{}, "ids"), + Deprecated: parser.NullableBoolean(values, sql.NullBool{}, "deprecated"), + HasAITask: parser.NullableBoolean(values, sql.NullBool{}, "has-ai-task"), + AuthorID: parser.UUID(values, uuid.Nil, "author_id"), + AuthorUsername: parser.String(values, "", "author"), + HasExternalAgent: parser.NullableBoolean(values, sql.NullBool{}, "has_external_agent"), + } + + if filter.AuthorUsername == codersdk.Me { + filter.AuthorID = actorID + filter.AuthorUsername = "" } parser.ErrorExcessParams(values) diff --git a/coderd/searchquery/search_test.go b/coderd/searchquery/search_test.go index 4744b57edff4a..2a8f4cd6cbb56 100644 --- a/coderd/searchquery/search_test.go +++ b/coderd/searchquery/search_test.go @@ -252,6 +252,36 @@ func TestSearchWorkspace(t *testing.T) { }, }, }, + { + Name: "HasExternalAgentTrue", + Query: "has_external_agent:true", + Expected: database.GetWorkspacesParams{ + HasExternalAgent: sql.NullBool{ + Bool: true, + Valid: true, + }, + }, + }, + { + Name: "HasExternalAgentFalse", + Query: "has_external_agent:false", + Expected: database.GetWorkspacesParams{ + HasExternalAgent: sql.NullBool{ + Bool: false, + Valid: true, + }, + }, + }, + { + Name: "HasExternalAgentMissing", + Query: "", + Expected: database.GetWorkspacesParams{ + HasExternalAgent: sql.NullBool{ + Bool: false, + Valid: false, + }, + }, + }, // Failures { @@ -640,6 +670,7 @@ func TestSearchUsers(t *testing.T) { func TestSearchTemplates(t *testing.T) { t.Parallel() + userID := uuid.New() testCases := []struct { Name string Query string @@ -688,6 +719,44 @@ func TestSearchTemplates(t *testing.T) { }, }, }, + { + Name: "HasExternalAgent", + Query: "has_external_agent:true", + Expected: database.GetTemplatesWithFilterParams{ + HasExternalAgent: sql.NullBool{ + Bool: true, + Valid: true, + }, + }, + }, + { + Name: "HasExternalAgentFalse", + Query: "has_external_agent:false", + Expected: database.GetTemplatesWithFilterParams{ + HasExternalAgent: sql.NullBool{ + Bool: false, + Valid: true, + }, + }, + }, + { + Name: "HasExternalAgentMissing", + Query: "", + Expected: database.GetTemplatesWithFilterParams{ + HasExternalAgent: sql.NullBool{ + Bool: false, + Valid: false, + }, + }, + }, + { + Name: "MyTemplates", + Query: "author:me", + Expected: database.GetTemplatesWithFilterParams{ + AuthorUsername: "", + AuthorID: userID, + }, + }, } for _, c := range testCases { @@ -696,7 +765,7 @@ func TestSearchTemplates(t *testing.T) { // Do not use a real database, this is only used for an // organization lookup. db, _ := dbtestutil.NewDB(t) - values, errs := searchquery.Templates(context.Background(), db, c.Query) + values, errs := searchquery.Templates(context.Background(), db, userID, c.Query) if c.ExpectedErrorContains != "" { require.True(t, len(errs) > 0, "expect some errors") var s strings.Builder diff --git a/coderd/tailnet.go b/coderd/tailnet.go index 172edea95a586..cdcf657fe732d 100644 --- a/coderd/tailnet.go +++ b/coderd/tailnet.go @@ -277,9 +277,9 @@ func (s *ServerTailnet) dialContext(ctx context.Context, network, addr string) ( }, nil } -func (s *ServerTailnet) AgentConn(ctx context.Context, agentID uuid.UUID) (*workspacesdk.AgentConn, func(), error) { +func (s *ServerTailnet) AgentConn(ctx context.Context, agentID uuid.UUID) (workspacesdk.AgentConn, func(), error) { var ( - conn *workspacesdk.AgentConn + conn workspacesdk.AgentConn ret func() ) diff --git a/coderd/taskname/taskname.go b/coderd/taskname/taskname.go new file mode 100644 index 0000000000000..dff57dfd0c7f5 --- /dev/null +++ b/coderd/taskname/taskname.go @@ -0,0 +1,173 @@ +package taskname + +import ( + "context" + "fmt" + "io" + "math/rand/v2" + "os" + "strings" + + "github.com/anthropics/anthropic-sdk-go" + anthropicoption "github.com/anthropics/anthropic-sdk-go/option" + "github.com/moby/moby/pkg/namesgenerator" + "golang.org/x/xerrors" + + "github.com/coder/aisdk-go" + "github.com/coder/coder/v2/codersdk" +) + +const ( + defaultModel = anthropic.ModelClaude3_5HaikuLatest + systemPrompt = `Generate a short workspace name from this AI task prompt. + +Requirements: +- Only lowercase letters, numbers, and hyphens +- Start with "task-" +- Maximum 28 characters total +- Descriptive of the main task + +Examples: +- "Help me debug a Python script" → "task-python-debug" +- "Create a React dashboard component" → "task-react-dashboard" +- "Analyze sales data from Q3" → "task-analyze-q3-sales" +- "Set up CI/CD pipeline" → "task-setup-cicd" + +If you cannot create a suitable name: +- Respond with "task-unnamed"` +) + +var ( + ErrNoAPIKey = xerrors.New("no api key provided") + ErrNoNameGenerated = xerrors.New("no task name generated") +) + +type options struct { + apiKey string + model anthropic.Model +} + +type Option func(o *options) + +func WithAPIKey(apiKey string) Option { + return func(o *options) { + o.apiKey = apiKey + } +} + +func WithModel(model anthropic.Model) Option { + return func(o *options) { + o.model = model + } +} + +func GetAnthropicAPIKeyFromEnv() string { + return os.Getenv("ANTHROPIC_API_KEY") +} + +func GetAnthropicModelFromEnv() anthropic.Model { + return anthropic.Model(os.Getenv("ANTHROPIC_MODEL")) +} + +// generateSuffix generates a random hex string between `0000` and `ffff`. +func generateSuffix() string { + numMin := 0x00000 + numMax := 0x10000 + //nolint:gosec // We don't need a cryptographically secure random number generator for generating a task name suffix. + num := rand.IntN(numMax-numMin) + numMin + + return fmt.Sprintf("%04x", num) +} + +func GenerateFallback() string { + // We have a 32 character limit for the name. + // We have a 5 character prefix `task-`. + // We have a 5 character suffix `-ffff`. + // This leaves us with 22 characters for the middle. + // + // Unfortunately, `namesgenerator.GetRandomName(0)` will + // generate names that are longer than 22 characters, so + // we just trim these down to length. + name := strings.ReplaceAll(namesgenerator.GetRandomName(0), "_", "-") + name = name[:min(len(name), 22)] + name = strings.TrimSuffix(name, "-") + + return fmt.Sprintf("task-%s-%s", name, generateSuffix()) +} + +func Generate(ctx context.Context, prompt string, opts ...Option) (string, error) { + o := options{} + for _, opt := range opts { + opt(&o) + } + + if o.model == "" { + o.model = defaultModel + } + if o.apiKey == "" { + return "", ErrNoAPIKey + } + + conversation := []aisdk.Message{ + { + Role: "system", + Parts: []aisdk.Part{{ + Type: aisdk.PartTypeText, + Text: systemPrompt, + }}, + }, + { + Role: "user", + Parts: []aisdk.Part{{ + Type: aisdk.PartTypeText, + Text: prompt, + }}, + }, + } + + anthropicOptions := anthropic.DefaultClientOptions() + anthropicOptions = append(anthropicOptions, anthropicoption.WithAPIKey(o.apiKey)) + anthropicClient := anthropic.NewClient(anthropicOptions...) + + stream, err := anthropicDataStream(ctx, anthropicClient, o.model, conversation) + if err != nil { + return "", xerrors.Errorf("create anthropic data stream: %w", err) + } + + var acc aisdk.DataStreamAccumulator + stream = stream.WithAccumulator(&acc) + + if err := stream.Pipe(io.Discard); err != nil { + return "", xerrors.Errorf("pipe data stream") + } + + if len(acc.Messages()) == 0 { + return "", ErrNoNameGenerated + } + + generatedName := acc.Messages()[0].Content + + if err := codersdk.NameValid(generatedName); err != nil { + return "", xerrors.Errorf("generated name %v not valid: %w", generatedName, err) + } + + if generatedName == "task-unnamed" { + return "", ErrNoNameGenerated + } + + return fmt.Sprintf("%s-%s", generatedName, generateSuffix()), nil +} + +func anthropicDataStream(ctx context.Context, client anthropic.Client, model anthropic.Model, input []aisdk.Message) (aisdk.DataStream, error) { + messages, system, err := aisdk.MessagesToAnthropic(input) + if err != nil { + return nil, xerrors.Errorf("convert messages to anthropic format: %w", err) + } + + return aisdk.AnthropicToDataStream(client.Messages.NewStreaming(ctx, anthropic.MessageNewParams{ + Model: model, + MaxTokens: 24, + System: system, + Messages: messages, + })), nil +} diff --git a/coderd/taskname/taskname_test.go b/coderd/taskname/taskname_test.go new file mode 100644 index 0000000000000..3eb26ef1d4ac7 --- /dev/null +++ b/coderd/taskname/taskname_test.go @@ -0,0 +1,56 @@ +package taskname_test + +import ( + "os" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/coderd/taskname" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/testutil" +) + +const ( + anthropicEnvVar = "ANTHROPIC_API_KEY" +) + +func TestGenerateFallback(t *testing.T) { + t.Parallel() + + name := taskname.GenerateFallback() + err := codersdk.NameValid(name) + require.NoErrorf(t, err, "expected fallback to be valid workspace name, instead found %s", name) +} + +func TestGenerateTaskName(t *testing.T) { + t.Parallel() + + t.Run("Fallback", func(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitShort) + + name, err := taskname.Generate(ctx, "Some random prompt") + require.ErrorIs(t, err, taskname.ErrNoAPIKey) + require.Equal(t, "", name) + }) + + t.Run("Anthropic", func(t *testing.T) { + t.Parallel() + + apiKey := os.Getenv(anthropicEnvVar) + if apiKey == "" { + t.Skipf("Skipping test as %s not set", anthropicEnvVar) + } + + ctx := testutil.Context(t, testutil.WaitShort) + + name, err := taskname.Generate(ctx, "Create a finance planning app", taskname.WithAPIKey(apiKey)) + require.NoError(t, err) + require.NotEqual(t, "", name) + + err = codersdk.NameValid(name) + require.NoError(t, err, "name should be valid") + }) +} diff --git a/coderd/telemetry/telemetry.go b/coderd/telemetry/telemetry.go index 747cf2cb47de1..8f203126c99ba 100644 --- a/coderd/telemetry/telemetry.go +++ b/coderd/telemetry/telemetry.go @@ -768,7 +768,7 @@ func ConvertWorkspace(workspace database.Workspace) Workspace { // ConvertWorkspaceBuild anonymizes a workspace build. func ConvertWorkspaceBuild(build database.WorkspaceBuild) WorkspaceBuild { - return WorkspaceBuild{ + wb := WorkspaceBuild{ ID: build.ID, CreatedAt: build.CreatedAt, WorkspaceID: build.WorkspaceID, @@ -777,6 +777,10 @@ func ConvertWorkspaceBuild(build database.WorkspaceBuild) WorkspaceBuild { // #nosec G115 - Safe conversion as build numbers are expected to be positive and within uint32 range BuildNumber: uint32(build.BuildNumber), } + if build.HasAITask.Valid { + wb.HasAITask = ptr.Ref(build.HasAITask.Bool) + } + return wb } // ConvertProvisionerJob anonymizes a provisioner job. @@ -1105,6 +1109,9 @@ func ConvertTemplateVersion(version database.TemplateVersion) TemplateVersion { if version.SourceExampleID.Valid { snapVersion.SourceExampleID = &version.SourceExampleID.String } + if version.HasAITask.Valid { + snapVersion.HasAITask = ptr.Ref(version.HasAITask.Bool) + } return snapVersion } @@ -1357,6 +1364,7 @@ type WorkspaceBuild struct { TemplateVersionID uuid.UUID `json:"template_version_id"` JobID uuid.UUID `json:"job_id"` BuildNumber uint32 `json:"build_number"` + HasAITask *bool `json:"has_ai_task"` } type Workspace struct { @@ -1404,6 +1412,7 @@ type TemplateVersion struct { OrganizationID uuid.UUID `json:"organization_id"` JobID uuid.UUID `json:"job_id"` SourceExampleID *string `json:"source_example_id,omitempty"` + HasAITask *bool `json:"has_ai_task"` } type ProvisionerJob struct { diff --git a/coderd/telemetry/telemetry_test.go b/coderd/telemetry/telemetry_test.go index ac836317b680e..5508a7d8816f5 100644 --- a/coderd/telemetry/telemetry_test.go +++ b/coderd/telemetry/telemetry_test.go @@ -7,6 +7,7 @@ import ( "net/http" "net/http/httptest" "net/url" + "slices" "sort" "testing" "time" @@ -105,6 +106,52 @@ func TestTelemetry(t *testing.T) { OpenIn: database.WorkspaceAppOpenInSlimWindow, AgentID: wsagent.ID, }) + + taskJob := dbgen.ProvisionerJob(t, db, nil, database.ProvisionerJob{ + Provisioner: database.ProvisionerTypeTerraform, + StorageMethod: database.ProvisionerStorageMethodFile, + Type: database.ProvisionerJobTypeTemplateVersionDryRun, + OrganizationID: org.ID, + }) + taskTpl := dbgen.Template(t, db, database.Template{ + Provisioner: database.ProvisionerTypeTerraform, + OrganizationID: org.ID, + CreatedBy: user.ID, + }) + taskTV := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + OrganizationID: org.ID, + TemplateID: uuid.NullUUID{UUID: taskTpl.ID, Valid: true}, + CreatedBy: user.ID, + JobID: taskJob.ID, + HasAITask: sql.NullBool{Bool: true, Valid: true}, + }) + taskWs := dbgen.Workspace(t, db, database.WorkspaceTable{ + OwnerID: user.ID, + OrganizationID: org.ID, + TemplateID: taskTpl.ID, + }) + taskWsResource := dbgen.WorkspaceResource(t, db, database.WorkspaceResource{ + JobID: taskJob.ID, + }) + taskWsAgent := dbgen.WorkspaceAgent(t, db, database.WorkspaceAgent{ + ResourceID: taskWsResource.ID, + }) + taskWsApp := dbgen.WorkspaceApp(t, db, database.WorkspaceApp{ + SharingLevel: database.AppSharingLevelOwner, + Health: database.WorkspaceAppHealthDisabled, + OpenIn: database.WorkspaceAppOpenInSlimWindow, + AgentID: taskWsAgent.ID, + }) + taskWB := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ + Transition: database.WorkspaceTransitionStart, + Reason: database.BuildReasonAutostart, + WorkspaceID: taskWs.ID, + TemplateVersionID: tv.ID, + JobID: taskJob.ID, + HasAITask: sql.NullBool{Valid: true, Bool: true}, + AITaskSidebarAppID: uuid.NullUUID{Valid: true, UUID: taskWsApp.ID}, + }) + group := dbgen.Group(t, db, database.Group{ OrganizationID: org.ID, }) @@ -148,19 +195,19 @@ func TestTelemetry(t *testing.T) { }) _, snapshot := collectSnapshot(ctx, t, db, nil) - require.Len(t, snapshot.ProvisionerJobs, 1) + require.Len(t, snapshot.ProvisionerJobs, 2) require.Len(t, snapshot.Licenses, 1) - require.Len(t, snapshot.Templates, 1) - require.Len(t, snapshot.TemplateVersions, 2) + require.Len(t, snapshot.Templates, 2) + require.Len(t, snapshot.TemplateVersions, 3) require.Len(t, snapshot.Users, 1) require.Len(t, snapshot.Groups, 2) // 1 member in the everyone group + 1 member in the custom group require.Len(t, snapshot.GroupMembers, 2) - require.Len(t, snapshot.Workspaces, 1) - require.Len(t, snapshot.WorkspaceApps, 1) - require.Len(t, snapshot.WorkspaceAgents, 1) - require.Len(t, snapshot.WorkspaceBuilds, 1) - require.Len(t, snapshot.WorkspaceResources, 1) + require.Len(t, snapshot.Workspaces, 2) + require.Len(t, snapshot.WorkspaceApps, 2) + require.Len(t, snapshot.WorkspaceAgents, 2) + require.Len(t, snapshot.WorkspaceBuilds, 2) + require.Len(t, snapshot.WorkspaceResources, 2) require.Len(t, snapshot.WorkspaceAgentStats, 1) require.Len(t, snapshot.WorkspaceProxies, 1) require.Len(t, snapshot.WorkspaceModules, 1) @@ -169,11 +216,24 @@ func TestTelemetry(t *testing.T) { require.Len(t, snapshot.TelemetryItems, 2) require.Len(t, snapshot.WorkspaceAgentMemoryResourceMonitors, 1) require.Len(t, snapshot.WorkspaceAgentVolumeResourceMonitors, 1) - wsa := snapshot.WorkspaceAgents[0] + wsa := snapshot.WorkspaceAgents[1] require.Len(t, wsa.Subsystems, 2) require.Equal(t, string(database.WorkspaceAgentSubsystemEnvbox), wsa.Subsystems[0]) require.Equal(t, string(database.WorkspaceAgentSubsystemExectrace), wsa.Subsystems[1]) + require.True(t, slices.ContainsFunc(snapshot.TemplateVersions, func(ttv telemetry.TemplateVersion) bool { + if ttv.ID != taskTV.ID { + return false + } + return assert.NotNil(t, ttv.HasAITask) && assert.True(t, *ttv.HasAITask) + })) + require.True(t, slices.ContainsFunc(snapshot.WorkspaceBuilds, func(twb telemetry.WorkspaceBuild) bool { + if twb.ID != taskWB.ID { + return false + } + return assert.NotNil(t, twb.HasAITask) && assert.True(t, *twb.HasAITask) + })) + tvs := snapshot.TemplateVersions sort.Slice(tvs, func(i, j int) bool { // Sort by SourceExampleID presence (non-nil comes before nil) @@ -403,7 +463,6 @@ func TestTelemetryItem(t *testing.T) { func TestPrebuiltWorkspacesTelemetry(t *testing.T) { t.Parallel() - ctx := testutil.Context(t, testutil.WaitMedium) db, _ := dbtestutil.NewDB(t) cases := []struct { @@ -435,6 +494,7 @@ func TestPrebuiltWorkspacesTelemetry(t *testing.T) { for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) deployment, snapshot := collectSnapshot(ctx, t, db, func(opts telemetry.Options) telemetry.Options { opts.Database = tc.storeFn(db) diff --git a/coderd/templates.go b/coderd/templates.go index bba38bb033614..9202fc48234a6 100644 --- a/coderd/templates.go +++ b/coderd/templates.go @@ -7,6 +7,7 @@ import ( "fmt" "net/http" "sort" + "strings" "time" "github.com/go-chi/chi/v5" @@ -29,6 +30,7 @@ import ( "github.com/coder/coder/v2/coderd/searchquery" "github.com/coder/coder/v2/coderd/telemetry" "github.com/coder/coder/v2/coderd/util/ptr" + "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/coderd/workspacestats" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/examples" @@ -36,8 +38,8 @@ import ( // Returns a single template. // -// @Summary Get template metadata by ID -// @ID get-template-metadata-by-id +// @Summary Get template settings by ID +// @ID get-template-settings-by-id // @Security CoderSessionToken // @Produce json // @Tags Templates @@ -197,8 +199,8 @@ func (api *API) postTemplateByOrganization(rw http.ResponseWriter, r *http.Reque return } - // Default is true until dynamic parameters are promoted to stable. - useClassicParameterFlow := ptr.NilToDefault(createTemplate.UseClassicParameterFlow, true) + // Default is false as dynamic parameters are now the preferred approach. + useClassicParameterFlow := ptr.NilToDefault(createTemplate.UseClassicParameterFlow, false) // Make a temporary struct to represent the template. This is used for // auditing if any of the following checks fail. It will be overwritten when @@ -322,6 +324,7 @@ func (api *API) postTemplateByOrganization(rw http.ResponseWriter, r *http.Reque autostopRequirementDaysOfWeekParsed uint8 autostartRequirementDaysOfWeekParsed uint8 maxPortShareLevel = database.AppSharingLevelOwner // default + corsBehavior = database.CorsBehaviorSimple // default ) if defaultTTL < 0 { validErrs = append(validErrs, codersdk.ValidationError{Field: "default_ttl_ms", Detail: "Must be a positive integer."}) @@ -351,6 +354,20 @@ func (api *API) postTemplateByOrganization(rw http.ResponseWriter, r *http.Reque } } + // Default the CORS behavior here to Simple so we don't break all existing templates. + val := database.CorsBehaviorSimple + if createTemplate.CORSBehavior != nil { + val = database.CorsBehavior(*createTemplate.CORSBehavior) + } + if !val.Valid() { + validErrs = append(validErrs, codersdk.ValidationError{ + Field: "cors_behavior", + Detail: fmt.Sprintf("Invalid CORS behavior %q. Must be one of [%s]", *createTemplate.CORSBehavior, strings.Join(slice.ToStrings(database.AllCorsBehaviorValues()), ", ")), + }) + } else { + corsBehavior = val + } + if autostopRequirementWeeks < 0 { validErrs = append(validErrs, codersdk.ValidationError{Field: "autostop_requirement.weeks", Detail: "Must be a positive integer."}) } @@ -409,6 +426,7 @@ func (api *API) postTemplateByOrganization(rw http.ResponseWriter, r *http.Reque AllowUserCancelWorkspaceJobs: allowUserCancelWorkspaceJobs, MaxPortSharingLevel: maxPortShareLevel, UseClassicParameterFlow: useClassicParameterFlow, + CorsBehavior: corsBehavior, }) if err != nil { return xerrors.Errorf("insert template: %s", err) @@ -526,9 +544,10 @@ func (api *API) templatesByOrganization() http.HandlerFunc { func (api *API) fetchTemplates(mutate func(r *http.Request, arg *database.GetTemplatesWithFilterParams)) http.HandlerFunc { return func(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() + key := httpmw.APIKey(r) queryStr := r.URL.Query().Get("q") - filter, errs := searchquery.Templates(ctx, api.Database, queryStr) + filter, errs := searchquery.Templates(ctx, api.Database, key.UserID, queryStr) if len(errs) > 0 { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ Message: "Invalid template search query.", @@ -610,12 +629,14 @@ func (api *API) templateByOrganizationAndName(rw http.ResponseWriter, r *http.Re httpapi.Write(ctx, rw, http.StatusOK, api.convertTemplate(template)) } -// @Summary Update template metadata by ID -// @ID update-template-metadata-by-id +// @Summary Update template settings by ID +// @ID update-template-settings-by-id // @Security CoderSessionToken +// @Accept json // @Produce json // @Tags Templates // @Param template path string true "Template ID" format(uuid) +// @Param request body codersdk.UpdateTemplateMeta true "Patch template settings request" // @Success 200 {object} codersdk.Template // @Router /templates/{template} [patch] func (api *API) patchTemplateMeta(rw http.ResponseWriter, r *http.Request) { @@ -725,6 +746,19 @@ func (api *API) patchTemplateMeta(rw http.ResponseWriter, r *http.Request) { } } + corsBehavior := template.CorsBehavior + if req.CORSBehavior != nil && *req.CORSBehavior != "" { + val := database.CorsBehavior(*req.CORSBehavior) + if !val.Valid() { + validErrs = append(validErrs, codersdk.ValidationError{ + Field: "cors_behavior", + Detail: fmt.Sprintf("Invalid CORS behavior %q. Must be one of [%s]", *req.CORSBehavior, strings.Join(slice.ToStrings(database.AllCorsBehaviorValues()), ", ")), + }) + } else { + corsBehavior = val + } + } + if len(validErrs) > 0 { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ Message: "Invalid request to update template metadata!", @@ -739,12 +773,16 @@ func (api *API) patchTemplateMeta(rw http.ResponseWriter, r *http.Request) { classicTemplateFlow = *req.UseClassicParameterFlow } + displayName := ptr.NilToDefault(req.DisplayName, template.DisplayName) + description := ptr.NilToDefault(req.Description, template.Description) + icon := ptr.NilToDefault(req.Icon, template.Icon) + var updated database.Template err = api.Database.InTx(func(tx database.Store) error { if req.Name == template.Name && - req.Description == template.Description && - req.DisplayName == template.DisplayName && - req.Icon == template.Icon && + description == template.Description && + displayName == template.DisplayName && + icon == template.Icon && req.AllowUserAutostart == template.AllowUserAutostart && req.AllowUserAutostop == template.AllowUserAutostop && req.AllowUserCancelWorkspaceJobs == template.AllowUserCancelWorkspaceJobs && @@ -759,7 +797,8 @@ func (api *API) patchTemplateMeta(rw http.ResponseWriter, r *http.Request) { req.RequireActiveVersion == template.RequireActiveVersion && (deprecationMessage == template.Deprecated) && (classicTemplateFlow == template.UseClassicParameterFlow) && - maxPortShareLevel == template.MaxPortSharingLevel { + maxPortShareLevel == template.MaxPortSharingLevel && + corsBehavior == template.CorsBehavior { return nil } @@ -794,13 +833,14 @@ func (api *API) patchTemplateMeta(rw http.ResponseWriter, r *http.Request) { ID: template.ID, UpdatedAt: dbtime.Now(), Name: name, - DisplayName: req.DisplayName, - Description: req.Description, - Icon: req.Icon, + DisplayName: displayName, + Description: description, + Icon: icon, AllowUserCancelWorkspaceJobs: req.AllowUserCancelWorkspaceJobs, GroupACL: groupACL, MaxPortSharingLevel: maxPortShareLevel, UseClassicParameterFlow: classicTemplateFlow, + CorsBehavior: corsBehavior, }) if err != nil { return xerrors.Errorf("update template metadata: %w", err) @@ -1084,6 +1124,7 @@ func (api *API) convertTemplate( DeprecationMessage: templateAccessControl.Deprecated, MaxPortShareLevel: maxPortShareLevel, UseClassicParameterFlow: template.UseClassicParameterFlow, + CORSBehavior: codersdk.CORSBehavior(template.CorsBehavior), } } diff --git a/coderd/templates_test.go b/coderd/templates_test.go index 5e7fcea75609d..c470dd17c664a 100644 --- a/coderd/templates_test.go +++ b/coderd/templates_test.go @@ -77,7 +77,7 @@ func TestPostTemplateByOrganization(t *testing.T) { assert.Equal(t, expected.Name, got.Name) assert.Equal(t, expected.Description, got.Description) assert.Equal(t, expected.ActivityBumpMillis, got.ActivityBumpMillis) - assert.Equal(t, expected.UseClassicParameterFlow, true) // Current default is true + assert.Equal(t, expected.UseClassicParameterFlow, false) // Current default is false require.Len(t, auditor.AuditLogs(), 3) assert.Equal(t, database.AuditActionCreate, auditor.AuditLogs()[0].Action) @@ -814,6 +814,46 @@ func TestTemplatesByOrganization(t *testing.T) { require.False(t, templates[0].Deprecated) require.Empty(t, templates[0].DeprecationMessage) }) + + t.Run("ListByAuthor", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, nil) + owner := coderdtest.CreateFirstUser(t, client) + adminAlpha, adminAlphaData := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin()) + adminBravo, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin()) + adminCharlie, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin()) + + versionA := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, nil) + versionB := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, nil) + versionC := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, nil) + foo := coderdtest.CreateTemplate(t, adminAlpha, owner.OrganizationID, versionA.ID, func(request *codersdk.CreateTemplateRequest) { + request.Name = "foo" + }) + bar := coderdtest.CreateTemplate(t, adminBravo, owner.OrganizationID, versionB.ID, func(request *codersdk.CreateTemplateRequest) { + request.Name = "bar" + }) + _ = coderdtest.CreateTemplate(t, adminCharlie, owner.OrganizationID, versionC.ID, func(request *codersdk.CreateTemplateRequest) { + request.Name = "baz" + }) + + ctx := testutil.Context(t, testutil.WaitLong) + + // List alpha + alpha, err := client.Templates(ctx, codersdk.TemplateFilter{ + AuthorUsername: adminAlphaData.Username, + }) + require.NoError(t, err) + require.Len(t, alpha, 1) + require.Equal(t, foo.ID, alpha[0].ID) + + // List bravo + bravo, err := adminBravo.Templates(ctx, codersdk.TemplateFilter{ + AuthorUsername: codersdk.Me, + }) + require.NoError(t, err) + require.Len(t, bravo, 1) + require.Equal(t, bar.ID, bravo[0].ID) + }) } func TestTemplateByOrganizationAndName(t *testing.T) { @@ -861,9 +901,9 @@ func TestPatchTemplateMeta(t *testing.T) { req := codersdk.UpdateTemplateMeta{ Name: "new-template-name", - DisplayName: "Displayed Name 456", - Description: "lorem ipsum dolor sit amet et cetera", - Icon: "/icon/new-icon.png", + DisplayName: ptr.Ref("Displayed Name 456"), + Description: ptr.Ref("lorem ipsum dolor sit amet et cetera"), + Icon: ptr.Ref("/icon/new-icon.png"), DefaultTTLMillis: 12 * time.Hour.Milliseconds(), ActivityBumpMillis: 3 * time.Hour.Milliseconds(), AllowUserCancelWorkspaceJobs: false, @@ -878,9 +918,9 @@ func TestPatchTemplateMeta(t *testing.T) { require.NoError(t, err) assert.Greater(t, updated.UpdatedAt, template.UpdatedAt) assert.Equal(t, req.Name, updated.Name) - assert.Equal(t, req.DisplayName, updated.DisplayName) - assert.Equal(t, req.Description, updated.Description) - assert.Equal(t, req.Icon, updated.Icon) + assert.Equal(t, *req.DisplayName, updated.DisplayName) + assert.Equal(t, *req.Description, updated.Description) + assert.Equal(t, *req.Icon, updated.Icon) assert.Equal(t, req.DefaultTTLMillis, updated.DefaultTTLMillis) assert.Equal(t, req.ActivityBumpMillis, updated.ActivityBumpMillis) assert.False(t, req.AllowUserCancelWorkspaceJobs) @@ -890,9 +930,9 @@ func TestPatchTemplateMeta(t *testing.T) { require.NoError(t, err) assert.Greater(t, updated.UpdatedAt, template.UpdatedAt) assert.Equal(t, req.Name, updated.Name) - assert.Equal(t, req.DisplayName, updated.DisplayName) - assert.Equal(t, req.Description, updated.Description) - assert.Equal(t, req.Icon, updated.Icon) + assert.Equal(t, *req.DisplayName, updated.DisplayName) + assert.Equal(t, *req.Description, updated.Description) + assert.Equal(t, *req.Icon, updated.Icon) assert.Equal(t, req.DefaultTTLMillis, updated.DefaultTTLMillis) assert.Equal(t, req.ActivityBumpMillis, updated.ActivityBumpMillis) assert.False(t, req.AllowUserCancelWorkspaceJobs) @@ -1127,9 +1167,9 @@ func TestPatchTemplateMeta(t *testing.T) { got, err := client.UpdateTemplateMeta(ctx, template.ID, codersdk.UpdateTemplateMeta{ Name: template.Name, - DisplayName: template.DisplayName, - Description: template.Description, - Icon: template.Icon, + DisplayName: &template.DisplayName, + Description: &template.Description, + Icon: &template.Icon, DefaultTTLMillis: 0, AutostopRequirement: &template.AutostopRequirement, AllowUserCancelWorkspaceJobs: template.AllowUserCancelWorkspaceJobs, @@ -1162,9 +1202,9 @@ func TestPatchTemplateMeta(t *testing.T) { got, err := client.UpdateTemplateMeta(ctx, template.ID, codersdk.UpdateTemplateMeta{ Name: template.Name, - DisplayName: template.DisplayName, - Description: template.Description, - Icon: template.Icon, + DisplayName: &template.DisplayName, + Description: &template.Description, + Icon: &template.Icon, DefaultTTLMillis: template.DefaultTTLMillis, AutostopRequirement: &template.AutostopRequirement, AllowUserCancelWorkspaceJobs: template.AllowUserCancelWorkspaceJobs, @@ -1223,9 +1263,9 @@ func TestPatchTemplateMeta(t *testing.T) { allowAutostop.Store(false) got, err := client.UpdateTemplateMeta(ctx, template.ID, codersdk.UpdateTemplateMeta{ Name: template.Name, - DisplayName: template.DisplayName, - Description: template.Description, - Icon: template.Icon, + DisplayName: &template.DisplayName, + Description: &template.Description, + Icon: &template.Icon, DefaultTTLMillis: template.DefaultTTLMillis, AutostopRequirement: &template.AutostopRequirement, AllowUserCancelWorkspaceJobs: template.AllowUserCancelWorkspaceJobs, @@ -1254,9 +1294,9 @@ func TestPatchTemplateMeta(t *testing.T) { got, err := client.UpdateTemplateMeta(ctx, template.ID, codersdk.UpdateTemplateMeta{ Name: template.Name, - DisplayName: template.DisplayName, - Description: template.Description, - Icon: template.Icon, + DisplayName: &template.DisplayName, + Description: &template.Description, + Icon: &template.Icon, // Increase the default TTL to avoid error "not modified". DefaultTTLMillis: template.DefaultTTLMillis + 1, AutostopRequirement: &template.AutostopRequirement, @@ -1286,8 +1326,8 @@ func TestPatchTemplateMeta(t *testing.T) { req := codersdk.UpdateTemplateMeta{ Name: template.Name, - Description: template.Description, - Icon: template.Icon, + Description: &template.Description, + Icon: &template.Icon, DefaultTTLMillis: template.DefaultTTLMillis, ActivityBumpMillis: template.ActivityBumpMillis, AutostopRequirement: nil, @@ -1347,7 +1387,7 @@ func TestPatchTemplateMeta(t *testing.T) { ctr.Icon = "/icon/code.png" }) req := codersdk.UpdateTemplateMeta{ - Icon: "", + Icon: ptr.Ref(""), } ctx := testutil.Context(t, testutil.WaitLong) @@ -1402,9 +1442,9 @@ func TestPatchTemplateMeta(t *testing.T) { require.EqualValues(t, 1, template.AutostopRequirement.Weeks) req := codersdk.UpdateTemplateMeta{ Name: template.Name, - DisplayName: template.DisplayName, - Description: template.Description, - Icon: template.Icon, + DisplayName: &template.DisplayName, + Description: &template.Description, + Icon: &template.Icon, AllowUserCancelWorkspaceJobs: template.AllowUserCancelWorkspaceJobs, DefaultTTLMillis: time.Hour.Milliseconds(), AutostopRequirement: &codersdk.TemplateAutostopRequirement{ @@ -1479,9 +1519,9 @@ func TestPatchTemplateMeta(t *testing.T) { require.EqualValues(t, 2, template.AutostopRequirement.Weeks) req := codersdk.UpdateTemplateMeta{ Name: template.Name, - DisplayName: template.DisplayName, - Description: template.Description, - Icon: template.Icon, + DisplayName: &template.DisplayName, + Description: &template.Description, + Icon: &template.Icon, AllowUserCancelWorkspaceJobs: template.AllowUserCancelWorkspaceJobs, DefaultTTLMillis: time.Hour.Milliseconds(), AutostopRequirement: &codersdk.TemplateAutostopRequirement{ @@ -1516,9 +1556,9 @@ func TestPatchTemplateMeta(t *testing.T) { require.EqualValues(t, 1, template.AutostopRequirement.Weeks) req := codersdk.UpdateTemplateMeta{ Name: template.Name, - DisplayName: template.DisplayName, - Description: template.Description, - Icon: template.Icon, + DisplayName: &template.DisplayName, + Description: &template.Description, + Icon: &template.Icon, AllowUserCancelWorkspaceJobs: template.AllowUserCancelWorkspaceJobs, DefaultTTLMillis: time.Hour.Milliseconds(), AutostopRequirement: &codersdk.TemplateAutostopRequirement{ @@ -1551,7 +1591,7 @@ func TestPatchTemplateMeta(t *testing.T) { user := coderdtest.CreateFirstUser(t, client) version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) - require.True(t, template.UseClassicParameterFlow, "default is true") + require.False(t, template.UseClassicParameterFlow, "default is false") bTrue := true bFalse := false @@ -1578,6 +1618,106 @@ func TestPatchTemplateMeta(t *testing.T) { require.NoError(t, err) assert.False(t, updated.UseClassicParameterFlow, "expected false") }) + + t.Run("SupportEmptyOrDefaultFields", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, nil) + user := coderdtest.CreateFirstUser(t, client) + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) + + displayName := "Test Display Name" + description := "test-description" + icon := "/icon/icon.png" + defaultTTLMillis := 10 * time.Hour.Milliseconds() + + reference := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID, func(ctr *codersdk.CreateTemplateRequest) { + ctr.DisplayName = displayName + ctr.Description = description + ctr.Icon = icon + ctr.DefaultTTLMillis = ptr.Ref(defaultTTLMillis) + }) + require.Equal(t, displayName, reference.DisplayName) + require.Equal(t, description, reference.Description) + require.Equal(t, icon, reference.Icon) + + restoreReq := codersdk.UpdateTemplateMeta{ + DisplayName: &displayName, + Description: &description, + Icon: &icon, + DefaultTTLMillis: defaultTTLMillis, + } + + type expected struct { + displayName string + description string + icon string + defaultTTLMillis int64 + } + + type testCase struct { + name string + req codersdk.UpdateTemplateMeta + expected expected + } + + tests := []testCase{ + { + name: "Only update default_ttl_ms", + req: codersdk.UpdateTemplateMeta{DefaultTTLMillis: 99 * time.Hour.Milliseconds()}, + expected: expected{displayName: reference.DisplayName, description: reference.Description, icon: reference.Icon, defaultTTLMillis: 99 * time.Hour.Milliseconds()}, + }, + { + name: "Clear display name", + req: codersdk.UpdateTemplateMeta{DisplayName: ptr.Ref("")}, + expected: expected{displayName: "", description: reference.Description, icon: reference.Icon, defaultTTLMillis: 0}, + }, + { + name: "Clear description", + req: codersdk.UpdateTemplateMeta{Description: ptr.Ref("")}, + expected: expected{displayName: reference.DisplayName, description: "", icon: reference.Icon, defaultTTLMillis: 0}, + }, + { + name: "Clear icon", + req: codersdk.UpdateTemplateMeta{Icon: ptr.Ref("")}, + expected: expected{displayName: reference.DisplayName, description: reference.Description, icon: "", defaultTTLMillis: 0}, + }, + { + name: "Nil display name defaults to reference display name", + req: codersdk.UpdateTemplateMeta{DisplayName: nil}, + expected: expected{displayName: reference.DisplayName, description: reference.Description, icon: reference.Icon, defaultTTLMillis: 0}, + }, + { + name: "Nil description defaults to reference description", + req: codersdk.UpdateTemplateMeta{Description: nil}, + expected: expected{displayName: reference.DisplayName, description: reference.Description, icon: reference.Icon, defaultTTLMillis: 0}, + }, + { + name: "Nil icon defaults to reference icon", + req: codersdk.UpdateTemplateMeta{Icon: nil}, + expected: expected{displayName: reference.DisplayName, description: reference.Description, icon: reference.Icon, defaultTTLMillis: 0}, + }, + } + + for _, tc := range tests { + //nolint:tparallel,paralleltest + t.Run(tc.name, func(t *testing.T) { + defer func() { + ctx := testutil.Context(t, testutil.WaitLong) + // Restore reference after each test case + _, err := client.UpdateTemplateMeta(ctx, reference.ID, restoreReq) + require.NoError(t, err) + }() + ctx := testutil.Context(t, testutil.WaitLong) + updated, err := client.UpdateTemplateMeta(ctx, reference.ID, tc.req) + require.NoError(t, err) + assert.Equal(t, tc.expected.displayName, updated.DisplayName) + assert.Equal(t, tc.expected.description, updated.Description) + assert.Equal(t, tc.expected.icon, updated.Icon) + assert.Equal(t, tc.expected.defaultTTLMillis, updated.DefaultTTLMillis) + }) + } + }) } func TestDeleteTemplate(t *testing.T) { @@ -1875,3 +2015,59 @@ func TestTemplateFilterHasAITask(t *testing.T) { require.Contains(t, templates, templateWithAITask) require.Contains(t, templates, templateWithoutAITask) } + +func TestTemplateFilterHasExternalAgent(t *testing.T) { + t.Parallel() + + db, pubsub := dbtestutil.NewDB(t) + client := coderdtest.New(t, &coderdtest.Options{ + Database: db, + Pubsub: pubsub, + IncludeProvisionerDaemon: true, + }) + user := coderdtest.CreateFirstUser(t, client) + + jobWithExternalAgent := dbgen.ProvisionerJob(t, db, pubsub, database.ProvisionerJob{ + OrganizationID: user.OrganizationID, + InitiatorID: user.UserID, + Tags: database.StringMap{}, + Type: database.ProvisionerJobTypeTemplateVersionImport, + }) + jobWithoutExternalAgent := dbgen.ProvisionerJob(t, db, pubsub, database.ProvisionerJob{ + OrganizationID: user.OrganizationID, + InitiatorID: user.UserID, + Tags: database.StringMap{}, + Type: database.ProvisionerJobTypeTemplateVersionImport, + }) + versionWithExternalAgent := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + OrganizationID: user.OrganizationID, + CreatedBy: user.UserID, + HasExternalAgent: sql.NullBool{Bool: true, Valid: true}, + JobID: jobWithExternalAgent.ID, + }) + versionWithoutExternalAgent := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + OrganizationID: user.OrganizationID, + CreatedBy: user.UserID, + HasExternalAgent: sql.NullBool{Bool: false, Valid: true}, + JobID: jobWithoutExternalAgent.ID, + }) + templateWithExternalAgent := coderdtest.CreateTemplate(t, client, user.OrganizationID, versionWithExternalAgent.ID) + templateWithoutExternalAgent := coderdtest.CreateTemplate(t, client, user.OrganizationID, versionWithoutExternalAgent.ID) + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + templates, err := client.Templates(ctx, codersdk.TemplateFilter{ + SearchQuery: "has_external_agent:true", + }) + require.NoError(t, err) + require.Len(t, templates, 1) + require.Equal(t, templateWithExternalAgent.ID, templates[0].ID) + + templates, err = client.Templates(ctx, codersdk.TemplateFilter{ + SearchQuery: "has_external_agent:false", + }) + require.NoError(t, err) + require.Len(t, templates, 1) + require.Equal(t, templateWithoutExternalAgent.ID, templates[0].ID) +} diff --git a/coderd/templateversions.go b/coderd/templateversions.go index e787a6b813b18..17a4d9b451e9c 100644 --- a/coderd/templateversions.go +++ b/coderd/templateversions.go @@ -552,6 +552,7 @@ func (api *API) postTemplateVersionDryRun(rw http.ResponseWriter, r *http.Reques Valid: true, RawMessage: metadataRaw, }, + LogsOverflowed: false, }) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ @@ -1471,7 +1472,7 @@ func (api *API) postTemplateVersionsByOrganization(rw http.ResponseWriter, r *ht return } - var dynamicTemplate bool + dynamicTemplate := true // Default to using dynamic templates if req.TemplateID != uuid.Nil { tpl, err := api.Database.GetTemplateByID(ctx, req.TemplateID) if httpapi.Is404Error(err) { @@ -1646,6 +1647,7 @@ func (api *API) postTemplateVersionsByOrganization(rw http.ResponseWriter, r *ht Valid: true, RawMessage: traceMetadataRaw, }, + LogsOverflowed: false, }) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ @@ -1822,6 +1824,14 @@ func (api *API) dynamicTemplateVersionTags(ctx context.Context, rw http.Response return nil, false } + // Fails early if presets are invalid to prevent downstream workspace creation errors + presetErr := dynamicparameters.CheckPresets(output, nil) + if presetErr != nil { + code, resp := presetErr.Response() + httpapi.Write(ctx, rw, code, resp) + return nil, false + } + return output.WorkspaceTags.Tags(), true } @@ -1953,6 +1963,7 @@ func convertTemplateVersion(version database.TemplateVersion, job codersdk.Provi Archived: version.Archived, Warnings: warnings, MatchedProvisioners: matchedProvisioners, + HasExternalAgent: version.HasExternalAgent.Bool, } } diff --git a/coderd/templateversions_test.go b/coderd/templateversions_test.go index 1ad06bae38aee..48f690d26d2eb 100644 --- a/coderd/templateversions_test.go +++ b/coderd/templateversions_test.go @@ -275,6 +275,7 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) { files map[string]string reqTags map[string]string wantTags map[string]string + variables []codersdk.VariableValue expectError string }{ { @@ -290,6 +291,7 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) { default = "1" } data "coder_parameter" "b" { + name = "b" type = string default = "2" } @@ -311,6 +313,7 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) { default = "1" } data "coder_parameter" "b" { + name = "b" type = string default = "2" } @@ -335,6 +338,7 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) { default = "1" } data "coder_parameter" "b" { + name = "b" type = string default = "2" } @@ -365,6 +369,7 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) { default = "1" } data "coder_parameter" "b" { + name = "b" type = string default = "2" } @@ -395,6 +400,7 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) { default = "1" } data "coder_parameter" "b" { + name = "b" type = string default = "2" } @@ -429,11 +435,12 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) { } }`, }, - reqTags: map[string]string{"a": "b"}, - wantTags: map[string]string{"owner": "", "scope": "organization", "a": "b"}, + reqTags: map[string]string{"a": "b"}, + wantTags: map[string]string{"owner": "", "scope": "organization", "a": "b"}, + variables: []codersdk.VariableValue{{Name: "a", Value: "b"}}, }, { - name: "main.tf with disallowed workspace tag value", + name: "main.tf with resource reference", files: map[string]string{ `main.tf`: ` variable "a" { @@ -441,6 +448,7 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) { default = "1" } data "coder_parameter" "b" { + name = "b" type = string default = "2" } @@ -461,38 +469,8 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) { } }`, }, - expectError: `Unknown variable; There is no variable named "null_resource".`, - }, - { - name: "main.tf with disallowed function in tag value", - files: map[string]string{ - `main.tf`: ` - variable "a" { - type = string - default = "1" - } - data "coder_parameter" "b" { - type = string - default = "2" - } - data "coder_parameter" "unrelated" { - name = "unrelated" - type = "list(string)" - default = jsonencode(["a", "b"]) - } - resource "null_resource" "test" { - name = "foo" - } - data "coder_workspace_tags" "tags" { - tags = { - "foo": "bar", - "a": var.a, - "b": data.coder_parameter.b.value, - "test": pathexpand("~/file.txt"), - } - }`, - }, - expectError: `function "pathexpand" may not be used here`, + reqTags: map[string]string{"foo": "bar", "a": "1", "b": "2", "test": "foo"}, + wantTags: map[string]string{"owner": "", "scope": "organization", "foo": "bar", "a": "1", "b": "2", "test": "foo"}, }, // We will allow coder_workspace_tags to set the scope on a template version import job // BUT the user ID will be ultimately determined by the API key in the scope. @@ -618,11 +596,12 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) { // Create a template version from the archive tvName := testutil.GetRandomNameHyphenated(t) tv, err := templateAdmin.CreateTemplateVersion(ctx, owner.OrganizationID, codersdk.CreateTemplateVersionRequest{ - Name: tvName, - StorageMethod: codersdk.ProvisionerStorageMethodFile, - Provisioner: codersdk.ProvisionerTypeTerraform, - FileID: fi.ID, - ProvisionerTags: tt.reqTags, + Name: tvName, + StorageMethod: codersdk.ProvisionerStorageMethodFile, + Provisioner: codersdk.ProvisionerTypeTerraform, + FileID: fi.ID, + ProvisionerTags: tt.reqTags, + UserVariableValues: tt.variables, }) if tt.expectError == "" { @@ -641,6 +620,119 @@ func TestPostTemplateVersionsByOrganization(t *testing.T) { }) } }) + + t.Run("Presets", func(t *testing.T) { + t.Parallel() + store, ps := dbtestutil.NewDB(t) + client := coderdtest.New(t, &coderdtest.Options{ + Database: store, + Pubsub: ps, + }) + owner := coderdtest.CreateFirstUser(t, client) + templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin()) + + for _, tt := range []struct { + name string + files map[string]string + expectError string + }{ + { + name: "valid preset", + files: map[string]string{ + `main.tf`: ` + terraform { + required_providers { + coder = { + source = "coder/coder" + version = "2.8.0" + } + } + } + data "coder_parameter" "valid_parameter" { + name = "valid_parameter_name" + default = "valid_option_value" + option { + name = "valid_option_name" + value = "valid_option_value" + } + } + data "coder_workspace_preset" "valid_preset" { + name = "valid_preset" + parameters = { + "valid_parameter_name" = "valid_option_value" + } + } + `, + }, + }, + { + name: "invalid preset", + files: map[string]string{ + `main.tf`: ` + terraform { + required_providers { + coder = { + source = "coder/coder" + version = "2.8.0" + } + } + } + data "coder_parameter" "valid_parameter" { + name = "valid_parameter_name" + default = "valid_option_value" + option { + name = "valid_option_name" + value = "valid_option_value" + } + } + data "coder_workspace_preset" "invalid_parameter_name" { + name = "invalid_parameter_name" + parameters = { + "invalid_parameter_name" = "irrelevant_value" + } + } + `, + }, + expectError: "Undefined Parameter", + }, + } { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) + + // Create an archive from the files provided in the test case. + tarFile := testutil.CreateTar(t, tt.files) + + // Post the archive file + fi, err := templateAdmin.Upload(ctx, "application/x-tar", bytes.NewReader(tarFile)) + require.NoError(t, err) + + // Create a template version from the archive + tvName := testutil.GetRandomNameHyphenated(t) + tv, err := templateAdmin.CreateTemplateVersion(ctx, owner.OrganizationID, codersdk.CreateTemplateVersionRequest{ + Name: tvName, + StorageMethod: codersdk.ProvisionerStorageMethodFile, + Provisioner: codersdk.ProvisionerTypeTerraform, + FileID: fi.ID, + }) + + if tt.expectError == "" { + require.NoError(t, err) + // Assert the expected provisioner job is created from the template version import + pj, err := store.GetProvisionerJobByID(ctx, tv.Job.ID) + require.NoError(t, err) + require.NotNil(t, pj) + // Also assert that we get the expected information back from the API endpoint + require.Zero(t, tv.MatchedProvisioners.Count) + require.Zero(t, tv.MatchedProvisioners.Available) + require.Zero(t, tv.MatchedProvisioners.MostRecentlySeen.Time) + } else { + require.ErrorContains(t, err, tt.expectError) + require.Equal(t, tv.Job.ID, uuid.Nil) + } + }) + } + }) } func TestPatchCancelTemplateVersion(t *testing.T) { @@ -2129,3 +2221,36 @@ func TestTemplateArchiveVersions(t *testing.T) { require.NoError(t, err, "fetch all versions") require.Len(t, remaining, totalVersions-len(expArchived)-len(allFailed)+1, "remaining versions") } + +func TestTemplateVersionHasExternalAgent(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + + ctx := testutil.Context(t, testutil.WaitMedium) + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionPlan: []*proto.Response{ + { + Type: &proto.Response_Plan{ + Plan: &proto.PlanComplete{ + Resources: []*proto.Resource{ + { + Name: "example", + Type: "coder_external_agent", + }, + }, + HasExternalAgents: true, + }, + }, + }, + }, + ProvisionApply: echo.ApplyComplete, + }) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + + version, err := client.TemplateVersion(ctx, version.ID) + require.NoError(t, err) + require.True(t, version.HasExternalAgent) +} diff --git a/coderd/usage/inserter.go b/coderd/usage/inserter.go new file mode 100644 index 0000000000000..7a0f42daf4724 --- /dev/null +++ b/coderd/usage/inserter.go @@ -0,0 +1,32 @@ +package usage + +import ( + "context" + + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/usage/usagetypes" +) + +// Inserter accepts usage events generated by the product. +type Inserter interface { + // InsertDiscreteUsageEvent writes a discrete usage event to the database + // within the given transaction. + // The caller context must be authorized to create usage events in the + // database. + InsertDiscreteUsageEvent(ctx context.Context, tx database.Store, event usagetypes.DiscreteEvent) error +} + +// AGPLInserter is a no-op implementation of Inserter. +type AGPLInserter struct{} + +var _ Inserter = AGPLInserter{} + +func NewAGPLInserter() Inserter { + return AGPLInserter{} +} + +// InsertDiscreteUsageEvent is a no-op implementation of +// InsertDiscreteUsageEvent. +func (AGPLInserter) InsertDiscreteUsageEvent(_ context.Context, _ database.Store, _ usagetypes.DiscreteEvent) error { + return nil +} diff --git a/coderd/usage/usagetypes/events.go b/coderd/usage/usagetypes/events.go new file mode 100644 index 0000000000000..ef5ac79d455fa --- /dev/null +++ b/coderd/usage/usagetypes/events.go @@ -0,0 +1,152 @@ +// Package usagetypes contains the types for usage events. These are kept in +// their own package to avoid importing any real code from coderd. +// +// Imports in this package should be limited to the standard library and the +// following packages ONLY: +// - github.com/google/uuid +// - golang.org/x/xerrors +// +// This package is imported by the Tallyman codebase. +package usagetypes + +// Please read the package documentation before adding imports. +import ( + "bytes" + "encoding/json" + "fmt" + "strings" + + "golang.org/x/xerrors" +) + +// UsageEventType is an enum of all usage event types. It mirrors the database +// type `usage_event_type`. +type UsageEventType string + +// All event types. +// +// When adding a new event type, ensure you add it to the Valid method and the +// ParseEventWithType function. +const ( + UsageEventTypeDCManagedAgentsV1 UsageEventType = "dc_managed_agents_v1" +) + +func (e UsageEventType) Valid() bool { + switch e { + case UsageEventTypeDCManagedAgentsV1: + return true + default: + return false + } +} + +func (e UsageEventType) IsDiscrete() bool { + return e.Valid() && strings.HasPrefix(string(e), "dc_") +} + +func (e UsageEventType) IsHeartbeat() bool { + return e.Valid() && strings.HasPrefix(string(e), "hb_") +} + +// ParseEvent parses the raw event data into the provided event. It fails if +// there is any unknown fields or extra data at the end of the JSON. The +// returned event is validated. +func ParseEvent(data json.RawMessage, out Event) error { + dec := json.NewDecoder(bytes.NewReader(data)) + dec.DisallowUnknownFields() + + err := dec.Decode(out) + if err != nil { + return xerrors.Errorf("unmarshal %T event: %w", out, err) + } + if dec.More() { + return xerrors.Errorf("extra data after %T event", out) + } + err = out.Valid() + if err != nil { + return xerrors.Errorf("invalid %T event: %w", out, err) + } + + return nil +} + +// UnknownEventTypeError is returned by ParseEventWithType when an unknown event +// type is encountered. +type UnknownEventTypeError struct { + EventType string +} + +var _ error = UnknownEventTypeError{} + +// Error implements error. +func (e UnknownEventTypeError) Error() string { + return fmt.Sprintf("unknown usage event type: %q", e.EventType) +} + +// ParseEventWithType parses the raw event data into the specified Go type. It +// fails if there is any unknown fields or extra data after the event. The +// returned event is validated. +// +// If the event type is unknown, UnknownEventTypeError is returned. +func ParseEventWithType(eventType UsageEventType, data json.RawMessage) (Event, error) { + switch eventType { + case UsageEventTypeDCManagedAgentsV1: + var event DCManagedAgentsV1 + if err := ParseEvent(data, &event); err != nil { + return nil, err + } + return event, nil + default: + return nil, UnknownEventTypeError{EventType: string(eventType)} + } +} + +// Event is a usage event that can be collected by the usage collector. +// +// Note that the following event types should not be updated once they are +// merged into the product. Please consult Dean before making any changes. +// +// This type cannot be implemented outside of this package as it this package +// is the source of truth for the coder/tallyman repo. +type Event interface { + usageEvent() // to prevent external types from implementing this interface + EventType() UsageEventType + Valid() error + Fields() map[string]any // fields to be marshaled and sent to tallyman/Metronome +} + +// DiscreteEvent is a usage event that is collected as a discrete event. +type DiscreteEvent interface { + Event + discreteUsageEvent() // marker method, also prevents external types from implementing this interface +} + +// DCManagedAgentsV1 is a discrete usage event for the number of managed agents. +// This event is sent in the following situations: +// - Once on first startup after usage tracking is added to the product with +// the count of all existing managed agents (count=N) +// - A new managed agent is created (count=1) +type DCManagedAgentsV1 struct { + Count uint64 `json:"count"` +} + +var _ DiscreteEvent = DCManagedAgentsV1{} + +func (DCManagedAgentsV1) usageEvent() {} +func (DCManagedAgentsV1) discreteUsageEvent() {} +func (DCManagedAgentsV1) EventType() UsageEventType { + return UsageEventTypeDCManagedAgentsV1 +} + +func (e DCManagedAgentsV1) Valid() error { + if e.Count == 0 { + return xerrors.New("count must be greater than 0") + } + return nil +} + +func (e DCManagedAgentsV1) Fields() map[string]any { + return map[string]any{ + "count": e.Count, + } +} diff --git a/coderd/usage/usagetypes/events_test.go b/coderd/usage/usagetypes/events_test.go new file mode 100644 index 0000000000000..a04e5d4df025b --- /dev/null +++ b/coderd/usage/usagetypes/events_test.go @@ -0,0 +1,68 @@ +package usagetypes_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/coderd/usage/usagetypes" +) + +func TestParseEvent(t *testing.T) { + t.Parallel() + + t.Run("ExtraFields", func(t *testing.T) { + t.Parallel() + var event usagetypes.DCManagedAgentsV1 + err := usagetypes.ParseEvent([]byte(`{"count": 1, "extra": "field"}`), &event) + require.ErrorContains(t, err, "unmarshal *usagetypes.DCManagedAgentsV1 event") + }) + + t.Run("ExtraData", func(t *testing.T) { + t.Parallel() + var event usagetypes.DCManagedAgentsV1 + err := usagetypes.ParseEvent([]byte(`{"count": 1}{"count": 2}`), &event) + require.ErrorContains(t, err, "extra data after *usagetypes.DCManagedAgentsV1 event") + }) + + t.Run("DCManagedAgentsV1", func(t *testing.T) { + t.Parallel() + + var event usagetypes.DCManagedAgentsV1 + err := usagetypes.ParseEvent([]byte(`{"count": 1}`), &event) + require.NoError(t, err) + require.Equal(t, usagetypes.DCManagedAgentsV1{Count: 1}, event) + require.Equal(t, map[string]any{"count": uint64(1)}, event.Fields()) + + event = usagetypes.DCManagedAgentsV1{} + err = usagetypes.ParseEvent([]byte(`{"count": "invalid"}`), &event) + require.ErrorContains(t, err, "unmarshal *usagetypes.DCManagedAgentsV1 event") + + event = usagetypes.DCManagedAgentsV1{} + err = usagetypes.ParseEvent([]byte(`{}`), &event) + require.ErrorContains(t, err, "invalid *usagetypes.DCManagedAgentsV1 event: count must be greater than 0") + }) +} + +func TestParseEventWithType(t *testing.T) { + t.Parallel() + + t.Run("UnknownEvent", func(t *testing.T) { + t.Parallel() + _, err := usagetypes.ParseEventWithType(usagetypes.UsageEventType("fake"), []byte(`{}`)) + var unknownEventTypeError usagetypes.UnknownEventTypeError + require.ErrorAs(t, err, &unknownEventTypeError) + require.Equal(t, "fake", unknownEventTypeError.EventType) + }) + + t.Run("DCManagedAgentsV1", func(t *testing.T) { + t.Parallel() + + eventType := usagetypes.UsageEventTypeDCManagedAgentsV1 + event, err := usagetypes.ParseEventWithType(eventType, []byte(`{"count": 1}`)) + require.NoError(t, err) + require.Equal(t, usagetypes.DCManagedAgentsV1{Count: 1}, event) + require.Equal(t, eventType, event.EventType()) + require.Equal(t, map[string]any{"count": uint64(1)}, event.Fields()) + }) +} diff --git a/coderd/usage/usagetypes/tallyman.go b/coderd/usage/usagetypes/tallyman.go new file mode 100644 index 0000000000000..38358b7a6d518 --- /dev/null +++ b/coderd/usage/usagetypes/tallyman.go @@ -0,0 +1,70 @@ +package usagetypes + +// Please read the package documentation before adding imports. +import ( + "encoding/json" + "time" + + "golang.org/x/xerrors" +) + +const ( + TallymanCoderLicenseKeyHeader = "Coder-License-Key" + TallymanCoderDeploymentIDHeader = "Coder-Deployment-ID" +) + +// TallymanV1Response is a generic response with a message from the Tallyman +// API. It is typically returned when there is an error. +type TallymanV1Response struct { + Message string `json:"message"` +} + +// TallymanV1IngestRequest is a request to the Tallyman API to ingest usage +// events. +type TallymanV1IngestRequest struct { + Events []TallymanV1IngestEvent `json:"events"` +} + +// TallymanV1IngestEvent is an event to be ingested into the Tallyman API. +type TallymanV1IngestEvent struct { + ID string `json:"id"` + EventType UsageEventType `json:"event_type"` + EventData json.RawMessage `json:"event_data"` + CreatedAt time.Time `json:"created_at"` +} + +// Valid validates the TallymanV1IngestEvent. It does not validate the event +// body. +func (e TallymanV1IngestEvent) Valid() error { + if e.ID == "" { + return xerrors.New("id is required") + } + if !e.EventType.Valid() { + return xerrors.Errorf("event_type %q is invalid", e.EventType) + } + if e.CreatedAt.IsZero() { + return xerrors.New("created_at cannot be zero") + } + return nil +} + +// TallymanV1IngestResponse is a response from the Tallyman API to ingest usage +// events. +type TallymanV1IngestResponse struct { + AcceptedEvents []TallymanV1IngestAcceptedEvent `json:"accepted_events"` + RejectedEvents []TallymanV1IngestRejectedEvent `json:"rejected_events"` +} + +// TallymanV1IngestAcceptedEvent is an event that was accepted by the Tallyman +// API. +type TallymanV1IngestAcceptedEvent struct { + ID string `json:"id"` +} + +// TallymanV1IngestRejectedEvent is an event that was rejected by the Tallyman +// API. +type TallymanV1IngestRejectedEvent struct { + ID string `json:"id"` + Message string `json:"message"` + Permanent bool `json:"permanent"` +} diff --git a/coderd/usage/usagetypes/tallyman_test.go b/coderd/usage/usagetypes/tallyman_test.go new file mode 100644 index 0000000000000..f8f09446dff51 --- /dev/null +++ b/coderd/usage/usagetypes/tallyman_test.go @@ -0,0 +1,85 @@ +package usagetypes_test + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/coderd/usage/usagetypes" +) + +func TestTallymanV1UsageEvent(t *testing.T) { + t.Parallel() + + cases := []struct { + name string + event usagetypes.TallymanV1IngestEvent + errorMessage string + }{ + { + name: "OK", + event: usagetypes.TallymanV1IngestEvent{ + ID: "123", + EventType: usagetypes.UsageEventTypeDCManagedAgentsV1, + // EventData is not validated. + EventData: json.RawMessage{}, + CreatedAt: time.Now(), + }, + errorMessage: "", + }, + { + name: "NoID", + event: usagetypes.TallymanV1IngestEvent{ + EventType: usagetypes.UsageEventTypeDCManagedAgentsV1, + EventData: json.RawMessage{}, + CreatedAt: time.Now(), + }, + errorMessage: "id is required", + }, + { + name: "NoEventType", + event: usagetypes.TallymanV1IngestEvent{ + ID: "123", + EventType: usagetypes.UsageEventType(""), + EventData: json.RawMessage{}, + CreatedAt: time.Now(), + }, + errorMessage: `event_type "" is invalid`, + }, + { + name: "UnknownEventType", + event: usagetypes.TallymanV1IngestEvent{ + ID: "123", + EventType: usagetypes.UsageEventType("unknown"), + EventData: json.RawMessage{}, + CreatedAt: time.Now(), + }, + errorMessage: `event_type "unknown" is invalid`, + }, + { + name: "NoCreatedAt", + event: usagetypes.TallymanV1IngestEvent{ + ID: "123", + EventType: usagetypes.UsageEventTypeDCManagedAgentsV1, + EventData: json.RawMessage{}, + CreatedAt: time.Time{}, + }, + errorMessage: "created_at cannot be zero", + }, + } + + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + err := tc.event.Valid() + if tc.errorMessage == "" { + require.NoError(t, err) + } else { + require.ErrorContains(t, err, tc.errorMessage) + } + }) + } +} diff --git a/coderd/userauth_test.go b/coderd/userauth_test.go index 4c9412fda3fb7..504b102e9ee5b 100644 --- a/coderd/userauth_test.go +++ b/coderd/userauth_test.go @@ -335,7 +335,6 @@ func TestUserOAuth2Github(t *testing.T) { ctx := testutil.Context(t, testutil.WaitLong) - // nolint:gocritic // Unit test count, err := db.GetUserCount(dbauthz.AsSystemRestricted(ctx), false) require.NoError(t, err) require.Equal(t, int64(1), count) @@ -897,7 +896,6 @@ func TestUserOAuth2Github(t *testing.T) { require.Empty(t, links) // Make sure a user_link cannot be created with a deleted user. - // nolint:gocritic // Unit test _, err = db.InsertUserLink(dbauthz.AsSystemRestricted(ctx), database.InsertUserLinkParams{ UserID: deleted.ID, LoginType: "github", diff --git a/coderd/users.go b/coderd/users.go index 7fbb8e7d04cdf..d38d40a1fc826 100644 --- a/coderd/users.go +++ b/coderd/users.go @@ -148,7 +148,7 @@ func (api *API) postFirstUser(rw http.ResponseWriter, r *http.Request) { err = userpassword.Validate(createUser.Password) if err != nil { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: "Password not strong enough!", + Message: "Password is invalid", Validations: []codersdk.ValidationError{{ Field: "password", Detail: err.Error(), @@ -448,7 +448,7 @@ func (api *API) postUser(rw http.ResponseWriter, r *http.Request) { err = userpassword.Validate(req.Password) if err != nil { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: "Password not strong enough!", + Message: "Password is invalid", Validations: []codersdk.ValidationError{{ Field: "password", Detail: err.Error(), @@ -542,7 +542,10 @@ func (api *API) deleteUser(rw http.ResponseWriter, r *http.Request) { return } - workspaces, err := api.Database.GetWorkspaces(ctx, database.GetWorkspacesParams{ + // This query is ONLY done to get the workspace count, so we use a system + // context to return ALL workspaces. Not just workspaces the user can view. + // nolint:gocritic + workspaces, err := api.Database.GetWorkspaces(dbauthz.AsSystemRestricted(ctx), database.GetWorkspacesParams{ OwnerID: user.ID, }) if err != nil { diff --git a/coderd/users_test.go b/coderd/users_test.go index 9d695f37c9906..22c9fad5eebea 100644 --- a/coderd/users_test.go +++ b/coderd/users_test.go @@ -377,6 +377,43 @@ func TestDeleteUser(t *testing.T) { require.ErrorAs(t, err, &apiErr, "should be a coderd error") require.Equal(t, http.StatusForbidden, apiErr.StatusCode(), "should be forbidden") }) + t.Run("CountCheckIncludesAllWorkspaces", func(t *testing.T) { + t.Parallel() + client, _ := coderdtest.NewWithProvisionerCloser(t, nil) + firstUser := coderdtest.CreateFirstUser(t, client) + + // Create a target user who will own a workspace + targetUserClient, targetUser := coderdtest.CreateAnotherUser(t, client, firstUser.OrganizationID) + + // Create a User Admin who should not have permission to see the target user's workspace + userAdminClient, userAdmin := coderdtest.CreateAnotherUser(t, client, firstUser.OrganizationID) + + // Grant User Admin role to the userAdmin + userAdmin, err := client.UpdateUserRoles(context.Background(), userAdmin.ID.String(), codersdk.UpdateRoles{ + Roles: []string{rbac.RoleUserAdmin().String()}, + }) + require.NoError(t, err) + + // Create a template and workspace owned by the target user + version := coderdtest.CreateTemplateVersion(t, client, firstUser.OrganizationID, nil) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, firstUser.OrganizationID, version.ID) + _ = coderdtest.CreateWorkspace(t, targetUserClient, template.ID) + + workspaces, err := userAdminClient.Workspaces(context.Background(), codersdk.WorkspaceFilter{ + Owner: targetUser.Username, + }) + require.NoError(t, err) + require.Len(t, workspaces.Workspaces, 0) + + // Attempt to delete the target user - this should fail because the + // user has a workspace not visible to the deleting user. + err = userAdminClient.DeleteUser(context.Background(), targetUser.ID) + var apiErr *codersdk.Error + require.ErrorAs(t, err, &apiErr) + require.Equal(t, http.StatusExpectationFailed, apiErr.StatusCode()) + require.Contains(t, apiErr.Message, "has workspaces") + }) } func TestNotifyUserStatusChanged(t *testing.T) { @@ -1507,7 +1544,6 @@ func TestUsersFilter(t *testing.T) { } userClient, userData := coderdtest.CreateAnotherUser(t, client, first.OrganizationID, roles...) // Set the last seen for each user to a unique day - // nolint:gocritic // Unit test _, err := api.Database.UpdateUserLastSeenAt(dbauthz.AsSystemRestricted(ctx), database.UpdateUserLastSeenAtParams{ ID: userData.ID, LastSeenAt: lastSeenNow.Add(-1 * time.Hour * 24 * time.Duration(i)), @@ -1535,7 +1571,6 @@ func TestUsersFilter(t *testing.T) { // Add users with different creation dates for testing date filters for i := 0; i < 3; i++ { - // nolint:gocritic // Using system context is necessary to seed data in tests user1, err := api.Database.InsertUser(dbauthz.AsSystemRestricted(ctx), database.InsertUserParams{ ID: uuid.New(), Email: fmt.Sprintf("before%d@coder.com", i), @@ -1557,7 +1592,6 @@ func TestUsersFilter(t *testing.T) { require.NoError(t, err) users = append(users, sdkUser1) - // nolint:gocritic //Using system context is necessary to seed data in tests user2, err := api.Database.InsertUser(dbauthz.AsSystemRestricted(ctx), database.InsertUserParams{ ID: uuid.New(), Email: fmt.Sprintf("during%d@coder.com", i), @@ -1578,7 +1612,6 @@ func TestUsersFilter(t *testing.T) { require.NoError(t, err) users = append(users, sdkUser2) - // nolint:gocritic // Using system context is necessary to seed data in tests user3, err := api.Database.InsertUser(dbauthz.AsSystemRestricted(ctx), database.InsertUserParams{ ID: uuid.New(), Email: fmt.Sprintf("after%d@coder.com", i), @@ -1875,7 +1908,6 @@ func TestGetUsers(t *testing.T) { Email: "test2@coder.com", Username: "test2", }) - // nolint:gocritic // Unit test err := db.UpdateUserGithubComUserID(dbauthz.AsSystemRestricted(ctx), database.UpdateUserGithubComUserIDParams{ ID: first.UserID, GithubComUserID: sql.NullInt64{ diff --git a/coderd/util/strings/strings.go b/coderd/util/strings/strings.go index f416bba463bbf..49aad579e83f5 100644 --- a/coderd/util/strings/strings.go +++ b/coderd/util/strings/strings.go @@ -2,7 +2,12 @@ package strings import ( "fmt" + "strconv" "strings" + "unicode" + + "github.com/acarl005/stripansi" + "github.com/microcosm-cc/bluemonday" ) // JoinWithConjunction joins a slice of strings with commas except for the last @@ -28,3 +33,38 @@ func Truncate(s string, n int) string { } return s[:n] } + +var bmPolicy = bluemonday.StrictPolicy() + +// UISanitize sanitizes a string for display in the UI. +// The following transformations are applied, in order: +// - HTML tags are removed using bluemonday's strict policy. +// - ANSI escape codes are stripped using stripansi. +// - Consecutive backslashes are replaced with a single backslash. +// - Non-printable characters are removed. +// - Whitespace characters are replaced with spaces. +// - Multiple spaces are collapsed into a single space. +// - Leading and trailing whitespace is trimmed. +func UISanitize(in string) string { + if unq, err := strconv.Unquote(`"` + in + `"`); err == nil { + in = unq + } + in = bmPolicy.Sanitize(in) + in = stripansi.Strip(in) + var b strings.Builder + var spaceSeen bool + for _, r := range in { + if unicode.IsSpace(r) { + if !spaceSeen { + _, _ = b.WriteRune(' ') + spaceSeen = true + } + continue + } + spaceSeen = false + if unicode.IsPrint(r) { + _, _ = b.WriteRune(r) + } + } + return strings.TrimSpace(b.String()) +} diff --git a/coderd/util/strings/strings_test.go b/coderd/util/strings/strings_test.go index 5172fb08e1e69..7a20a06a25f28 100644 --- a/coderd/util/strings/strings_test.go +++ b/coderd/util/strings/strings_test.go @@ -3,6 +3,7 @@ package strings_test import ( "testing" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/coder/coder/v2/coderd/util/strings" @@ -37,3 +38,41 @@ func TestTruncate(t *testing.T) { }) } } + +func TestUISanitize(t *testing.T) { + t.Parallel() + + for _, tt := range []struct { + s string + expected string + }{ + {"normal text", "normal text"}, + {"\tfoo \r\\nbar ", "foo bar"}, + {"通常のテキスト", "通常のテキスト"}, + {"foo\nbar", "foo bar"}, + {"foo\tbar", "foo bar"}, + {"foo\rbar", "foo bar"}, + {"foo\x00bar", "foobar"}, + {"\u202Eabc", "abc"}, + {"\u200Bzero width", "zero width"}, + {"foo\x1b[31mred\x1b[0mbar", "fooredbar"}, + {"foo\u0008bar", "foobar"}, + {"foo\x07bar", "foobar"}, + {"foo\uFEFFbar", "foobar"}, + {"link", "link"}, + {"", ""}, + {"HTML", "HTML"}, + {"
line break", "line break"}, + {"", ""}, + {"", ""}, + {"visible", "visible"}, + {"", ""}, + {"", ""}, + } { + t.Run(tt.expected, func(t *testing.T) { + t.Parallel() + actual := strings.UISanitize(tt.s) + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/coderd/workspaceagents.go b/coderd/workspaceagents.go index 3ae57d8394d43..f2ee1ac18e823 100644 --- a/coderd/workspaceagents.go +++ b/coderd/workspaceagents.go @@ -41,6 +41,7 @@ import ( "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/telemetry" maputil "github.com/coder/coder/v2/coderd/util/maps" + strutil "github.com/coder/coder/v2/coderd/util/strings" "github.com/coder/coder/v2/coderd/wspubsub" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/agentsdk" @@ -383,6 +384,9 @@ func (api *API) patchWorkspaceAgentAppStatus(rw http.ResponseWriter, r *http.Req return } + // Treat the message as untrusted input. + cleaned := strutil.UISanitize(req.Message) + // nolint:gocritic // This is a system restricted operation. _, err = api.Database.InsertWorkspaceAppStatus(dbauthz.AsSystemRestricted(ctx), database.InsertWorkspaceAppStatusParams{ ID: uuid.New(), @@ -391,7 +395,7 @@ func (api *API) patchWorkspaceAgentAppStatus(rw http.ResponseWriter, r *http.Req AgentID: workspaceAgent.ID, AppID: app.ID, State: database.WorkspaceAppStatusState(req.State), - Message: req.Message, + Message: cleaned, Uri: sql.NullString{ String: req.URI, Valid: req.URI != "", @@ -892,7 +896,11 @@ func (api *API) watchWorkspaceAgentContainers(rw http.ResponseWriter, r *http.Re case <-ctx.Done(): return - case containers := <-containersCh: + case containers, ok := <-containersCh: + if !ok { + return + } + if err := encoder.Encode(containers); err != nil { api.Logger.Error(ctx, "encode containers", slog.Error(err)) return diff --git a/coderd/workspaceagents_internal_test.go b/coderd/workspaceagents_internal_test.go new file mode 100644 index 0000000000000..c7520f05ab503 --- /dev/null +++ b/coderd/workspaceagents_internal_test.go @@ -0,0 +1,186 @@ +package coderd + +import ( + "bytes" + "context" + "database/sql" + "fmt" + "io" + "net/http" + "net/http/httptest" + "net/http/httputil" + "net/url" + "strings" + "testing" + + "github.com/go-chi/chi/v5" + "github.com/google/uuid" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" + + "cdr.dev/slog" + "cdr.dev/slog/sloggers/slogtest" + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbmock" + "github.com/coder/coder/v2/coderd/database/dbtime" + "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/workspaceapps/appurl" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/codersdk/workspacesdk" + "github.com/coder/coder/v2/codersdk/workspacesdk/agentconnmock" + "github.com/coder/coder/v2/codersdk/wsjson" + "github.com/coder/coder/v2/tailnet" + "github.com/coder/coder/v2/tailnet/tailnettest" + "github.com/coder/coder/v2/testutil" + "github.com/coder/websocket" +) + +type fakeAgentProvider struct { + agentConn func(ctx context.Context, agentID uuid.UUID) (_ workspacesdk.AgentConn, release func(), _ error) +} + +func (fakeAgentProvider) ReverseProxy(targetURL, dashboardURL *url.URL, agentID uuid.UUID, app appurl.ApplicationURL, wildcardHost string) *httputil.ReverseProxy { + panic("unimplemented") +} + +func (f fakeAgentProvider) AgentConn(ctx context.Context, agentID uuid.UUID) (_ workspacesdk.AgentConn, release func(), _ error) { + if f.agentConn != nil { + return f.agentConn(ctx, agentID) + } + + panic("unimplemented") +} + +func (fakeAgentProvider) ServeHTTPDebug(w http.ResponseWriter, r *http.Request) { + panic("unimplemented") +} + +func (fakeAgentProvider) Close() error { + return nil +} + +func TestWatchAgentContainers(t *testing.T) { + t.Parallel() + + t.Run("WebSocketClosesProperly", func(t *testing.T) { + t.Parallel() + + // This test ensures that the agent containers `/watch` websocket can gracefully + // handle the underlying websocket unexpectedly closing. This test was created in + // response to this issue: https://github.com/coder/coder/issues/19372 + + var ( + ctx = testutil.Context(t, testutil.WaitShort) + logger = slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug).Named("coderd") + + mCtrl = gomock.NewController(t) + mDB = dbmock.NewMockStore(mCtrl) + mCoordinator = tailnettest.NewMockCoordinator(mCtrl) + mAgentConn = agentconnmock.NewMockAgentConn(mCtrl) + + fAgentProvider = fakeAgentProvider{ + agentConn: func(ctx context.Context, agentID uuid.UUID) (_ workspacesdk.AgentConn, release func(), _ error) { + return mAgentConn, func() {}, nil + }, + } + + workspaceID = uuid.New() + agentID = uuid.New() + resourceID = uuid.New() + jobID = uuid.New() + buildID = uuid.New() + + containersCh = make(chan codersdk.WorkspaceAgentListContainersResponse) + + r = chi.NewMux() + + api = API{ + ctx: ctx, + Options: &Options{ + AgentInactiveDisconnectTimeout: testutil.WaitShort, + Database: mDB, + Logger: logger, + DeploymentValues: &codersdk.DeploymentValues{}, + TailnetCoordinator: tailnettest.NewFakeCoordinator(), + }, + } + ) + + var tailnetCoordinator tailnet.Coordinator = mCoordinator + api.TailnetCoordinator.Store(&tailnetCoordinator) + api.agentProvider = fAgentProvider + + // Setup: Allow `ExtractWorkspaceAgentParams` to complete. + mDB.EXPECT().GetWorkspaceAgentByID(gomock.Any(), agentID).Return(database.WorkspaceAgent{ + ID: agentID, + ResourceID: resourceID, + LifecycleState: database.WorkspaceAgentLifecycleStateReady, + FirstConnectedAt: sql.NullTime{Valid: true, Time: dbtime.Now()}, + LastConnectedAt: sql.NullTime{Valid: true, Time: dbtime.Now()}, + }, nil) + mDB.EXPECT().GetWorkspaceResourceByID(gomock.Any(), resourceID).Return(database.WorkspaceResource{ + ID: resourceID, + JobID: jobID, + }, nil) + mDB.EXPECT().GetProvisionerJobByID(gomock.Any(), jobID).Return(database.ProvisionerJob{ + ID: jobID, + Type: database.ProvisionerJobTypeWorkspaceBuild, + }, nil) + mDB.EXPECT().GetWorkspaceBuildByJobID(gomock.Any(), jobID).Return(database.WorkspaceBuild{ + WorkspaceID: workspaceID, + ID: buildID, + }, nil) + + // And: Allow `db2dsk.WorkspaceAgent` to complete. + mCoordinator.EXPECT().Node(gomock.Any()).Return(nil) + + // And: Allow `WatchContainers` to be called, returing our `containersCh` channel. + mAgentConn.EXPECT().WatchContainers(gomock.Any(), gomock.Any()). + Return(containersCh, io.NopCloser(&bytes.Buffer{}), nil) + + // And: We mount the HTTP Handler + r.With(httpmw.ExtractWorkspaceAgentParam(mDB)). + Get("/workspaceagents/{workspaceagent}/containers/watch", api.watchWorkspaceAgentContainers) + + // Given: We create the HTTP server + srv := httptest.NewServer(r) + defer srv.Close() + + // And: Dial the WebSocket + wsURL := strings.Replace(srv.URL, "http://", "ws://", 1) + conn, resp, err := websocket.Dial(ctx, fmt.Sprintf("%s/workspaceagents/%s/containers/watch", wsURL, agentID), nil) + require.NoError(t, err) + if resp.Body != nil { + defer resp.Body.Close() + } + + // And: Create a streaming decoder + decoder := wsjson.NewDecoder[codersdk.WorkspaceAgentListContainersResponse](conn, websocket.MessageText, logger) + defer decoder.Close() + decodeCh := decoder.Chan() + + // And: We can successfully send through the channel. + testutil.RequireSend(ctx, t, containersCh, codersdk.WorkspaceAgentListContainersResponse{ + Containers: []codersdk.WorkspaceAgentContainer{{ + ID: "test-container-id", + }}, + }) + + // And: Receive the data. + containerResp := testutil.RequireReceive(ctx, t, decodeCh) + require.Len(t, containerResp.Containers, 1) + require.Equal(t, "test-container-id", containerResp.Containers[0].ID) + + // When: We close the `containersCh` + close(containersCh) + + // Then: We expect `decodeCh` to be closed. + select { + case <-ctx.Done(): + t.Fail() + + case _, ok := <-decodeCh: + require.False(t, ok, "channel is expected to be closed") + } + }) +} diff --git a/coderd/workspaceagents_test.go b/coderd/workspaceagents_test.go index 30859cb6391e6..6f28b12af5ae0 100644 --- a/coderd/workspaceagents_test.go +++ b/coderd/workspaceagents_test.go @@ -562,7 +562,6 @@ func TestWorkspaceAgentConnectRPC(t *testing.T) { seed := database.WorkspaceTable{OrganizationID: user.OrganizationID, OwnerID: user.UserID} wsb := dbfake.WorkspaceBuild(t, db, seed).WithAgent().Do() // When: the workspace is marked as soft-deleted - // nolint:gocritic // this is a test err := db.UpdateWorkspaceDeletedByID( dbauthz.AsProvisionerd(ctx), database.UpdateWorkspaceDeletedByIDParams{ID: wsb.Workspace.ID, Deleted: true}, @@ -593,7 +592,7 @@ func TestWorkspaceAgentTailnet(t *testing.T) { _ = agenttest.New(t, client.URL, r.AgentToken) resources := coderdtest.AwaitWorkspaceAgents(t, client, r.Workspace.ID) - conn, err := func() (*workspacesdk.AgentConn, error) { + conn, err := func() (workspacesdk.AgentConn, error) { ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) defer cancel() // Connection should remain open even if the dial context is canceled. @@ -633,7 +632,6 @@ func TestWorkspaceAgentClientCoordinate_BadVersion(t *testing.T) { ctx := testutil.Context(t, testutil.WaitShort) agentToken, err := uuid.Parse(r.AgentToken) require.NoError(t, err) - //nolint: gocritic // testing ao, err := db.GetWorkspaceAgentAndLatestBuildByAuthToken(dbauthz.AsSystemRestricted(ctx), agentToken) require.NoError(t, err) @@ -724,7 +722,7 @@ func TestWorkspaceAgentClientCoordinate_ResumeToken(t *testing.T) { agentTokenUUID, err := uuid.Parse(r.AgentToken) require.NoError(t, err) ctx := testutil.Context(t, testutil.WaitLong) - agentAndBuild, err := api.Database.GetWorkspaceAgentAndLatestBuildByAuthToken(dbauthz.AsSystemRestricted(ctx), agentTokenUUID) //nolint + agentAndBuild, err := api.Database.GetWorkspaceAgentAndLatestBuildByAuthToken(dbauthz.AsSystemRestricted(ctx), agentTokenUUID) require.NoError(t, err) // Connect with no resume token, and ensure that the peer ID is set to a @@ -796,7 +794,7 @@ func TestWorkspaceAgentClientCoordinate_ResumeToken(t *testing.T) { agentTokenUUID, err := uuid.Parse(r.AgentToken) require.NoError(t, err) ctx := testutil.Context(t, testutil.WaitLong) - agentAndBuild, err := api.Database.GetWorkspaceAgentAndLatestBuildByAuthToken(dbauthz.AsSystemRestricted(ctx), agentTokenUUID) //nolint + agentAndBuild, err := api.Database.GetWorkspaceAgentAndLatestBuildByAuthToken(dbauthz.AsSystemRestricted(ctx), agentTokenUUID) require.NoError(t, err) // Connect with no resume token, and ensure that the peer ID is set to a @@ -1389,169 +1387,147 @@ func TestWorkspaceAgentContainers(t *testing.T) { func TestWatchWorkspaceAgentDevcontainers(t *testing.T) { t.Parallel() - var ( - ctx = testutil.Context(t, testutil.WaitLong) - logger = slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) - mClock = quartz.NewMock(t) - updaterTickerTrap = mClock.Trap().TickerFunc("updaterLoop") - mCtrl = gomock.NewController(t) - mCCLI = acmock.NewMockContainerCLI(mCtrl) - - client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{Logger: &logger}) - user = coderdtest.CreateFirstUser(t, client) - r = dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ - OrganizationID: user.OrganizationID, - OwnerID: user.UserID, - }).WithAgent(func(agents []*proto.Agent) []*proto.Agent { - return agents - }).Do() + t.Run("OK", func(t *testing.T) { + t.Parallel() - fakeContainer1 = codersdk.WorkspaceAgentContainer{ - ID: "container1", - CreatedAt: dbtime.Now(), - FriendlyName: "container1", - Image: "busybox:latest", - Labels: map[string]string{ - agentcontainers.DevcontainerLocalFolderLabel: "/home/coder/project1", - agentcontainers.DevcontainerConfigFileLabel: "/home/coder/project1/.devcontainer/devcontainer.json", - }, - Running: true, - Status: "running", - } + var ( + ctx = testutil.Context(t, testutil.WaitLong) + logger = slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) + mClock = quartz.NewMock(t) + updaterTickerTrap = mClock.Trap().TickerFunc("updaterLoop") + mCtrl = gomock.NewController(t) + mCCLI = acmock.NewMockContainerCLI(mCtrl) + + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{Logger: &logger}) + user = coderdtest.CreateFirstUser(t, client) + r = dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OrganizationID: user.OrganizationID, + OwnerID: user.UserID, + }).WithAgent(func(agents []*proto.Agent) []*proto.Agent { + return agents + }).Do() + + fakeContainer1 = codersdk.WorkspaceAgentContainer{ + ID: "container1", + CreatedAt: dbtime.Now(), + FriendlyName: "container1", + Image: "busybox:latest", + Labels: map[string]string{ + agentcontainers.DevcontainerLocalFolderLabel: "/home/coder/project1", + agentcontainers.DevcontainerConfigFileLabel: "/home/coder/project1/.devcontainer/devcontainer.json", + }, + Running: true, + Status: "running", + } - fakeContainer2 = codersdk.WorkspaceAgentContainer{ - ID: "container1", - CreatedAt: dbtime.Now(), - FriendlyName: "container2", - Image: "busybox:latest", - Labels: map[string]string{ - agentcontainers.DevcontainerLocalFolderLabel: "/home/coder/project2", - agentcontainers.DevcontainerConfigFileLabel: "/home/coder/project2/.devcontainer/devcontainer.json", - }, - Running: true, - Status: "running", - } - ) + fakeContainer2 = codersdk.WorkspaceAgentContainer{ + ID: "container1", + CreatedAt: dbtime.Now(), + FriendlyName: "container2", + Image: "busybox:latest", + Labels: map[string]string{ + agentcontainers.DevcontainerLocalFolderLabel: "/home/coder/project2", + agentcontainers.DevcontainerConfigFileLabel: "/home/coder/project2/.devcontainer/devcontainer.json", + }, + Running: true, + Status: "running", + } + ) - stages := []struct { - containers []codersdk.WorkspaceAgentContainer - expected codersdk.WorkspaceAgentListContainersResponse - }{ - { - containers: []codersdk.WorkspaceAgentContainer{fakeContainer1}, - expected: codersdk.WorkspaceAgentListContainersResponse{ - Containers: []codersdk.WorkspaceAgentContainer{fakeContainer1}, - Devcontainers: []codersdk.WorkspaceAgentDevcontainer{ - { - Name: "project1", - WorkspaceFolder: fakeContainer1.Labels[agentcontainers.DevcontainerLocalFolderLabel], - ConfigPath: fakeContainer1.Labels[agentcontainers.DevcontainerConfigFileLabel], - Status: "running", - Container: &fakeContainer1, + stages := []struct { + containers []codersdk.WorkspaceAgentContainer + expected codersdk.WorkspaceAgentListContainersResponse + }{ + { + containers: []codersdk.WorkspaceAgentContainer{fakeContainer1}, + expected: codersdk.WorkspaceAgentListContainersResponse{ + Containers: []codersdk.WorkspaceAgentContainer{fakeContainer1}, + Devcontainers: []codersdk.WorkspaceAgentDevcontainer{ + { + Name: "project1", + WorkspaceFolder: fakeContainer1.Labels[agentcontainers.DevcontainerLocalFolderLabel], + ConfigPath: fakeContainer1.Labels[agentcontainers.DevcontainerConfigFileLabel], + Status: "running", + Container: &fakeContainer1, + }, }, }, }, - }, - { - containers: []codersdk.WorkspaceAgentContainer{fakeContainer1, fakeContainer2}, - expected: codersdk.WorkspaceAgentListContainersResponse{ - Containers: []codersdk.WorkspaceAgentContainer{fakeContainer1, fakeContainer2}, - Devcontainers: []codersdk.WorkspaceAgentDevcontainer{ - { - Name: "project1", - WorkspaceFolder: fakeContainer1.Labels[agentcontainers.DevcontainerLocalFolderLabel], - ConfigPath: fakeContainer1.Labels[agentcontainers.DevcontainerConfigFileLabel], - Status: "running", - Container: &fakeContainer1, - }, - { - Name: "project2", - WorkspaceFolder: fakeContainer2.Labels[agentcontainers.DevcontainerLocalFolderLabel], - ConfigPath: fakeContainer2.Labels[agentcontainers.DevcontainerConfigFileLabel], - Status: "running", - Container: &fakeContainer2, + { + containers: []codersdk.WorkspaceAgentContainer{fakeContainer1, fakeContainer2}, + expected: codersdk.WorkspaceAgentListContainersResponse{ + Containers: []codersdk.WorkspaceAgentContainer{fakeContainer1, fakeContainer2}, + Devcontainers: []codersdk.WorkspaceAgentDevcontainer{ + { + Name: "project1", + WorkspaceFolder: fakeContainer1.Labels[agentcontainers.DevcontainerLocalFolderLabel], + ConfigPath: fakeContainer1.Labels[agentcontainers.DevcontainerConfigFileLabel], + Status: "running", + Container: &fakeContainer1, + }, + { + Name: "project2", + WorkspaceFolder: fakeContainer2.Labels[agentcontainers.DevcontainerLocalFolderLabel], + ConfigPath: fakeContainer2.Labels[agentcontainers.DevcontainerConfigFileLabel], + Status: "running", + Container: &fakeContainer2, + }, }, }, }, - }, - { - containers: []codersdk.WorkspaceAgentContainer{fakeContainer2}, - expected: codersdk.WorkspaceAgentListContainersResponse{ - Containers: []codersdk.WorkspaceAgentContainer{fakeContainer2}, - Devcontainers: []codersdk.WorkspaceAgentDevcontainer{ - { - Name: "", - WorkspaceFolder: fakeContainer1.Labels[agentcontainers.DevcontainerLocalFolderLabel], - ConfigPath: fakeContainer1.Labels[agentcontainers.DevcontainerConfigFileLabel], - Status: "stopped", - Container: nil, - }, - { - Name: "project2", - WorkspaceFolder: fakeContainer2.Labels[agentcontainers.DevcontainerLocalFolderLabel], - ConfigPath: fakeContainer2.Labels[agentcontainers.DevcontainerConfigFileLabel], - Status: "running", - Container: &fakeContainer2, + { + containers: []codersdk.WorkspaceAgentContainer{fakeContainer2}, + expected: codersdk.WorkspaceAgentListContainersResponse{ + Containers: []codersdk.WorkspaceAgentContainer{fakeContainer2}, + Devcontainers: []codersdk.WorkspaceAgentDevcontainer{ + { + Name: "", + WorkspaceFolder: fakeContainer1.Labels[agentcontainers.DevcontainerLocalFolderLabel], + ConfigPath: fakeContainer1.Labels[agentcontainers.DevcontainerConfigFileLabel], + Status: "stopped", + Container: nil, + }, + { + Name: "project2", + WorkspaceFolder: fakeContainer2.Labels[agentcontainers.DevcontainerLocalFolderLabel], + ConfigPath: fakeContainer2.Labels[agentcontainers.DevcontainerConfigFileLabel], + Status: "running", + Container: &fakeContainer2, + }, }, }, }, - }, - } - - // Set up initial state for immediate send on connection - mCCLI.EXPECT().List(gomock.Any()).Return(codersdk.WorkspaceAgentListContainersResponse{Containers: stages[0].containers}, nil) - mCCLI.EXPECT().DetectArchitecture(gomock.Any(), gomock.Any()).Return("", nil).AnyTimes() - - _ = agenttest.New(t, client.URL, r.AgentToken, func(o *agent.Options) { - o.Logger = logger.Named("agent") - o.Devcontainers = true - o.DevcontainerAPIOptions = []agentcontainers.Option{ - agentcontainers.WithClock(mClock), - agentcontainers.WithContainerCLI(mCCLI), - agentcontainers.WithWatcher(watcher.NewNoop()), } - }) - - resources := coderdtest.NewWorkspaceAgentWaiter(t, client, r.Workspace.ID).Wait() - require.Len(t, resources, 1, "expected one resource") - require.Len(t, resources[0].Agents, 1, "expected one agent") - agentID := resources[0].Agents[0].ID - updaterTickerTrap.MustWait(ctx).MustRelease(ctx) - defer updaterTickerTrap.Close() + // Set up initial state for immediate send on connection + mCCLI.EXPECT().List(gomock.Any()).Return(codersdk.WorkspaceAgentListContainersResponse{Containers: stages[0].containers}, nil) + mCCLI.EXPECT().DetectArchitecture(gomock.Any(), gomock.Any()).Return("", nil).AnyTimes() - containers, closer, err := client.WatchWorkspaceAgentContainers(ctx, agentID) - require.NoError(t, err) - defer func() { - closer.Close() - }() + _ = agenttest.New(t, client.URL, r.AgentToken, func(o *agent.Options) { + o.Logger = logger.Named("agent") + o.Devcontainers = true + o.DevcontainerAPIOptions = []agentcontainers.Option{ + agentcontainers.WithClock(mClock), + agentcontainers.WithContainerCLI(mCCLI), + agentcontainers.WithWatcher(watcher.NewNoop()), + } + }) - // Read initial state sent immediately on connection - var got codersdk.WorkspaceAgentListContainersResponse - select { - case <-ctx.Done(): - case got = <-containers: - } - require.NoError(t, ctx.Err()) - - require.Equal(t, stages[0].expected.Containers, got.Containers) - require.Len(t, got.Devcontainers, len(stages[0].expected.Devcontainers)) - for j, expectedDev := range stages[0].expected.Devcontainers { - gotDev := got.Devcontainers[j] - require.Equal(t, expectedDev.Name, gotDev.Name) - require.Equal(t, expectedDev.WorkspaceFolder, gotDev.WorkspaceFolder) - require.Equal(t, expectedDev.ConfigPath, gotDev.ConfigPath) - require.Equal(t, expectedDev.Status, gotDev.Status) - require.Equal(t, expectedDev.Container, gotDev.Container) - } + resources := coderdtest.NewWorkspaceAgentWaiter(t, client, r.Workspace.ID).Wait() + require.Len(t, resources, 1, "expected one resource") + require.Len(t, resources[0].Agents, 1, "expected one agent") + agentID := resources[0].Agents[0].ID - // Process remaining stages through updater loop - for i, stage := range stages[1:] { - mCCLI.EXPECT().List(gomock.Any()).Return(codersdk.WorkspaceAgentListContainersResponse{Containers: stage.containers}, nil) + updaterTickerTrap.MustWait(ctx).MustRelease(ctx) + defer updaterTickerTrap.Close() - _, aw := mClock.AdvanceNext() - aw.MustWait(ctx) + containers, closer, err := client.WatchWorkspaceAgentContainers(ctx, agentID) + require.NoError(t, err) + defer func() { + closer.Close() + }() + // Read initial state sent immediately on connection var got codersdk.WorkspaceAgentListContainersResponse select { case <-ctx.Done(): @@ -1559,9 +1535,9 @@ func TestWatchWorkspaceAgentDevcontainers(t *testing.T) { } require.NoError(t, ctx.Err()) - require.Equal(t, stages[i+1].expected.Containers, got.Containers) - require.Len(t, got.Devcontainers, len(stages[i+1].expected.Devcontainers)) - for j, expectedDev := range stages[i+1].expected.Devcontainers { + require.Equal(t, stages[0].expected.Containers, got.Containers) + require.Len(t, got.Devcontainers, len(stages[0].expected.Devcontainers)) + for j, expectedDev := range stages[0].expected.Devcontainers { gotDev := got.Devcontainers[j] require.Equal(t, expectedDev.Name, gotDev.Name) require.Equal(t, expectedDev.WorkspaceFolder, gotDev.WorkspaceFolder) @@ -1569,7 +1545,33 @@ func TestWatchWorkspaceAgentDevcontainers(t *testing.T) { require.Equal(t, expectedDev.Status, gotDev.Status) require.Equal(t, expectedDev.Container, gotDev.Container) } - } + + // Process remaining stages through updater loop + for i, stage := range stages[1:] { + mCCLI.EXPECT().List(gomock.Any()).Return(codersdk.WorkspaceAgentListContainersResponse{Containers: stage.containers}, nil) + + _, aw := mClock.AdvanceNext() + aw.MustWait(ctx) + + var got codersdk.WorkspaceAgentListContainersResponse + select { + case <-ctx.Done(): + case got = <-containers: + } + require.NoError(t, ctx.Err()) + + require.Equal(t, stages[i+1].expected.Containers, got.Containers) + require.Len(t, got.Devcontainers, len(stages[i+1].expected.Devcontainers)) + for j, expectedDev := range stages[i+1].expected.Devcontainers { + gotDev := got.Devcontainers[j] + require.Equal(t, expectedDev.Name, gotDev.Name) + require.Equal(t, expectedDev.WorkspaceFolder, gotDev.WorkspaceFolder) + require.Equal(t, expectedDev.ConfigPath, gotDev.ConfigPath) + require.Equal(t, expectedDev.Status, gotDev.Status) + require.Equal(t, expectedDev.Container, gotDev.Container) + } + } + }) } func TestWorkspaceAgentRecreateDevcontainer(t *testing.T) { @@ -2417,7 +2419,7 @@ func TestWorkspaceAgent_UpdatedDERP(t *testing.T) { agentID := resources[0].Agents[0].ID // Connect from a client. - conn1, err := func() (*workspacesdk.AgentConn, error) { + conn1, err := func() (workspacesdk.AgentConn, error) { ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) defer cancel() // Connection should remain open even if the dial context is canceled. @@ -2458,7 +2460,7 @@ func TestWorkspaceAgent_UpdatedDERP(t *testing.T) { // Wait for the DERP map to be updated on the existing client. require.Eventually(t, func() bool { - regionIDs := conn1.Conn.DERPMap().RegionIDs() + regionIDs := conn1.TailnetConn().DERPMap().RegionIDs() return len(regionIDs) == 1 && regionIDs[0] == 2 }, testutil.WaitLong, testutil.IntervalFast) @@ -2475,7 +2477,7 @@ func TestWorkspaceAgent_UpdatedDERP(t *testing.T) { defer conn2.Close() ok = conn2.AwaitReachable(ctx) require.True(t, ok) - require.Equal(t, []int{2}, conn2.DERPMap().RegionIDs()) + require.Equal(t, []int{2}, conn2.TailnetConn().DERPMap().RegionIDs()) } func TestWorkspaceAgentExternalAuthListen(t *testing.T) { diff --git a/coderd/workspaceagentsrpc.go b/coderd/workspaceagentsrpc.go index 0806118f2a832..8dacbe9812ca9 100644 --- a/coderd/workspaceagentsrpc.go +++ b/coderd/workspaceagentsrpc.go @@ -6,7 +6,6 @@ import ( "fmt" "io" "net/http" - "runtime/pprof" "sync" "sync/atomic" "time" @@ -348,16 +347,14 @@ func (m *agentConnectionMonitor) init() { func (m *agentConnectionMonitor) start(ctx context.Context) { ctx, m.cancel = context.WithCancel(ctx) m.wg.Add(2) - go pprof.Do(ctx, pprof.Labels("agent", m.workspaceAgent.ID.String()), - func(ctx context.Context) { - defer m.wg.Done() - m.sendPings(ctx) - }) - go pprof.Do(ctx, pprof.Labels("agent", m.workspaceAgent.ID.String()), - func(ctx context.Context) { - defer m.wg.Done() - m.monitor(ctx) - }) + go func(ctx context.Context) { + defer m.wg.Done() + m.sendPings(ctx) + }(ctx) + go func(ctx context.Context) { + defer m.wg.Done() + m.monitor(ctx) + }(ctx) } func (m *agentConnectionMonitor) monitor(ctx context.Context) { diff --git a/coderd/workspaceapps/apptest/apptest.go b/coderd/workspaceapps/apptest/apptest.go index d0f3acda77278..eab91de30df97 100644 --- a/coderd/workspaceapps/apptest/apptest.go +++ b/coderd/workspaceapps/apptest/apptest.go @@ -472,6 +472,409 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) { }) }) + t.Run("WorkspaceApplicationCORS", func(t *testing.T) { + t.Parallel() + + const external = "https://example.com" + + unauthenticatedClient := func(t *testing.T, appDetails *Details) *codersdk.Client { + c := appDetails.AppClient(t) + c.SetSessionToken("") + return c + } + + authenticatedClient := func(t *testing.T, appDetails *Details) *codersdk.Client { + uc, _ := coderdtest.CreateAnotherUser(t, appDetails.SDKClient, appDetails.FirstUser.OrganizationID, rbac.RoleMember()) + c := appDetails.AppClient(t) + c.SetSessionToken(uc.SessionToken()) + return c + } + + ownSubdomain := func(details *Details, app App) string { + url := details.SubdomainAppURL(app) + return url.Scheme + "://" + url.Host + } + + externalOrigin := func(*Details, App) string { + return external + } + + tests := []struct { + name string + app func(details *Details) App + client func(t *testing.T, appDetails *Details) *codersdk.Client + behavior codersdk.CORSBehavior + httpMethod string + origin func(details *Details, app App) string + expectedStatusCode int + checkRequestHeaders func(t *testing.T, origin string, req http.Header) + checkResponseHeaders func(t *testing.T, origin string, resp http.Header) + }{ + // Public + { // fails + // The default behavior is to accept preflight requests from the request origin if it matches the app's own subdomain. + name: "Default/Public/Preflight/Subdomain", + app: func(details *Details) App { return details.Apps.PublicCORSDefault }, + behavior: codersdk.CORSBehaviorSimple, + client: unauthenticatedClient, + httpMethod: http.MethodOptions, + origin: ownSubdomain, + expectedStatusCode: http.StatusOK, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + assert.Equal(t, origin, resp.Get("Access-Control-Allow-Origin")) + assert.Contains(t, resp.Get("Access-Control-Allow-Methods"), http.MethodGet) + assert.Equal(t, "true", resp.Get("Access-Control-Allow-Credentials")) + }, + }, + { // passes + // The default behavior is to reject preflight requests from origins other than the app's own subdomain. + name: "Default/Public/Preflight/External", + app: func(details *Details) App { return details.Apps.PublicCORSDefault }, + behavior: codersdk.CORSBehaviorSimple, + client: unauthenticatedClient, + httpMethod: http.MethodOptions, + origin: externalOrigin, + expectedStatusCode: http.StatusOK, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + // We don't add a valid Allow-Origin header for requests we won't proxy. + assert.Empty(t, resp.Get("Access-Control-Allow-Origin")) + }, + }, + { // fails + // A request without an Origin header would be rejected by an actual browser since it lacks CORS headers. + name: "Default/Public/GET/NoOrigin", + app: func(details *Details) App { return details.Apps.PublicCORSDefault }, + behavior: codersdk.CORSBehaviorSimple, + client: unauthenticatedClient, + origin: func(*Details, App) string { return "" }, + httpMethod: http.MethodGet, + expectedStatusCode: http.StatusOK, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + assert.Empty(t, resp.Get("Access-Control-Allow-Origin")) + assert.Empty(t, resp.Get("Access-Control-Allow-Headers")) + assert.Empty(t, resp.Get("Access-Control-Allow-Credentials")) + // Added by the app handler. + assert.Equal(t, "simple", resp.Get("X-CORS-Handler")) + }, + }, + { // fails + // The passthru behavior will pass through the request headers to the upstream app. + name: "Passthru/Public/Preflight/Subdomain", + app: func(details *Details) App { return details.Apps.PublicCORSPassthru }, + behavior: codersdk.CORSBehaviorPassthru, + client: unauthenticatedClient, + origin: ownSubdomain, + httpMethod: http.MethodOptions, + expectedStatusCode: http.StatusOK, + checkRequestHeaders: func(t *testing.T, origin string, req http.Header) { + assert.Equal(t, origin, req.Get("Origin")) + assert.Equal(t, "GET", req.Get("Access-Control-Request-Method")) + }, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + assert.Equal(t, origin, resp.Get("Access-Control-Allow-Origin")) + assert.Equal(t, http.MethodGet, resp.Get("Access-Control-Allow-Methods")) + // Added by the app handler. + assert.Equal(t, "passthru", resp.Get("X-CORS-Handler")) + }, + }, + { // fails + // Identical to the previous test, but the origin is different. + name: "Passthru/Public/PreflightOther", + app: func(details *Details) App { return details.Apps.PublicCORSPassthru }, + behavior: codersdk.CORSBehaviorPassthru, + client: unauthenticatedClient, + origin: externalOrigin, + httpMethod: http.MethodOptions, + expectedStatusCode: http.StatusOK, + checkRequestHeaders: func(t *testing.T, origin string, req http.Header) { + assert.Equal(t, origin, req.Get("Origin")) + assert.Equal(t, "GET", req.Get("Access-Control-Request-Method")) + assert.Equal(t, "X-Got-Host", req.Get("Access-Control-Request-Headers")) + }, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + assert.Equal(t, origin, resp.Get("Access-Control-Allow-Origin")) + assert.Equal(t, http.MethodGet, resp.Get("Access-Control-Allow-Methods")) + // Added by the app handler. + assert.Equal(t, "passthru", resp.Get("X-CORS-Handler")) + }, + }, + { + // A request without an Origin header would be rejected by an actual browser since it lacks CORS headers. + name: "Passthru/Public/GET/NoOrigin", + app: func(details *Details) App { return details.Apps.PublicCORSPassthru }, + behavior: codersdk.CORSBehaviorPassthru, + client: unauthenticatedClient, + origin: func(*Details, App) string { return "" }, + httpMethod: http.MethodGet, + expectedStatusCode: http.StatusOK, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + assert.Empty(t, resp.Get("Access-Control-Allow-Origin")) + assert.Empty(t, resp.Get("Access-Control-Allow-Headers")) + assert.Empty(t, resp.Get("Access-Control-Allow-Credentials")) + // Added by the app handler. + assert.Equal(t, "passthru", resp.Get("X-CORS-Handler")) + }, + }, + // Authenticated + { + // Same behavior as Default/Public/Preflight/Subdomain. + name: "Default/Authenticated/Preflight/Subdomain", + app: func(details *Details) App { return details.Apps.AuthenticatedCORSDefault }, + behavior: codersdk.CORSBehaviorSimple, + client: authenticatedClient, + origin: ownSubdomain, + httpMethod: http.MethodOptions, + expectedStatusCode: http.StatusOK, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + assert.Equal(t, origin, resp.Get("Access-Control-Allow-Origin")) + assert.Contains(t, resp.Get("Access-Control-Allow-Methods"), http.MethodGet) + assert.Equal(t, "true", resp.Get("Access-Control-Allow-Credentials")) + assert.Equal(t, "X-Got-Host", resp.Get("Access-Control-Allow-Headers")) + }, + }, + { + // Same behavior as Default/Public/Preflight/External. + name: "Default/Authenticated/Preflight/External", + app: func(details *Details) App { return details.Apps.AuthenticatedCORSDefault }, + behavior: codersdk.CORSBehaviorSimple, + client: authenticatedClient, + origin: externalOrigin, + httpMethod: http.MethodOptions, + expectedStatusCode: http.StatusOK, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + assert.Empty(t, resp.Get("Access-Control-Allow-Origin")) + }, + }, + { + // An authenticated request to the app is allowed from its own subdomain. + name: "Default/Authenticated/GET/Subdomain", + app: func(details *Details) App { return details.Apps.AuthenticatedCORSDefault }, + behavior: codersdk.CORSBehaviorSimple, + client: authenticatedClient, + origin: ownSubdomain, + httpMethod: http.MethodGet, + expectedStatusCode: http.StatusOK, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + assert.Equal(t, origin, resp.Get("Access-Control-Allow-Origin")) + assert.Equal(t, "true", resp.Get("Access-Control-Allow-Credentials")) + // Added by the app handler. + assert.Equal(t, "simple", resp.Get("X-CORS-Handler")) + }, + }, + { + // An authenticated request to the app is allowed from an external origin. + // The origin doesn't match the app's own subdomain, so the CORS headers are not added. + name: "Default/Authenticated/GET/External", + app: func(details *Details) App { return details.Apps.AuthenticatedCORSDefault }, + behavior: codersdk.CORSBehaviorSimple, + client: authenticatedClient, + origin: externalOrigin, + httpMethod: http.MethodGet, + expectedStatusCode: http.StatusOK, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + assert.Empty(t, resp.Get("Access-Control-Allow-Origin")) + assert.Empty(t, resp.Get("Access-Control-Allow-Headers")) + assert.Empty(t, resp.Get("Access-Control-Allow-Credentials")) + // Added by the app handler. + assert.Equal(t, "simple", resp.Get("X-CORS-Handler")) + }, + }, + { + // The request is rejected because the client is unauthenticated. + name: "Passthru/Unauthenticated/Preflight/Subdomain", + app: func(details *Details) App { return details.Apps.AuthenticatedCORSPassthru }, + behavior: codersdk.CORSBehaviorPassthru, + client: unauthenticatedClient, + origin: ownSubdomain, + httpMethod: http.MethodOptions, + expectedStatusCode: http.StatusSeeOther, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + assert.NotEmpty(t, resp.Get("Location")) + }, + }, + { + // Same behavior as the above test, but the origin is different. + name: "Passthru/Unauthenticated/Preflight/External", + app: func(details *Details) App { return details.Apps.AuthenticatedCORSPassthru }, + behavior: codersdk.CORSBehaviorPassthru, + client: unauthenticatedClient, + origin: externalOrigin, + httpMethod: http.MethodOptions, + expectedStatusCode: http.StatusSeeOther, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + assert.NotEmpty(t, resp.Get("Location")) + }, + }, + { + // The request is rejected because the client is unauthenticated. + name: "Passthru/Unauthenticated/GET/Subdomain", + app: func(details *Details) App { return details.Apps.AuthenticatedCORSPassthru }, + behavior: codersdk.CORSBehaviorPassthru, + client: unauthenticatedClient, + origin: ownSubdomain, + httpMethod: http.MethodGet, + expectedStatusCode: http.StatusSeeOther, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + assert.NotEmpty(t, resp.Get("Location")) + }, + }, + { + // Same behavior as the above test, but the origin is different. + name: "Passthru/Unauthenticated/GET/External", + app: func(details *Details) App { return details.Apps.AuthenticatedCORSPassthru }, + behavior: codersdk.CORSBehaviorPassthru, + client: unauthenticatedClient, + origin: externalOrigin, + httpMethod: http.MethodGet, + expectedStatusCode: http.StatusSeeOther, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + assert.NotEmpty(t, resp.Get("Location")) + }, + }, + { + // The request is allowed because the client is authenticated. + name: "Passthru/Authenticated/Preflight/Subdomain", + app: func(details *Details) App { return details.Apps.AuthenticatedCORSPassthru }, + behavior: codersdk.CORSBehaviorPassthru, + client: authenticatedClient, + origin: ownSubdomain, + httpMethod: http.MethodOptions, + expectedStatusCode: http.StatusOK, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + assert.Equal(t, origin, resp.Get("Access-Control-Allow-Origin")) + assert.Equal(t, http.MethodGet, resp.Get("Access-Control-Allow-Methods")) + // Added by the app handler. + assert.Equal(t, "passthru", resp.Get("X-CORS-Handler")) + }, + }, + { + // Same behavior as the above test, but the origin is different. + name: "Passthru/Authenticated/Preflight/External", + app: func(details *Details) App { return details.Apps.AuthenticatedCORSPassthru }, + behavior: codersdk.CORSBehaviorPassthru, + client: authenticatedClient, + origin: externalOrigin, + httpMethod: http.MethodOptions, + expectedStatusCode: http.StatusOK, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + assert.Equal(t, origin, resp.Get("Access-Control-Allow-Origin")) + assert.Equal(t, http.MethodGet, resp.Get("Access-Control-Allow-Methods")) + // Added by the app handler. + assert.Equal(t, "passthru", resp.Get("X-CORS-Handler")) + }, + }, + { + // The request is allowed because the client is authenticated. + name: "Passthru/Authenticated/GET/Subdomain", + app: func(details *Details) App { return details.Apps.AuthenticatedCORSPassthru }, + behavior: codersdk.CORSBehaviorPassthru, + client: authenticatedClient, + origin: ownSubdomain, + httpMethod: http.MethodGet, + expectedStatusCode: http.StatusOK, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + assert.Equal(t, origin, resp.Get("Access-Control-Allow-Origin")) + assert.Equal(t, http.MethodGet, resp.Get("Access-Control-Allow-Methods")) + // Added by the app handler. + assert.Equal(t, "passthru", resp.Get("X-CORS-Handler")) + }, + }, + { + // Same behavior as the above test, but the origin is different. + name: "Passthru/Authenticated/GET/External", + app: func(details *Details) App { return details.Apps.AuthenticatedCORSPassthru }, + behavior: codersdk.CORSBehaviorPassthru, + client: authenticatedClient, + origin: externalOrigin, + httpMethod: http.MethodGet, + expectedStatusCode: http.StatusOK, + checkResponseHeaders: func(t *testing.T, origin string, resp http.Header) { + assert.Equal(t, origin, resp.Get("Access-Control-Allow-Origin")) + assert.Equal(t, http.MethodGet, resp.Get("Access-Control-Allow-Methods")) + // Added by the app handler. + assert.Equal(t, "passthru", resp.Get("X-CORS-Handler")) + }, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitLong) + + var reqHeaders http.Header + // Setup an HTTP handler which is the "app"; this handler conditionally responds + // to requests based on the CORS behavior + appDetails := setupProxyTest(t, &DeploymentOptions{ + handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + _, err := r.Cookie(codersdk.SessionTokenCookie) + assert.ErrorIs(t, err, http.ErrNoCookie) + + // Store the request headers for later assertions + reqHeaders = r.Header + + switch tc.behavior { + case codersdk.CORSBehaviorPassthru: + w.Header().Set("X-CORS-Handler", "passthru") + + // Only allow GET and OPTIONS requests + if r.Method != http.MethodGet && r.Method != http.MethodOptions { + w.WriteHeader(http.StatusMethodNotAllowed) + return + } + + // If the Origin header is present, add the CORS headers. + if origin := r.Header.Get("Origin"); origin != "" { + w.Header().Set("Access-Control-Allow-Credentials", "true") + w.Header().Set("Access-Control-Allow-Origin", origin) + w.Header().Set("Access-Control-Allow-Methods", http.MethodGet) + } + + w.WriteHeader(http.StatusOK) + case codersdk.CORSBehaviorSimple: + w.Header().Set("X-CORS-Handler", "simple") + } + }), + }) + + // Update the template CORS behavior. + b := tc.behavior + template, err := appDetails.SDKClient.UpdateTemplateMeta(ctx, appDetails.Workspace.TemplateID, codersdk.UpdateTemplateMeta{ + CORSBehavior: &b, + }) + require.NoError(t, err) + require.Equal(t, tc.behavior, template.CORSBehavior) + + // Given: a client and a workspace app + client := tc.client(t, appDetails) + path := appDetails.SubdomainAppURL(tc.app(appDetails)).String() + origin := tc.origin(appDetails, tc.app(appDetails)) + + fmt.Println("method: ", tc.httpMethod) + // When: a preflight request is made to an app with a specified CORS behavior + resp, err := requestWithRetries(ctx, t, client, tc.httpMethod, path, nil, func(r *http.Request) { + // Mimic non-browser clients that don't send the Origin header. + if origin != "" { + r.Header.Set("Origin", origin) + } + r.Header.Set("Access-Control-Request-Method", "GET") + r.Header.Set("Access-Control-Request-Headers", "X-Got-Host") + }) + require.NoError(t, err) + defer resp.Body.Close() + + // Then: the request & response must match expectations + assert.Equal(t, tc.expectedStatusCode, resp.StatusCode) + assert.NoError(t, err) + if tc.checkRequestHeaders != nil { + tc.checkRequestHeaders(t, origin, reqHeaders) + } + tc.checkResponseHeaders(t, origin, resp.Header) + }) + } + }) + t.Run("WorkspaceApplicationAuth", func(t *testing.T) { t.Parallel() @@ -1340,6 +1743,153 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) { }) }) + t.Run("CORS", func(t *testing.T) { + t.Parallel() + + // Set up test headers that should be returned by the app + testHeaders := http.Header{ + "Access-Control-Allow-Origin": []string{"*"}, + "Access-Control-Allow-Methods": []string{"GET, POST, OPTIONS"}, + } + + unauthenticatedClient := func(t *testing.T, appDetails *Details) *codersdk.Client { + c := appDetails.AppClient(t) + c.SetSessionToken("") + return c + } + + authenticatedClient := func(t *testing.T, appDetails *Details) *codersdk.Client { + uc, _ := coderdtest.CreateAnotherUser(t, appDetails.SDKClient, appDetails.FirstUser.OrganizationID, rbac.RoleMember()) + c := appDetails.AppClient(t) + c.SetSessionToken(uc.SessionToken()) + return c + } + + ownerClient := func(t *testing.T, appDetails *Details) *codersdk.Client { + c := appDetails.AppClient(t) // <-- Use same server as others + c.SetSessionToken(appDetails.SDKClient.SessionToken()) // But with owner auth + return c + } + + tests := []struct { + name string + shareLevel codersdk.WorkspaceAgentPortShareLevel + behavior codersdk.CORSBehavior + client func(t *testing.T, appDetails *Details) *codersdk.Client + expectedStatusCode int + expectedCORSHeaders bool + }{ + // Public + { + name: "Default/Public", + shareLevel: codersdk.WorkspaceAgentPortShareLevelPublic, + behavior: codersdk.CORSBehaviorSimple, + expectedCORSHeaders: false, + client: unauthenticatedClient, + expectedStatusCode: http.StatusOK, + }, + { // fails + name: "Passthru/Public", + shareLevel: codersdk.WorkspaceAgentPortShareLevelPublic, + behavior: codersdk.CORSBehaviorPassthru, + expectedCORSHeaders: true, + client: unauthenticatedClient, + expectedStatusCode: http.StatusOK, + }, + // Authenticated + { + name: "Default/Authenticated", + shareLevel: codersdk.WorkspaceAgentPortShareLevelAuthenticated, + behavior: codersdk.CORSBehaviorSimple, + expectedCORSHeaders: false, + client: authenticatedClient, + expectedStatusCode: http.StatusOK, + }, + { + name: "Passthru/Authenticated", + shareLevel: codersdk.WorkspaceAgentPortShareLevelAuthenticated, + behavior: codersdk.CORSBehaviorPassthru, + expectedCORSHeaders: true, + client: authenticatedClient, + expectedStatusCode: http.StatusOK, + }, + { + // The CORS behavior will not affect unauthenticated requests. + // The request will be redirected to the login page. + name: "Passthru/Unauthenticated", + shareLevel: codersdk.WorkspaceAgentPortShareLevelAuthenticated, + behavior: codersdk.CORSBehaviorPassthru, + expectedCORSHeaders: false, + client: unauthenticatedClient, + expectedStatusCode: http.StatusSeeOther, + }, + // Owner + { + name: "Default/Owner", + shareLevel: codersdk.WorkspaceAgentPortShareLevelAuthenticated, // Owner is not a valid share level for ports. + behavior: codersdk.CORSBehaviorSimple, + expectedCORSHeaders: false, + client: ownerClient, + expectedStatusCode: http.StatusOK, + }, + { // fails + name: "Passthru/Owner", + shareLevel: codersdk.WorkspaceAgentPortShareLevelAuthenticated, // Owner is not a valid share level for ports. + behavior: codersdk.CORSBehaviorPassthru, + expectedCORSHeaders: true, + client: ownerClient, + expectedStatusCode: http.StatusOK, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + appDetails := setupProxyTest(t, &DeploymentOptions{ + headers: testHeaders, + }) + port, err := strconv.ParseInt(appDetails.Apps.Port.AppSlugOrPort, 10, 32) + require.NoError(t, err) + + // Update the template CORS behavior. + b := tc.behavior + template, err := appDetails.SDKClient.UpdateTemplateMeta(ctx, appDetails.Workspace.TemplateID, codersdk.UpdateTemplateMeta{ + CORSBehavior: &b, + }) + require.NoError(t, err) + require.Equal(t, tc.behavior, template.CORSBehavior) + + // Set the port we have to be shared. + _, err = appDetails.SDKClient.UpsertWorkspaceAgentPortShare(ctx, appDetails.Workspace.ID, codersdk.UpsertWorkspaceAgentPortShareRequest{ + AgentName: proxyTestAgentName, + Port: int32(port), + ShareLevel: tc.shareLevel, + Protocol: codersdk.WorkspaceAgentPortShareProtocolHTTP, + }) + require.NoError(t, err) + + client := tc.client(t, appDetails) + + resp, err := requestWithRetries(ctx, t, client, http.MethodGet, appDetails.SubdomainAppURL(appDetails.Apps.Port).String(), nil) + require.NoError(t, err) + defer resp.Body.Close() + require.Equal(t, tc.expectedStatusCode, resp.StatusCode) + + if tc.expectedCORSHeaders { + require.Equal(t, testHeaders.Get("Access-Control-Allow-Origin"), resp.Header.Get("Access-Control-Allow-Origin"), "allow origin did not match") + require.Equal(t, testHeaders.Get("Access-Control-Allow-Methods"), resp.Header.Get("Access-Control-Allow-Methods"), "allow methods did not match") + } else { + require.Empty(t, resp.Header.Get("Access-Control-Allow-Origin")) + require.Empty(t, resp.Header.Get("Access-Control-Allow-Methods")) + } + }) + } + }) + t.Run("AppSharing", func(t *testing.T) { t.Parallel() @@ -1386,7 +1936,7 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) { forceURLTransport(t, client) // Create workspace. - port := appServer(t, nil, false) + port := appServer(t, nil, false, nil) workspace, _ = createWorkspaceWithApps(t, client, user.OrganizationIDs[0], user, port, false) // Verify that the apps have the correct sharing levels set. @@ -1397,10 +1947,14 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) { agnt = workspaceBuild.Resources[0].Agents[0] found := map[string]codersdk.WorkspaceAppSharingLevel{} expected := map[string]codersdk.WorkspaceAppSharingLevel{ - proxyTestAppNameFake: codersdk.WorkspaceAppSharingLevelOwner, - proxyTestAppNameOwner: codersdk.WorkspaceAppSharingLevelOwner, - proxyTestAppNameAuthenticated: codersdk.WorkspaceAppSharingLevelAuthenticated, - proxyTestAppNamePublic: codersdk.WorkspaceAppSharingLevelPublic, + proxyTestAppNameFake: codersdk.WorkspaceAppSharingLevelOwner, + proxyTestAppNameOwner: codersdk.WorkspaceAppSharingLevelOwner, + proxyTestAppNameAuthenticated: codersdk.WorkspaceAppSharingLevelAuthenticated, + proxyTestAppNamePublic: codersdk.WorkspaceAppSharingLevelPublic, + proxyTestAppNameAuthenticatedCORSPassthru: codersdk.WorkspaceAppSharingLevelAuthenticated, + proxyTestAppNamePublicCORSPassthru: codersdk.WorkspaceAppSharingLevelPublic, + proxyTestAppNameAuthenticatedCORSDefault: codersdk.WorkspaceAppSharingLevelAuthenticated, + proxyTestAppNamePublicCORSDefault: codersdk.WorkspaceAppSharingLevelPublic, } for _, app := range agnt.Apps { found[app.DisplayName] = app.SharingLevel diff --git a/coderd/workspaceapps/apptest/setup.go b/coderd/workspaceapps/apptest/setup.go index 9d1df9e7fe09d..296934591e873 100644 --- a/coderd/workspaceapps/apptest/setup.go +++ b/coderd/workspaceapps/apptest/setup.go @@ -36,8 +36,13 @@ const ( proxyTestAppNameOwner = "test-app-owner" proxyTestAppNameAuthenticated = "test-app-authenticated" proxyTestAppNamePublic = "test-app-public" - proxyTestAppQuery = "query=true" - proxyTestAppBody = "hello world from apps test" + // nolint:gosec // Not a secret + proxyTestAppNameAuthenticatedCORSPassthru = "test-app-authenticated-cors-passthru" + proxyTestAppNamePublicCORSPassthru = "test-app-public-cors-passthru" + proxyTestAppNameAuthenticatedCORSDefault = "test-app-authenticated-cors-default" + proxyTestAppNamePublicCORSDefault = "test-app-public-cors-default" + proxyTestAppQuery = "query=true" + proxyTestAppBody = "hello world from apps test" proxyTestSubdomainRaw = "*.test.coder.com" proxyTestSubdomain = "test.coder.com" @@ -60,6 +65,7 @@ type DeploymentOptions struct { noWorkspace bool port uint16 headers http.Header + handler http.Handler } // Deployment is a license-agnostic deployment with all the fields that apps @@ -93,6 +99,9 @@ type App struct { // Prefix should have ---. Prefix string Query string + + // Control the behavior of CORS handling. + CORSBehavior codersdk.CORSBehavior } // Details are the full test details returned from setupProxyTestWithFactory. @@ -109,12 +118,16 @@ type Details struct { AppPort uint16 Apps struct { - Fake App - Owner App - Authenticated App - Public App - Port App - PortHTTPS App + Fake App + Owner App + Authenticated App + Public App + Port App + PortHTTPS App + PublicCORSPassthru App + AuthenticatedCORSPassthru App + PublicCORSDefault App + AuthenticatedCORSDefault App } } @@ -201,7 +214,7 @@ func setupProxyTestWithFactory(t *testing.T, factory DeploymentFactory, opts *De } if opts.port == 0 { - opts.port = appServer(t, opts.headers, opts.ServeHTTPS) + opts.port = appServer(t, opts.headers, opts.ServeHTTPS, opts.handler) } workspace, agnt := createWorkspaceWithApps(t, deployment.SDKClient, deployment.FirstUser.OrganizationID, me, opts.port, opts.ServeHTTPS) @@ -252,30 +265,64 @@ func setupProxyTestWithFactory(t *testing.T, factory DeploymentFactory, opts *De AgentName: agnt.Name, AppSlugOrPort: strconv.Itoa(int(opts.port)) + "s", } + details.Apps.PublicCORSPassthru = App{ + Username: me.Username, + WorkspaceName: workspace.Name, + AgentName: agnt.Name, + AppSlugOrPort: proxyTestAppNamePublicCORSPassthru, + CORSBehavior: codersdk.CORSBehaviorPassthru, + Query: proxyTestAppQuery, + } + details.Apps.AuthenticatedCORSPassthru = App{ + Username: me.Username, + WorkspaceName: workspace.Name, + AgentName: agnt.Name, + AppSlugOrPort: proxyTestAppNameAuthenticatedCORSPassthru, + CORSBehavior: codersdk.CORSBehaviorPassthru, + Query: proxyTestAppQuery, + } + details.Apps.PublicCORSDefault = App{ + Username: me.Username, + WorkspaceName: workspace.Name, + AgentName: agnt.Name, + AppSlugOrPort: proxyTestAppNamePublicCORSDefault, + Query: proxyTestAppQuery, + } + details.Apps.AuthenticatedCORSDefault = App{ + Username: me.Username, + WorkspaceName: workspace.Name, + AgentName: agnt.Name, + AppSlugOrPort: proxyTestAppNameAuthenticatedCORSDefault, + Query: proxyTestAppQuery, + } return details } //nolint:revive -func appServer(t *testing.T, headers http.Header, isHTTPS bool) uint16 { - server := httptest.NewUnstartedServer( - http.HandlerFunc( - func(w http.ResponseWriter, r *http.Request) { - _, err := r.Cookie(codersdk.SessionTokenCookie) - assert.ErrorIs(t, err, http.ErrNoCookie) - w.Header().Set("X-Forwarded-For", r.Header.Get("X-Forwarded-For")) - w.Header().Set("X-Got-Host", r.Host) - for name, values := range headers { - for _, value := range values { - w.Header().Add(name, value) - } +func appServer(t *testing.T, headers http.Header, isHTTPS bool, handler http.Handler) uint16 { + defaultHandler := http.HandlerFunc( + func(w http.ResponseWriter, r *http.Request) { + _, err := r.Cookie(codersdk.SessionTokenCookie) + assert.ErrorIs(t, err, http.ErrNoCookie) + w.Header().Set("X-Forwarded-For", r.Header.Get("X-Forwarded-For")) + w.Header().Set("X-Got-Host", r.Host) + for name, values := range headers { + for _, value := range values { + w.Header().Add(name, value) } - w.WriteHeader(http.StatusOK) - _, _ = w.Write([]byte(proxyTestAppBody)) - }, - ), + } + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(proxyTestAppBody)) + }, ) + if handler == nil { + handler = defaultHandler + } + + server := httptest.NewUnstartedServer(handler) + server.Config.ReadHeaderTimeout = time.Minute if isHTTPS { server.StartTLS() @@ -361,6 +408,36 @@ func createWorkspaceWithApps(t *testing.T, client *codersdk.Client, orgID uuid.U Url: appURL, Subdomain: true, }, + { + Slug: proxyTestAppNamePublicCORSPassthru, + DisplayName: proxyTestAppNamePublicCORSPassthru, + SharingLevel: proto.AppSharingLevel_PUBLIC, + Url: appURL, + Subdomain: true, + // CorsBehavior: proto.AppCORSBehavior_PASSTHRU, + }, + { + Slug: proxyTestAppNameAuthenticatedCORSPassthru, + DisplayName: proxyTestAppNameAuthenticatedCORSPassthru, + SharingLevel: proto.AppSharingLevel_AUTHENTICATED, + Url: appURL, + Subdomain: true, + // CorsBehavior: proto.AppCORSBehavior_PASSTHRU, + }, + { + Slug: proxyTestAppNamePublicCORSDefault, + DisplayName: proxyTestAppNamePublicCORSDefault, + SharingLevel: proto.AppSharingLevel_PUBLIC, + Url: appURL, + Subdomain: true, + }, + { + Slug: proxyTestAppNameAuthenticatedCORSDefault, + DisplayName: proxyTestAppNameAuthenticatedCORSDefault, + SharingLevel: proto.AppSharingLevel_AUTHENTICATED, + Url: appURL, + Subdomain: true, + }, } version := coderdtest.CreateTemplateVersion(t, client, orgID, &echo.Responses{ Parse: echo.ParseComplete, diff --git a/coderd/workspaceapps/cors/cors.go b/coderd/workspaceapps/cors/cors.go new file mode 100644 index 0000000000000..5ab07f74e02b3 --- /dev/null +++ b/coderd/workspaceapps/cors/cors.go @@ -0,0 +1,21 @@ +package cors + +import ( + "context" + + "github.com/coder/coder/v2/codersdk" +) + +type contextKeyBehavior struct{} + +// WithBehavior sets the CORS behavior for the given context. +func WithBehavior(ctx context.Context, behavior codersdk.CORSBehavior) context.Context { + return context.WithValue(ctx, contextKeyBehavior{}, behavior) +} + +// HasBehavior returns true if the given context has the specified CORS behavior. +func HasBehavior(ctx context.Context, behavior codersdk.CORSBehavior) bool { + val := ctx.Value(contextKeyBehavior{}) + b, ok := val.(codersdk.CORSBehavior) + return ok && b == behavior +} diff --git a/coderd/workspaceapps/db.go b/coderd/workspaceapps/db.go index 61a9e218edc7f..9e26a28c71370 100644 --- a/coderd/workspaceapps/db.go +++ b/coderd/workspaceapps/db.go @@ -151,6 +151,7 @@ func (p *DBTokenProvider) Issue(ctx context.Context, rw http.ResponseWriter, r * if dbReq.AppURL != nil { token.AppURL = dbReq.AppURL.String() } + token.CORSBehavior = codersdk.CORSBehavior(dbReq.CorsBehavior) // Verify the user has access to the app. authed, warnings, err := p.authorizeRequest(r.Context(), authz, dbReq) diff --git a/coderd/workspaceapps/db_test.go b/coderd/workspaceapps/db_test.go index e78762c035565..22669d568b0e1 100644 --- a/coderd/workspaceapps/db_test.go +++ b/coderd/workspaceapps/db_test.go @@ -255,6 +255,7 @@ func Test_ResolveRequest(t *testing.T) { for _, c := range cases { t.Run(c.name, func(t *testing.T) { t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) // Try resolving a request for each app as the owner, without a // token, then use the token to resolve each app. @@ -301,11 +302,12 @@ func Test_ResolveRequest(t *testing.T) { RegisteredClaims: jwtutils.RegisteredClaims{ Expiry: jwt.NewNumericDate(token.Expiry.Time()), }, - Request: req, - UserID: me.ID, - WorkspaceID: workspace.ID, - AgentID: agentID, - AppURL: appURL, + Request: req, + UserID: me.ID, + WorkspaceID: workspace.ID, + AgentID: agentID, + AppURL: appURL, + CORSBehavior: codersdk.CORSBehaviorSimple, }, token) require.NotZero(t, token.Expiry) require.WithinDuration(t, time.Now().Add(workspaceapps.DefaultTokenExpiry), token.Expiry.Time(), time.Minute) @@ -588,6 +590,7 @@ func Test_ResolveRequest(t *testing.T) { t.Run("TokenDoesNotMatchRequest", func(t *testing.T) { t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) badToken := workspaceapps.SignedToken{ Request: (workspaceapps.Request{ diff --git a/coderd/workspaceapps/proxy.go b/coderd/workspaceapps/proxy.go index bc8d32ed2ead9..002bb1ea05aae 100644 --- a/coderd/workspaceapps/proxy.go +++ b/coderd/workspaceapps/proxy.go @@ -28,6 +28,7 @@ import ( "github.com/coder/coder/v2/coderd/tracing" "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/coderd/workspaceapps/appurl" + "github.com/coder/coder/v2/coderd/workspaceapps/cors" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/workspacesdk" "github.com/coder/coder/v2/site" @@ -73,7 +74,7 @@ type AgentProvider interface { ReverseProxy(targetURL, dashboardURL *url.URL, agentID uuid.UUID, app appurl.ApplicationURL, wildcardHost string) *httputil.ReverseProxy // AgentConn returns a new connection to the specified agent. - AgentConn(ctx context.Context, agentID uuid.UUID) (_ *workspacesdk.AgentConn, release func(), _ error) + AgentConn(ctx context.Context, agentID uuid.UUID) (_ workspacesdk.AgentConn, release func(), _ error) ServeHTTPDebug(w http.ResponseWriter, r *http.Request) @@ -323,6 +324,37 @@ func (s *Server) workspaceAppsProxyPath(rw http.ResponseWriter, r *http.Request) s.proxyWorkspaceApp(rw, r, *token, chiPath, appurl.ApplicationURL{}) } +// determineCORSBehavior examines the given token and conditionally applies +// CORS middleware if the token specifies that behavior. +func (s *Server) determineCORSBehavior(token *SignedToken, app appurl.ApplicationURL) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + // Create the CORS middleware handler upfront. + corsHandler := httpmw.WorkspaceAppCors(s.HostnameRegex, app)(next) + + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + var behavior codersdk.CORSBehavior + if token != nil { + behavior = token.CORSBehavior + } + + // Add behavior to context regardless of which handler we use, + // since we will use this later on to determine if we should strip + // CORS headers in the response. + r = r.WithContext(cors.WithBehavior(r.Context(), behavior)) + + switch behavior { + case codersdk.CORSBehaviorPassthru: + // Bypass the CORS middleware. + next.ServeHTTP(rw, r) + return + default: + // Apply the CORS middleware. + corsHandler.ServeHTTP(rw, r) + } + }) + } +} + // HandleSubdomain handles subdomain-based application proxy requests (aka. // DevURLs in Coder V1). // @@ -394,36 +426,36 @@ func (s *Server) HandleSubdomain(middlewares ...func(http.Handler) http.Handler) return } - // Use the passed in app middlewares before checking authentication and - // passing to the proxy app. - mws := chi.Middlewares(append(middlewares, httpmw.WorkspaceAppCors(s.HostnameRegex, app))) - mws.Handler(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { - if !s.handleAPIKeySmuggling(rw, r, AccessMethodSubdomain) { - return - } + if !s.handleAPIKeySmuggling(rw, r, AccessMethodSubdomain) { + return + } - token, ok := ResolveRequest(rw, r, ResolveRequestOptions{ - Logger: s.Logger, - CookieCfg: s.Cookies, - SignedTokenProvider: s.SignedTokenProvider, - DashboardURL: s.DashboardURL, - PathAppBaseURL: s.AccessURL, - AppHostname: s.Hostname, - AppRequest: Request{ - AccessMethod: AccessMethodSubdomain, - BasePath: "/", - Prefix: app.Prefix, - UsernameOrID: app.Username, - WorkspaceNameOrID: app.WorkspaceName, - AgentNameOrID: app.AgentName, - AppSlugOrPort: app.AppSlugOrPort, - }, - AppPath: r.URL.Path, - AppQuery: r.URL.RawQuery, - }) - if !ok { - return - } + // Generate a signed token for the request. + token, ok := ResolveRequest(rw, r, ResolveRequestOptions{ + Logger: s.Logger, + SignedTokenProvider: s.SignedTokenProvider, + DashboardURL: s.DashboardURL, + PathAppBaseURL: s.AccessURL, + AppHostname: s.Hostname, + AppRequest: Request{ + AccessMethod: AccessMethodSubdomain, + BasePath: "/", + Prefix: app.Prefix, + UsernameOrID: app.Username, + WorkspaceNameOrID: app.WorkspaceName, + AgentNameOrID: app.AgentName, + AppSlugOrPort: app.AppSlugOrPort, + }, + AppPath: r.URL.Path, + AppQuery: r.URL.RawQuery, + }) + if !ok { + return + } + + // Proxy the request (possibly with the CORS middleware). + mws := chi.Middlewares(append(middlewares, s.determineCORSBehavior(token, app))) + mws.Handler(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { s.proxyWorkspaceApp(rw, r, *token, r.URL.Path, app) })).ServeHTTP(rw, r.WithContext(ctx)) }) @@ -560,6 +592,10 @@ func (s *Server) proxyWorkspaceApp(rw http.ResponseWriter, r *http.Request, appT proxy := s.AgentProvider.ReverseProxy(appURL, s.DashboardURL, appToken.AgentID, app, s.Hostname) proxy.ModifyResponse = func(r *http.Response) error { + // If passthru behavior is set, disable our CORS header stripping. + if cors.HasBehavior(r.Request.Context(), codersdk.CORSBehaviorPassthru) { + return nil + } r.Header.Del(httpmw.AccessControlAllowOriginHeader) r.Header.Del(httpmw.AccessControlAllowCredentialsHeader) r.Header.Del(httpmw.AccessControlAllowMethodsHeader) diff --git a/coderd/workspaceapps/request.go b/coderd/workspaceapps/request.go index 0e6a43cb4cbe4..aa90ead2cdd29 100644 --- a/coderd/workspaceapps/request.go +++ b/coderd/workspaceapps/request.go @@ -204,6 +204,9 @@ type databaseRequest struct { // AppSharingLevel is the sharing level of the app. This is forced to be set // to AppSharingLevelOwner if the access method is terminal. AppSharingLevel database.AppSharingLevel + // CorsBehavior is set at the template level for all apps/ports in a workspace, and can + // either be the current CORS middleware 'simple' or bypass the cors middleware with 'passthru'. + CorsBehavior database.CorsBehavior } // getDatabase does queries to get the owner user, workspace and agent @@ -296,7 +299,14 @@ func (r Request) getDatabase(ctx context.Context, db database.Store) (*databaseR // First check if it's a port-based URL with an optional "s" suffix for HTTPS. potentialPortStr = strings.TrimSuffix(r.AppSlugOrPort, "s") portUint, portUintErr = strconv.ParseUint(potentialPortStr, 10, 16) + corsBehavior database.CorsBehavior ) + + tmpl, err := db.GetTemplateByID(ctx, workspace.TemplateID) + if err != nil { + return nil, xerrors.Errorf("get template %q: %w", workspace.TemplateID, err) + } + corsBehavior = tmpl.CorsBehavior //nolint:nestif if portUintErr == nil { protocol := "http" @@ -417,6 +427,7 @@ func (r Request) getDatabase(ctx context.Context, db database.Store) (*databaseR App: app, AppURL: appURLParsed, AppSharingLevel: appSharingLevel, + CorsBehavior: corsBehavior, }, nil } diff --git a/coderd/workspaceapps/token.go b/coderd/workspaceapps/token.go index dcd8c5a0e5c34..a3dbc02b61ddd 100644 --- a/coderd/workspaceapps/token.go +++ b/coderd/workspaceapps/token.go @@ -22,10 +22,11 @@ type SignedToken struct { // Request details. Request `json:"request"` - UserID uuid.UUID `json:"user_id"` - WorkspaceID uuid.UUID `json:"workspace_id"` - AgentID uuid.UUID `json:"agent_id"` - AppURL string `json:"app_url"` + UserID uuid.UUID `json:"user_id"` + WorkspaceID uuid.UUID `json:"workspace_id"` + AgentID uuid.UUID `json:"agent_id"` + AppURL string `json:"app_url"` + CORSBehavior codersdk.CORSBehavior `json:"cors_behavior"` } // MatchesRequest returns true if the token matches the request. Any token that diff --git a/coderd/workspacebuilds.go b/coderd/workspacebuilds.go index 583b9c4edaf21..e54f75ef5cba6 100644 --- a/coderd/workspacebuilds.go +++ b/coderd/workspacebuilds.go @@ -1157,6 +1157,11 @@ func (api *API) convertWorkspaceBuild( aiTasksSidebarAppID = &build.AITaskSidebarAppID.UUID } + var hasExternalAgent *bool + if build.HasExternalAgent.Valid { + hasExternalAgent = &build.HasExternalAgent.Bool + } + apiJob := convertProvisionerJob(job) transition := codersdk.WorkspaceTransition(build.Transition) return codersdk.WorkspaceBuild{ @@ -1185,6 +1190,7 @@ func (api *API) convertWorkspaceBuild( TemplateVersionPresetID: presetID, HasAITask: hasAITask, AITaskSidebarAppID: aiTasksSidebarAppID, + HasExternalAgent: hasExternalAgent, }, nil } diff --git a/coderd/workspacebuilds_test.go b/coderd/workspacebuilds_test.go index 29c9cac0ffa13..e888115093a9b 100644 --- a/coderd/workspacebuilds_test.go +++ b/coderd/workspacebuilds_test.go @@ -55,7 +55,6 @@ func TestWorkspaceBuild(t *testing.T) { Auditor: auditor, }) user := coderdtest.CreateFirstUser(t, client) - //nolint:gocritic // testing up, err := db.UpdateUserProfile(dbauthz.AsSystemRestricted(ctx), database.UpdateUserProfileParams{ ID: user.UserID, Email: coderdtest.FirstUserParams.Email, @@ -518,7 +517,6 @@ func TestWorkspaceBuildsProvisionerState(t *testing.T) { OrganizationID: first.OrganizationID, }).Do() - // nolint:gocritic // For testing daemons, err := store.GetProvisionerDaemons(dbauthz.AsSystemReadProvisionerDaemons(ctx)) require.NoError(t, err) require.Empty(t, daemons, "Provisioner daemons should be empty for this test") @@ -1638,6 +1636,8 @@ func TestPostWorkspaceBuild(t *testing.T) { t.Run("SetsPresetID", func(t *testing.T) { t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) user := coderdtest.CreateFirstUser(t, client) version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ @@ -1645,9 +1645,20 @@ func TestPostWorkspaceBuild(t *testing.T) { ProvisionPlan: []*proto.Response{{ Type: &proto.Response_Plan{ Plan: &proto.PlanComplete{ - Presets: []*proto.Preset{{ - Name: "test", - }}, + Presets: []*proto.Preset{ + { + Name: "autodetected", + }, + { + Name: "manual", + Parameters: []*proto.PresetParameter{ + { + Name: "param1", + Value: "value1", + }, + }, + }, + }, }, }, }}, @@ -1655,28 +1666,29 @@ func TestPostWorkspaceBuild(t *testing.T) { }) template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) - workspace := coderdtest.CreateWorkspace(t, client, template.ID) - coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) - require.Nil(t, workspace.LatestBuild.TemplateVersionPresetID) - - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() presets, err := client.TemplateVersionPresets(ctx, version.ID) require.NoError(t, err) - require.Equal(t, 1, len(presets)) - require.Equal(t, "test", presets[0].Name) + require.Equal(t, 2, len(presets)) + require.Equal(t, "autodetected", presets[0].Name) + require.Equal(t, "manual", presets[1].Name) + + workspace := coderdtest.CreateWorkspace(t, client, template.ID) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) + // Preset ID was detected based on the workspace parameters: + require.Equal(t, presets[0].ID, *workspace.LatestBuild.TemplateVersionPresetID) build, err := client.CreateWorkspaceBuild(ctx, workspace.ID, codersdk.CreateWorkspaceBuildRequest{ TemplateVersionID: version.ID, Transition: codersdk.WorkspaceTransitionStart, - TemplateVersionPresetID: presets[0].ID, + TemplateVersionPresetID: presets[1].ID, }) require.NoError(t, err) require.NotNil(t, build.TemplateVersionPresetID) workspace, err = client.Workspace(ctx, workspace.ID) require.NoError(t, err) + require.Equal(t, presets[1].ID, *workspace.LatestBuild.TemplateVersionPresetID) require.Equal(t, build.TemplateVersionPresetID, workspace.LatestBuild.TemplateVersionPresetID) }) diff --git a/coderd/workspaces.go b/coderd/workspaces.go index 0f3f0a24c75d3..e998aeb894c13 100644 --- a/coderd/workspaces.go +++ b/coderd/workspaces.go @@ -32,6 +32,7 @@ import ( "github.com/coder/coder/v2/coderd/notifications" "github.com/coder/coder/v2/coderd/prebuilds" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/acl" "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/schedule" "github.com/coder/coder/v2/coderd/schedule/cron" @@ -45,8 +46,8 @@ import ( ) var ( - ttlMin = time.Minute //nolint:revive // min here means 'minimum' not 'minutes' - ttlMax = 30 * 24 * time.Hour + ttlMinimum = time.Minute + ttlMaximum = 30 * 24 * time.Hour errTTLMin = xerrors.New("time until shutdown must be at least one minute") errTTLMax = xerrors.New("time until shutdown must be less than 30 days") @@ -137,7 +138,7 @@ func (api *API) workspace(rw http.ResponseWriter, r *http.Request) { // @Security CoderSessionToken // @Produce json // @Tags Workspaces -// @Param q query string false "Search query in the format `key:value`. Available keys are: owner, template, name, status, has-agent, dormant, last_used_after, last_used_before, has-ai-task." +// @Param q query string false "Search query in the format `key:value`. Available keys are: owner, template, name, status, has-agent, dormant, last_used_after, last_used_before, has-ai-task, has_external_agent." // @Param limit query int false "Page limit" // @Param offset query int false "Page offset" // @Success 200 {object} codersdk.WorkspacesResponse @@ -634,10 +635,38 @@ func createWorkspace( claimedWorkspace *database.Workspace ) - // If a template preset was chosen, try claim a prebuilt workspace. - if req.TemplateVersionPresetID != uuid.Nil { + // Use injected Clock to allow time mocking in tests + now := dbtime.Time(api.Clock.Now()) + + templateVersionPresetID := req.TemplateVersionPresetID + + // If no preset was chosen, look for a matching preset by parameter values. + if templateVersionPresetID == uuid.Nil { + parameterNames := make([]string, len(req.RichParameterValues)) + parameterValues := make([]string, len(req.RichParameterValues)) + for i, parameter := range req.RichParameterValues { + parameterNames[i] = parameter.Name + parameterValues[i] = parameter.Value + } + var err error + templateVersionID := req.TemplateVersionID + if templateVersionID == uuid.Nil { + templateVersionID = template.ActiveVersionID + } + templateVersionPresetID, err = prebuilds.FindMatchingPresetID(ctx, db, templateVersionID, parameterNames, parameterValues) + if err != nil { + return xerrors.Errorf("find matching preset: %w", err) + } + } + + // Try to claim a prebuilt workspace. + if templateVersionPresetID != uuid.Nil { // Try and claim an eligible prebuild, if available. - claimedWorkspace, err = claimPrebuild(ctx, prebuildsClaimer, db, api.Logger, req, owner) + // On successful claim, initialize all lifecycle fields from template and workspace-level config + // so the newly claimed workspace is properly managed by the lifecycle executor. + claimedWorkspace, err = claimPrebuild( + ctx, prebuildsClaimer, db, api.Logger, now, req.Name, owner, + templateVersionPresetID, dbAutostartSchedule, nextStartAt, dbTTL) // If claiming fails with an expected error (no claimable prebuilds or AGPL does not support prebuilds), // we fall back to creating a new workspace. Otherwise, propagate the unexpected error. if err != nil { @@ -646,7 +675,7 @@ func createWorkspace( fields := []any{ slog.Error(err), slog.F("workspace_name", req.Name), - slog.F("template_version_preset_id", req.TemplateVersionPresetID), + slog.F("template_version_preset_id", templateVersionPresetID), } if !isExpectedError { @@ -665,7 +694,6 @@ func createWorkspace( // No prebuild found; regular flow. if claimedWorkspace == nil { - now := dbtime.Now() // Workspaces are created without any versions. minimumWorkspace, err := db.InsertWorkspace(ctx, database.InsertWorkspaceParams{ ID: uuid.New(), @@ -680,7 +708,7 @@ func createWorkspace( Ttl: dbTTL, // The workspaces page will sort by last used at, and it's useful to // have the newly created workspace at the top of the list! - LastUsedAt: dbtime.Now(), + LastUsedAt: now, AutomaticUpdates: dbAU, }) if err != nil { @@ -711,8 +739,8 @@ func createWorkspace( if req.TemplateVersionID != uuid.Nil { builder = builder.VersionID(req.TemplateVersionID) } - if req.TemplateVersionPresetID != uuid.Nil { - builder = builder.TemplateVersionPresetID(req.TemplateVersionPresetID) + if templateVersionPresetID != uuid.Nil { + builder = builder.TemplateVersionPresetID(templateVersionPresetID) } if claimedWorkspace != nil { builder = builder.MarkPrebuiltWorkspaceClaim() @@ -871,8 +899,20 @@ func requestTemplate(ctx context.Context, rw http.ResponseWriter, req codersdk.C return template, true } -func claimPrebuild(ctx context.Context, claimer prebuilds.Claimer, db database.Store, logger slog.Logger, req codersdk.CreateWorkspaceRequest, owner workspaceOwner) (*database.Workspace, error) { - claimedID, err := claimer.Claim(ctx, owner.ID, req.Name, req.TemplateVersionPresetID) +func claimPrebuild( + ctx context.Context, + claimer prebuilds.Claimer, + db database.Store, + logger slog.Logger, + now time.Time, + name string, + owner workspaceOwner, + templateVersionPresetID uuid.UUID, + autostartSchedule sql.NullString, + nextStartAt sql.NullTime, + ttl sql.NullInt64, +) (*database.Workspace, error) { + claimedID, err := claimer.Claim(ctx, now, owner.ID, name, templateVersionPresetID, autostartSchedule, nextStartAt, ttl) if err != nil { // TODO: enhance this by clarifying whether this *specific* prebuild failed or whether there are none to claim. return nil, xerrors.Errorf("claim prebuild: %w", err) @@ -1071,6 +1111,17 @@ func (api *API) putWorkspaceAutostart(rw http.ResponseWriter, r *http.Request) { return } + // Autostart configuration is not supported for prebuilt workspaces. + // Prebuild lifecycle is managed by the reconciliation loop, with scheduling behavior + // defined per preset at the template level, not per workspace. + if workspace.IsPrebuild() { + httpapi.Write(ctx, rw, http.StatusConflict, codersdk.Response{ + Message: "Autostart is not supported for prebuilt workspaces", + Detail: "Prebuilt workspace scheduling is configured per preset at the template level. Workspace-level overrides are not supported.", + }) + return + } + dbSched, err := validWorkspaceSchedule(req.Schedule) if err != nil { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ @@ -1097,12 +1148,20 @@ func (api *API) putWorkspaceAutostart(rw http.ResponseWriter, r *http.Request) { return } + // Use injected Clock to allow time mocking in tests + now := api.Clock.Now() + nextStartAt := sql.NullTime{} if dbSched.Valid { - next, err := schedule.NextAllowedAutostart(dbtime.Now(), dbSched.String, templateSchedule) - if err == nil { - nextStartAt = sql.NullTime{Valid: true, Time: dbtime.Time(next.UTC())} + next, err := schedule.NextAllowedAutostart(now, dbSched.String, templateSchedule) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error calculating workspace autostart schedule.", + Detail: err.Error(), + }) + return } + nextStartAt = sql.NullTime{Valid: true, Time: dbtime.Time(next.UTC())} } err = api.Database.UpdateWorkspaceAutostart(ctx, database.UpdateWorkspaceAutostartParams{ @@ -1155,6 +1214,17 @@ func (api *API) putWorkspaceTTL(rw http.ResponseWriter, r *http.Request) { return } + // TTL updates are not supported for prebuilt workspaces. + // Prebuild lifecycle is managed by the reconciliation loop, with TTL behavior + // defined per preset at the template level, not per workspace. + if workspace.IsPrebuild() { + httpapi.Write(ctx, rw, http.StatusConflict, codersdk.Response{ + Message: "TTL updates are not supported for prebuilt workspaces", + Detail: "Prebuilt workspace TTL is configured per preset at the template level. Workspace-level overrides are not supported.", + }) + return + } + var dbTTL sql.NullInt64 err := api.Database.InTx(func(s database.Store) error { @@ -1180,6 +1250,9 @@ func (api *API) putWorkspaceTTL(rw http.ResponseWriter, r *http.Request) { return xerrors.Errorf("update workspace time until shutdown: %w", err) } + // Use injected Clock to allow time mocking in tests + now := api.Clock.Now() + // If autostop has been disabled, we want to remove the deadline from the // existing workspace build (if there is one). if !dbTTL.Valid { @@ -1190,10 +1263,24 @@ func (api *API) putWorkspaceTTL(rw http.ResponseWriter, r *http.Request) { if build.Transition == database.WorkspaceTransitionStart { if err = s.UpdateWorkspaceBuildDeadlineByID(ctx, database.UpdateWorkspaceBuildDeadlineByIDParams{ - ID: build.ID, - Deadline: time.Time{}, + ID: build.ID, + // Use the max_deadline as the new build deadline. It will + // either be zero (our target), or a non-zero value that we + // need to abide by anyway due to template policy. + // + // Previously, we would always set the deadline to zero, + // which was incorrect behavior. When max_deadline is + // non-zero, deadline must be set to a non-zero value that + // is less than max_deadline. + // + // Disabling TTL autostop (at a workspace or template level) + // does not trump the template's autostop requirement. + // + // Refer to the comments on schedule.CalculateAutostop for + // more information. + Deadline: build.MaxDeadline, MaxDeadline: build.MaxDeadline, - UpdatedAt: dbtime.Time(api.Clock.Now()), + UpdatedAt: dbtime.Time(now), }); err != nil { return xerrors.Errorf("update workspace build deadline: %w", err) } @@ -1257,17 +1344,30 @@ func (api *API) putWorkspaceDormant(rw http.ResponseWriter, r *http.Request) { return } + // Dormancy configuration is not supported for prebuilt workspaces. + // Prebuilds are managed by the reconciliation loop and are not subject to dormancy. + if oldWorkspace.IsPrebuild() { + httpapi.Write(ctx, rw, http.StatusConflict, codersdk.Response{ + Message: "Dormancy updates are not supported for prebuilt workspaces", + Detail: "Prebuilt workspaces are not subject to dormancy. Dormancy behavior is only applicable to regular workspaces", + }) + return + } + // If the workspace is already in the desired state do nothing! if oldWorkspace.DormantAt.Valid == req.Dormant { rw.WriteHeader(http.StatusNotModified) return } + // Use injected Clock to allow time mocking in tests + now := api.Clock.Now() + dormantAt := sql.NullTime{ Valid: req.Dormant, } if req.Dormant { - dormantAt.Time = dbtime.Now() + dormantAt.Time = dbtime.Time(now) } newWorkspace, err := api.Database.UpdateWorkspaceDormantDeletingAt(ctx, database.UpdateWorkspaceDormantDeletingAtParams{ @@ -1307,7 +1407,7 @@ func (api *API) putWorkspaceDormant(rw http.ResponseWriter, r *http.Request) { } if initiatorErr == nil && tmplErr == nil { - dormantTime := dbtime.Now().Add(time.Duration(tmpl.TimeTilDormant)) + dormantTime := dbtime.Time(now).Add(time.Duration(tmpl.TimeTilDormant)) _, err = api.NotificationsEnqueuer.Enqueue( // nolint:gocritic // Need notifier actor to enqueue notifications dbauthz.AsNotifier(ctx), @@ -1401,6 +1501,17 @@ func (api *API) putExtendWorkspace(rw http.ResponseWriter, r *http.Request) { return } + // Deadline extensions are not supported for prebuilt workspaces. + // Prebuilds are managed by the reconciliation loop and must always have + // Deadline and MaxDeadline unset. + if workspace.IsPrebuild() { + httpapi.Write(ctx, rw, http.StatusConflict, codersdk.Response{ + Message: "Deadline extension is not supported for prebuilt workspaces", + Detail: "Prebuilt workspaces do not support user deadline modifications. Deadline extension is only applicable to regular workspaces", + }) + return + } + code := http.StatusOK resp := codersdk.Response{} @@ -1437,8 +1548,11 @@ func (api *API) putExtendWorkspace(rw http.ResponseWriter, r *http.Request) { return xerrors.Errorf("workspace shutdown is manual") } + // Use injected Clock to allow time mocking in tests + now := api.Clock.Now() + newDeadline := req.Deadline.UTC() - if err := validWorkspaceDeadline(job.CompletedAt.Time, newDeadline); err != nil { + if err := validWorkspaceDeadline(now, job.CompletedAt.Time, newDeadline); err != nil { // NOTE(Cian): Putting the error in the Message field on request from the FE folks. // Normally, we would put the validation error in Validations, but this endpoint is // not tied to a form or specific named user input on the FE. @@ -1454,7 +1568,7 @@ func (api *API) putExtendWorkspace(rw http.ResponseWriter, r *http.Request) { if err := s.UpdateWorkspaceBuildDeadlineByID(ctx, database.UpdateWorkspaceBuildDeadlineByIDParams{ ID: build.ID, - UpdatedAt: dbtime.Now(), + UpdatedAt: dbtime.Time(now), Deadline: newDeadline, MaxDeadline: build.MaxDeadline, }); err != nil { @@ -2041,6 +2155,97 @@ func (api *API) workspaceTimings(rw http.ResponseWriter, r *http.Request) { httpapi.Write(ctx, rw, http.StatusOK, timings) } +// @Summary Update workspace ACL +// @ID update-workspace-acl +// @Security CoderSessionToken +// @Accept json +// @Produce json +// @Tags Workspaces +// @Param workspace path string true "Workspace ID" format(uuid) +// @Param request body codersdk.UpdateWorkspaceACL true "Update workspace ACL request" +// @Success 204 +// @Router /workspaces/{workspace}/acl [patch] +func (api *API) patchWorkspaceACL(rw http.ResponseWriter, r *http.Request) { + var ( + ctx = r.Context() + workspace = httpmw.WorkspaceParam(r) + auditor = api.Auditor.Load() + aReq, commitAudit = audit.InitRequest[database.WorkspaceTable](rw, &audit.RequestParams{ + Audit: *auditor, + Log: api.Logger, + Request: r, + Action: database.AuditActionWrite, + OrganizationID: workspace.OrganizationID, + }) + ) + defer commitAudit() + aReq.Old = workspace.WorkspaceTable() + + var req codersdk.UpdateWorkspaceACL + if !httpapi.Read(ctx, rw, r, &req) { + return + } + + validErrs := acl.Validate(ctx, api.Database, WorkspaceACLUpdateValidator(req)) + if len(validErrs) > 0 { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "Invalid request to update workspace ACL", + Validations: validErrs, + }) + return + } + + err := api.Database.InTx(func(tx database.Store) error { + var err error + workspace, err = tx.GetWorkspaceByID(ctx, workspace.ID) + if err != nil { + return xerrors.Errorf("get template by ID: %w", err) + } + + for id, role := range req.UserRoles { + if role == codersdk.WorkspaceRoleDeleted { + delete(workspace.UserACL, id) + continue + } + workspace.UserACL[id] = database.WorkspaceACLEntry{ + Permissions: db2sdk.WorkspaceRoleActions(role), + } + } + + for id, role := range req.GroupRoles { + if role == codersdk.WorkspaceRoleDeleted { + delete(workspace.GroupACL, id) + continue + } + workspace.GroupACL[id] = database.WorkspaceACLEntry{ + Permissions: db2sdk.WorkspaceRoleActions(role), + } + } + + err = tx.UpdateWorkspaceACLByID(ctx, database.UpdateWorkspaceACLByIDParams{ + ID: workspace.ID, + UserACL: workspace.UserACL, + GroupACL: workspace.GroupACL, + }) + if err != nil { + return xerrors.Errorf("update workspace ACL by ID: %w", err) + } + workspace, err = tx.GetWorkspaceByID(ctx, workspace.ID) + if err != nil { + return xerrors.Errorf("get updated workspace by ID: %w", err) + } + return nil + }, nil) + if err != nil { + httpapi.InternalServerError(rw, err) + return + } + + aReq.New = workspace.WorkspaceTable() + + rw.WriteHeader(http.StatusNoContent) +} + type workspaceData struct { templates []database.Template builds []codersdk.WorkspaceBuild @@ -2293,11 +2498,11 @@ func validWorkspaceTTLMillis(millis *int64, templateDefault time.Duration) (sql. dur := time.Duration(*millis) * time.Millisecond truncated := dur.Truncate(time.Minute) - if truncated < ttlMin { + if truncated < ttlMinimum { return sql.NullInt64{}, errTTLMin } - if truncated > ttlMax { + if truncated > ttlMaximum { return sql.NullInt64{}, errTTLMax } @@ -2318,8 +2523,8 @@ func validWorkspaceAutomaticUpdates(updates codersdk.AutomaticUpdates) (database return dbAU, nil } -func validWorkspaceDeadline(startedAt, newDeadline time.Time) error { - soon := time.Now().Add(29 * time.Minute) +func validWorkspaceDeadline(now, startedAt, newDeadline time.Time) error { + soon := now.Add(29 * time.Minute) if newDeadline.Before(soon) { return errDeadlineTooSoon } @@ -2379,3 +2584,42 @@ func (api *API) publishWorkspaceAgentLogsUpdate(ctx context.Context, workspaceAg api.Logger.Warn(ctx, "failed to publish workspace agent logs update", slog.F("workspace_agent_id", workspaceAgentID), slog.Error(err)) } } + +type WorkspaceACLUpdateValidator codersdk.UpdateWorkspaceACL + +var ( + workspaceACLUpdateUsersFieldName = "user_roles" + workspaceACLUpdateGroupsFieldName = "group_roles" +) + +// WorkspaceACLUpdateValidator implements acl.UpdateValidator[codersdk.WorkspaceRole] +var _ acl.UpdateValidator[codersdk.WorkspaceRole] = WorkspaceACLUpdateValidator{} + +func (w WorkspaceACLUpdateValidator) Users() (map[string]codersdk.WorkspaceRole, string) { + return w.UserRoles, workspaceACLUpdateUsersFieldName +} + +func (w WorkspaceACLUpdateValidator) Groups() (map[string]codersdk.WorkspaceRole, string) { + return w.GroupRoles, workspaceACLUpdateGroupsFieldName +} + +func (WorkspaceACLUpdateValidator) ValidateRole(role codersdk.WorkspaceRole) error { + actions := db2sdk.WorkspaceRoleActions(role) + if len(actions) == 0 && role != codersdk.WorkspaceRoleDeleted { + return xerrors.Errorf("role %q is not a valid workspace role", role) + } + + return nil +} + +// TODO: This will go here +// func convertToWorkspaceRole(actions []policy.Action) codersdk.TemplateRole { +// switch { +// case len(actions) == 2 && slice.SameElements(actions, []policy.Action{policy.ActionUse, policy.ActionRead}): +// return codersdk.TemplateRoleUse +// case len(actions) == 1 && actions[0] == policy.WildcardSymbol: +// return codersdk.TemplateRoleAdmin +// } + +// return "" +// } diff --git a/coderd/workspaces_test.go b/coderd/workspaces_test.go index 141c62ff3a4b3..4df83114c68a1 100644 --- a/coderd/workspaces_test.go +++ b/coderd/workspaces_test.go @@ -431,9 +431,9 @@ func TestWorkspace(t *testing.T) { // Test Utility variables templateVersionParameters := []*proto.RichParameter{ - {Name: "param1", Type: "string", Required: false}, - {Name: "param2", Type: "string", Required: false}, - {Name: "param3", Type: "string", Required: false}, + {Name: "param1", Type: "string", Required: false, DefaultValue: "default1"}, + {Name: "param2", Type: "string", Required: false, DefaultValue: "default2"}, + {Name: "param3", Type: "string", Required: false, DefaultValue: "default3"}, } presetParameters := []*proto.PresetParameter{ {Name: "param1", Value: "value1"}, @@ -1427,7 +1427,6 @@ func TestWorkspaceFilterAllStatus(t *testing.T) { t.Parallel() // For this test, we do not care about permissions. - // nolint:gocritic // unit testing ctx := dbauthz.AsSystemRestricted(context.Background()) db, pubsub := dbtestutil.NewDB(t) client := coderdtest.New(t, &coderdtest.Options{ @@ -1800,6 +1799,7 @@ func TestWorkspaceFilter(t *testing.T) { for _, c := range testCases { t.Run(c.Name, func(t *testing.T) { t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) workspaces, err := client.Workspaces(ctx, c.Filter) require.NoError(t, err, "fetch workspaces") @@ -2214,15 +2214,12 @@ func TestWorkspaceFilterManual(t *testing.T) { after := coderdtest.CreateWorkspace(t, client, template.ID) _ = coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, after.LatestBuild.ID) - //nolint:gocritic // Unit testing context err := api.Database.UpdateWorkspaceLastUsedAt(dbauthz.AsSystemRestricted(ctx), database.UpdateWorkspaceLastUsedAtParams{ ID: before.ID, LastUsedAt: now.UTC().Add(time.Hour * -1), }) require.NoError(t, err) - // Unit testing context - //nolint:gocritic // Unit testing context err = api.Database.UpdateWorkspaceLastUsedAt(dbauthz.AsSystemRestricted(ctx), database.UpdateWorkspaceLastUsedAtParams{ ID: after.ID, LastUsedAt: now.UTC().Add(time.Hour * 1), @@ -2678,8 +2675,7 @@ func TestWorkspaceUpdateAutostart(t *testing.T) { // ensure test invariant: new workspaces have no autostart schedule. require.Empty(t, workspace.AutostartSchedule, "expected newly-minted workspace to have no autostart schedule") - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) err := client.UpdateWorkspaceAutostart(ctx, workspace.ID, codersdk.UpdateWorkspaceAutostartRequest{ Schedule: ptr.Ref("CRON_TZ=Europe/Dublin 30 9 * * 1-5"), @@ -2698,8 +2694,7 @@ func TestWorkspaceUpdateAutostart(t *testing.T) { } ) - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) err := client.UpdateWorkspaceAutostart(ctx, wsid, req) require.IsType(t, err, &codersdk.Error{}, "expected codersdk.Error") @@ -2897,6 +2892,56 @@ func TestWorkspaceUpdateTTL(t *testing.T) { } }) + t.Run("RemoveAutostopWithRunningWorkspaceWithMaxDeadline", func(t *testing.T) { + t.Parallel() + + var ( + ctx = testutil.Context(t, testutil.WaitLong) + client, db = coderdtest.NewWithDatabase(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user = coderdtest.CreateFirstUser(t, client) + version = coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) + _ = coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template = coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + deadline = 8 * time.Hour + maxDeadline = 10 * time.Hour + workspace = coderdtest.CreateWorkspace(t, client, template.ID, func(cwr *codersdk.CreateWorkspaceRequest) { + cwr.TTLMillis = ptr.Ref(deadline.Milliseconds()) + }) + build = coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) + ) + + // This is a hack, but the max_deadline isn't precisely configurable + // without a lot of unnecessary hassle. + dbBuild, err := db.GetWorkspaceBuildByID(dbauthz.AsSystemRestricted(ctx), build.ID) + require.NoError(t, err) + dbJob, err := db.GetProvisionerJobByID(dbauthz.AsSystemRestricted(ctx), dbBuild.JobID) + require.NoError(t, err) + require.True(t, dbJob.CompletedAt.Valid) + initialDeadline := dbJob.CompletedAt.Time.Add(deadline) + expectedMaxDeadline := dbJob.CompletedAt.Time.Add(maxDeadline) + err = db.UpdateWorkspaceBuildDeadlineByID(dbauthz.AsSystemRestricted(ctx), database.UpdateWorkspaceBuildDeadlineByIDParams{ + ID: build.ID, + Deadline: initialDeadline, + MaxDeadline: expectedMaxDeadline, + UpdatedAt: dbtime.Now(), + }) + require.NoError(t, err) + + // Remove autostop. + err = client.UpdateWorkspaceTTL(ctx, workspace.ID, codersdk.UpdateWorkspaceTTLRequest{ + TTLMillis: nil, + }) + require.NoError(t, err) + + // Expect that the deadline is set to the max_deadline. + build, err = client.WorkspaceBuild(ctx, build.ID) + require.NoError(t, err) + require.True(t, build.Deadline.Valid) + require.WithinDuration(t, build.Deadline.Time, expectedMaxDeadline, time.Second) + require.True(t, build.MaxDeadline.Valid) + require.WithinDuration(t, build.MaxDeadline.Time, expectedMaxDeadline, time.Second) + }) + t.Run("CustomAutostopDisabledByTemplate", func(t *testing.T) { t.Parallel() var ( @@ -3842,7 +3887,9 @@ func TestWorkspaceWithEphemeralRichParameters(t *testing.T) { }}, }) coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) - template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID, func(request *codersdk.CreateTemplateRequest) { + request.UseClassicParameterFlow = ptr.Ref(true) // TODO: Remove this when dynamic parameters handles this case + }) // Create workspace with default values workspace := coderdtest.CreateWorkspace(t, client, template.ID) @@ -4456,14 +4503,12 @@ func TestOIDCRemoved(t *testing.T) { user, userData := coderdtest.CreateAnotherUser(t, owner, first.OrganizationID, rbac.ScopedRoleOrgAdmin(first.OrganizationID)) ctx := testutil.Context(t, testutil.WaitMedium) - //nolint:gocritic // unit test _, err := db.UpdateUserLoginType(dbauthz.AsSystemRestricted(ctx), database.UpdateUserLoginTypeParams{ NewLoginType: database.LoginTypeOIDC, UserID: userData.ID, }) require.NoError(t, err) - //nolint:gocritic // unit test _, err = db.InsertUserLink(dbauthz.AsSystemRestricted(ctx), database.InsertUserLinkParams{ UserID: userData.ID, LoginType: database.LoginTypeOIDC, @@ -4552,7 +4597,6 @@ func TestWorkspaceFilterHasAITask(t *testing.T) { }) if aiTaskPrompt != nil { - //nolint:gocritic // unit test err := db.InsertWorkspaceBuildParameters(dbauthz.AsSystemRestricted(ctx), database.InsertWorkspaceBuildParametersParams{ WorkspaceBuildID: build.ID, Name: []string{provider.TaskPromptParameterName}, @@ -4755,9 +4799,393 @@ func TestMultipleAITasksDisallowed(t *testing.T) { ws := coderdtest.CreateWorkspace(t, client, template.ID) coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) - //nolint: gocritic // testing ctx := dbauthz.AsSystemRestricted(t.Context()) pj, err := db.GetProvisionerJobByID(ctx, ws.LatestBuild.Job.ID) require.NoError(t, err) require.Contains(t, pj.Error.String, "only one 'coder_ai_task' resource can be provisioned per template") } + +func TestUpdateWorkspaceACL(t *testing.T) { + t.Parallel() + + t.Run("OK", func(t *testing.T) { + t.Parallel() + + dv := coderdtest.DeploymentValues(t) + dv.Experiments = []string{string(codersdk.ExperimentWorkspaceSharing)} + adminClient := coderdtest.New(t, &coderdtest.Options{ + IncludeProvisionerDaemon: true, + DeploymentValues: dv, + }) + adminUser := coderdtest.CreateFirstUser(t, adminClient) + orgID := adminUser.OrganizationID + client, _ := coderdtest.CreateAnotherUser(t, adminClient, orgID) + _, friend := coderdtest.CreateAnotherUser(t, adminClient, orgID) + + tv := coderdtest.CreateTemplateVersion(t, adminClient, orgID, nil) + coderdtest.AwaitTemplateVersionJobCompleted(t, adminClient, tv.ID) + template := coderdtest.CreateTemplate(t, adminClient, orgID, tv.ID) + + ws := coderdtest.CreateWorkspace(t, client, template.ID) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + + ctx := testutil.Context(t, testutil.WaitMedium) + err := client.UpdateWorkspaceACL(ctx, ws.ID, codersdk.UpdateWorkspaceACL{ + UserRoles: map[string]codersdk.WorkspaceRole{ + friend.ID.String(): codersdk.WorkspaceRoleAdmin, + }, + }) + require.NoError(t, err) + }) + + t.Run("UnknownUserID", func(t *testing.T) { + t.Parallel() + + dv := coderdtest.DeploymentValues(t) + dv.Experiments = []string{string(codersdk.ExperimentWorkspaceSharing)} + adminClient := coderdtest.New(t, &coderdtest.Options{ + IncludeProvisionerDaemon: true, + DeploymentValues: dv, + }) + adminUser := coderdtest.CreateFirstUser(t, adminClient) + orgID := adminUser.OrganizationID + client, _ := coderdtest.CreateAnotherUser(t, adminClient, orgID) + + tv := coderdtest.CreateTemplateVersion(t, adminClient, orgID, nil) + coderdtest.AwaitTemplateVersionJobCompleted(t, adminClient, tv.ID) + template := coderdtest.CreateTemplate(t, adminClient, orgID, tv.ID) + + ws := coderdtest.CreateWorkspace(t, client, template.ID) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + + ctx := testutil.Context(t, testutil.WaitMedium) + err := client.UpdateWorkspaceACL(ctx, ws.ID, codersdk.UpdateWorkspaceACL{ + UserRoles: map[string]codersdk.WorkspaceRole{ + uuid.NewString(): codersdk.WorkspaceRoleAdmin, + }, + }) + require.Error(t, err) + cerr, ok := codersdk.AsError(err) + require.True(t, ok) + require.Len(t, cerr.Validations, 1) + require.Equal(t, cerr.Validations[0].Field, "user_roles") + }) + + t.Run("DeletedUser", func(t *testing.T) { + t.Parallel() + + dv := coderdtest.DeploymentValues(t) + dv.Experiments = []string{string(codersdk.ExperimentWorkspaceSharing)} + adminClient := coderdtest.New(t, &coderdtest.Options{ + IncludeProvisionerDaemon: true, + DeploymentValues: dv, + }) + adminUser := coderdtest.CreateFirstUser(t, adminClient) + orgID := adminUser.OrganizationID + client, _ := coderdtest.CreateAnotherUser(t, adminClient, orgID) + _, mike := coderdtest.CreateAnotherUser(t, adminClient, orgID) + + tv := coderdtest.CreateTemplateVersion(t, adminClient, orgID, nil) + coderdtest.AwaitTemplateVersionJobCompleted(t, adminClient, tv.ID) + template := coderdtest.CreateTemplate(t, adminClient, orgID, tv.ID) + + ws := coderdtest.CreateWorkspace(t, client, template.ID) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + + ctx := testutil.Context(t, testutil.WaitMedium) + err := adminClient.DeleteUser(ctx, mike.ID) + require.NoError(t, err) + err = client.UpdateWorkspaceACL(ctx, ws.ID, codersdk.UpdateWorkspaceACL{ + UserRoles: map[string]codersdk.WorkspaceRole{ + mike.ID.String(): codersdk.WorkspaceRoleAdmin, + }, + }) + require.Error(t, err) + cerr, ok := codersdk.AsError(err) + require.True(t, ok) + require.Len(t, cerr.Validations, 1) + require.Equal(t, cerr.Validations[0].Field, "user_roles") + }) +} + +func TestWorkspaceCreateWithImplicitPreset(t *testing.T) { + t.Parallel() + + // Helper function to create template with presets + createTemplateWithPresets := func(t *testing.T, client *codersdk.Client, user codersdk.CreateFirstUserResponse, presets []*proto.Preset) (codersdk.Template, codersdk.TemplateVersion) { + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionPlan: []*proto.Response{ + { + Type: &proto.Response_Plan{ + Plan: &proto.PlanComplete{ + Presets: presets, + }, + }, + }, + }, + }) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + return template, version + } + + // Helper function to create workspace and verify preset usage + createWorkspaceAndVerifyPreset := func(t *testing.T, client *codersdk.Client, template codersdk.Template, expectedPresetID *uuid.UUID, params []codersdk.WorkspaceBuildParameter) codersdk.Workspace { + wsName := testutil.GetRandomNameHyphenated(t) + var ws codersdk.Workspace + if len(params) > 0 { + ws = coderdtest.CreateWorkspace(t, client, template.ID, func(cwr *codersdk.CreateWorkspaceRequest) { + cwr.Name = wsName + cwr.RichParameterValues = params + }) + } else { + ws = coderdtest.CreateWorkspace(t, client, template.ID, func(cwr *codersdk.CreateWorkspaceRequest) { + cwr.Name = wsName + }) + } + require.Equal(t, wsName, ws.Name) + + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + + // Verify the preset was used if expected + if expectedPresetID != nil { + require.NotNil(t, ws.LatestBuild.TemplateVersionPresetID) + require.Equal(t, *expectedPresetID, *ws.LatestBuild.TemplateVersionPresetID) + } else { + require.Nil(t, ws.LatestBuild.TemplateVersionPresetID) + } + + return ws + } + + t.Run("NoPresets", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + + // Create template with no presets + template, _ := createTemplateWithPresets(t, client, user, []*proto.Preset{}) + + // Test workspace creation with no parameters + createWorkspaceAndVerifyPreset(t, client, template, nil, nil) + + // Test workspace creation with parameters (should still work, no preset matching) + createWorkspaceAndVerifyPreset(t, client, template, nil, []codersdk.WorkspaceBuildParameter{ + {Name: "param1", Value: "value1"}, + }) + }) + + t.Run("SinglePresetNoParameters", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + + // Create template with single preset that has no parameters + preset := &proto.Preset{ + Name: "empty-preset", + Description: "A preset with no parameters", + Parameters: []*proto.PresetParameter{}, + } + template, version := createTemplateWithPresets(t, client, user, []*proto.Preset{preset}) + + // Get the preset ID from the database + ctx := context.Background() + presets, err := client.TemplateVersionPresets(ctx, version.ID) + require.NoError(t, err) + require.Len(t, presets, 1) + presetID := presets[0].ID + + // Test workspace creation with no parameters - should match the preset + createWorkspaceAndVerifyPreset(t, client, template, &presetID, nil) + + // Test workspace creation with parameters - should not match the preset + createWorkspaceAndVerifyPreset(t, client, template, &presetID, []codersdk.WorkspaceBuildParameter{ + {Name: "param1", Value: "value1"}, + }) + }) + + t.Run("SinglePresetWithParameters", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + + // Create template with single preset that has parameters + preset := &proto.Preset{ + Name: "param-preset", + Description: "A preset with parameters", + Parameters: []*proto.PresetParameter{ + {Name: "param1", Value: "value1"}, + {Name: "param2", Value: "value2"}, + }, + } + template, version := createTemplateWithPresets(t, client, user, []*proto.Preset{preset}) + + // Get the preset ID from the database + ctx := context.Background() + presets, err := client.TemplateVersionPresets(ctx, version.ID) + require.NoError(t, err) + require.Len(t, presets, 1) + presetID := presets[0].ID + + // Test workspace creation with no parameters - should not match the preset + createWorkspaceAndVerifyPreset(t, client, template, nil, nil) + + // Test workspace creation with exact matching parameters - should match the preset + createWorkspaceAndVerifyPreset(t, client, template, &presetID, []codersdk.WorkspaceBuildParameter{ + {Name: "param1", Value: "value1"}, + {Name: "param2", Value: "value2"}, + }) + + // Test workspace creation with partial matching parameters - should not match the preset + createWorkspaceAndVerifyPreset(t, client, template, nil, []codersdk.WorkspaceBuildParameter{ + {Name: "param1", Value: "value1"}, + }) + + // Test workspace creation with different parameter values - should not match the preset + createWorkspaceAndVerifyPreset(t, client, template, nil, []codersdk.WorkspaceBuildParameter{ + {Name: "param1", Value: "value1"}, + {Name: "param2", Value: "different"}, + }) + + // Test workspace creation with extra parameters - should match the preset + createWorkspaceAndVerifyPreset(t, client, template, &presetID, []codersdk.WorkspaceBuildParameter{ + {Name: "param1", Value: "value1"}, + {Name: "param2", Value: "value2"}, + {Name: "param3", Value: "value3"}, + }) + }) + + t.Run("MultiplePresets", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + + // Create template with multiple presets + preset1 := &proto.Preset{ + Name: "empty-preset", + Description: "A preset with no parameters", + Parameters: []*proto.PresetParameter{}, + } + preset2 := &proto.Preset{ + Name: "single-param-preset", + Description: "A preset with one parameter", + Parameters: []*proto.PresetParameter{ + {Name: "param1", Value: "value1"}, + }, + } + preset3 := &proto.Preset{ + Name: "multi-param-preset", + Description: "A preset with multiple parameters", + Parameters: []*proto.PresetParameter{ + {Name: "param1", Value: "value1"}, + {Name: "param2", Value: "value2"}, + }, + } + template, version := createTemplateWithPresets(t, client, user, []*proto.Preset{preset1, preset2, preset3}) + + // Get the preset IDs from the database + ctx := context.Background() + presets, err := client.TemplateVersionPresets(ctx, version.ID) + require.NoError(t, err) + require.Len(t, presets, 3) + + // Sort presets by name to get consistent ordering + var emptyPresetID, singleParamPresetID, multiParamPresetID uuid.UUID + for _, p := range presets { + switch p.Name { + case "empty-preset": + emptyPresetID = p.ID + case "single-param-preset": + singleParamPresetID = p.ID + case "multi-param-preset": + multiParamPresetID = p.ID + } + } + + // Test workspace creation with no parameters - should match empty preset + createWorkspaceAndVerifyPreset(t, client, template, &emptyPresetID, nil) + + // Test workspace creation with single parameter - should match single param preset + createWorkspaceAndVerifyPreset(t, client, template, &singleParamPresetID, []codersdk.WorkspaceBuildParameter{ + {Name: "param1", Value: "value1"}, + }) + + // Test workspace creation with multiple parameters - should match multi param preset + createWorkspaceAndVerifyPreset(t, client, template, &multiParamPresetID, []codersdk.WorkspaceBuildParameter{ + {Name: "param1", Value: "value1"}, + {Name: "param2", Value: "value2"}, + }) + + // Test workspace creation with non-matching parameters - should not match any preset + createWorkspaceAndVerifyPreset(t, client, template, &emptyPresetID, []codersdk.WorkspaceBuildParameter{ + {Name: "param1", Value: "different"}, + }) + }) + + t.Run("PresetSpecifiedExplicitly", func(t *testing.T) { + t.Parallel() + + client := coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user := coderdtest.CreateFirstUser(t, client) + + // Create template with multiple presets + preset1 := &proto.Preset{ + Name: "preset1", + Description: "First preset", + Parameters: []*proto.PresetParameter{ + {Name: "param1", Value: "value1"}, + }, + } + preset2 := &proto.Preset{ + Name: "preset2", + Description: "Second preset", + Parameters: []*proto.PresetParameter{ + {Name: "param1", Value: "value2"}, + }, + } + template, version := createTemplateWithPresets(t, client, user, []*proto.Preset{preset1, preset2}) + + // Get the preset IDs from the database + ctx := context.Background() + presets, err := client.TemplateVersionPresets(ctx, version.ID) + require.NoError(t, err) + require.Len(t, presets, 2) + + var preset1ID, preset2ID uuid.UUID + for _, p := range presets { + switch p.Name { + case "preset1": + preset1ID = p.ID + case "preset2": + preset2ID = p.ID + } + } + + // Test workspace creation with preset1 specified explicitly - should use preset1 regardless of parameters + ws := coderdtest.CreateWorkspace(t, client, template.ID, func(cwr *codersdk.CreateWorkspaceRequest) { + cwr.TemplateVersionPresetID = preset1ID + cwr.RichParameterValues = []codersdk.WorkspaceBuildParameter{ + {Name: "param1", Value: "value2"}, // This would normally match preset2 + } + }) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + require.NotNil(t, ws.LatestBuild.TemplateVersionPresetID) + require.Equal(t, preset1ID, *ws.LatestBuild.TemplateVersionPresetID) + + // Test workspace creation with preset2 specified explicitly - should use preset2 regardless of parameters + ws2 := coderdtest.CreateWorkspace(t, client, template.ID, func(cwr *codersdk.CreateWorkspaceRequest) { + cwr.TemplateVersionPresetID = preset2ID + cwr.RichParameterValues = []codersdk.WorkspaceBuildParameter{ + {Name: "param1", Value: "value1"}, // This would normally match preset1 + } + }) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws2.LatestBuild.ID) + require.NotNil(t, ws2.LatestBuild.TemplateVersionPresetID) + require.Equal(t, preset2ID, *ws2.LatestBuild.TemplateVersionPresetID) + }) +} diff --git a/coderd/workspacestats/reporter.go b/coderd/workspacestats/reporter.go index 58d177f1c2071..7a6b1d50034a8 100644 --- a/coderd/workspacestats/reporter.go +++ b/coderd/workspacestats/reporter.go @@ -126,13 +126,8 @@ func (r *Reporter) ReportAgentStats(ctx context.Context, now time.Time, workspac // update prometheus metrics if r.opts.UpdateAgentMetricsFn != nil { - user, err := r.opts.Database.GetUserByID(ctx, workspace.OwnerID) - if err != nil { - return xerrors.Errorf("get user: %w", err) - } - r.opts.UpdateAgentMetricsFn(ctx, prometheusmetrics.AgentMetricLabels{ - Username: user.Username, + Username: workspace.OwnerUsername, WorkspaceName: workspace.Name, AgentName: workspaceAgent.Name, TemplateName: templateName, @@ -149,33 +144,36 @@ func (r *Reporter) ReportAgentStats(ctx context.Context, now time.Time, workspac return nil } - // check next autostart - var nextAutostart time.Time - if workspace.AutostartSchedule.String != "" { - templateSchedule, err := (*(r.opts.TemplateScheduleStore.Load())).Get(ctx, r.opts.Database, workspace.TemplateID) - // If the template schedule fails to load, just default to bumping - // without the next transition and log it. - switch { - case err == nil: - next, allowed := schedule.NextAutostart(now, workspace.AutostartSchedule.String, templateSchedule) - if allowed { - nextAutostart = next + // Prebuilds are not subject to activity-based deadline bumps + if !workspace.IsPrebuild() { + // check next autostart + var nextAutostart time.Time + if workspace.AutostartSchedule.String != "" { + templateSchedule, err := (*(r.opts.TemplateScheduleStore.Load())).Get(ctx, r.opts.Database, workspace.TemplateID) + // If the template schedule fails to load, just default to bumping + // without the next transition and log it. + switch { + case err == nil: + next, allowed := schedule.NextAutostart(now, workspace.AutostartSchedule.String, templateSchedule) + if allowed { + nextAutostart = next + } + case database.IsQueryCanceledError(err): + r.opts.Logger.Debug(ctx, "query canceled while loading template schedule", + slog.F("workspace_id", workspace.ID), + slog.F("template_id", workspace.TemplateID)) + default: + r.opts.Logger.Error(ctx, "failed to load template schedule bumping activity, defaulting to bumping by 60min", + slog.F("workspace_id", workspace.ID), + slog.F("template_id", workspace.TemplateID), + slog.Error(err), + ) } - case database.IsQueryCanceledError(err): - r.opts.Logger.Debug(ctx, "query canceled while loading template schedule", - slog.F("workspace_id", workspace.ID), - slog.F("template_id", workspace.TemplateID)) - default: - r.opts.Logger.Error(ctx, "failed to load template schedule bumping activity, defaulting to bumping by 60min", - slog.F("workspace_id", workspace.ID), - slog.F("template_id", workspace.TemplateID), - slog.Error(err), - ) } - } - // bump workspace activity - ActivityBumpWorkspace(ctx, r.opts.Logger.Named("activity_bump"), r.opts.Database, workspace.ID, nextAutostart) + // bump workspace activity + ActivityBumpWorkspace(ctx, r.opts.Logger.Named("activity_bump"), r.opts.Database, workspace.ID, nextAutostart) + } // bump workspace last_used_at r.opts.UsageTracker.Add(workspace.ID) diff --git a/coderd/wsbuilder/wsbuilder.go b/coderd/wsbuilder/wsbuilder.go index 52567b463baac..223b8bec084ad 100644 --- a/coderd/wsbuilder/wsbuilder.go +++ b/coderd/wsbuilder/wsbuilder.go @@ -15,6 +15,7 @@ import ( "github.com/coder/coder/v2/coderd/dynamicparameters" "github.com/coder/coder/v2/coderd/files" + "github.com/coder/coder/v2/coderd/prebuilds" "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/util/ptr" "github.com/coder/coder/v2/provisioner/terraform/tfparse" @@ -409,6 +410,7 @@ func (b *Builder) buildTx(authFunc func(action policy.Action, object rbac.Object Valid: true, RawMessage: traceMetadataRaw, }, + LogsOverflowed: false, }) if err != nil { return nil, nil, nil, BuildError{http.StatusInternalServerError, "insert provisioner job", err} @@ -441,6 +443,20 @@ func (b *Builder) buildTx(authFunc func(action policy.Action, object rbac.Object var workspaceBuild database.WorkspaceBuild err = b.store.InTx(func(store database.Store) error { + names, values, err := b.getParameters() + if err != nil { + // getParameters already wraps errors in BuildError + return err + } + + if b.templateVersionPresetID == uuid.Nil { + presetID, err := prebuilds.FindMatchingPresetID(b.ctx, b.store, templateVersionID, names, values) + if err != nil { + return BuildError{http.StatusInternalServerError, "find matching preset", err} + } + b.templateVersionPresetID = presetID + } + err = store.InsertWorkspaceBuild(b.ctx, database.InsertWorkspaceBuildParams{ ID: workspaceBuildID, CreatedAt: now, @@ -472,12 +488,6 @@ func (b *Builder) buildTx(authFunc func(action policy.Action, object rbac.Object return BuildError{code, "insert workspace build", err} } - names, values, err := b.getParameters() - if err != nil { - // getParameters already wraps errors in BuildError - return err - } - err = store.InsertWorkspaceBuildParameters(b.ctx, database.InsertWorkspaceBuildParametersParams{ WorkspaceBuildID: workspaceBuildID, Name: names, diff --git a/coderd/wsbuilder/wsbuilder_test.go b/coderd/wsbuilder/wsbuilder_test.go index ee421a8adb649..b862e6459c285 100644 --- a/coderd/wsbuilder/wsbuilder_test.go +++ b/coderd/wsbuilder/wsbuilder_test.go @@ -82,6 +82,7 @@ func TestBuilder_NoOptions(t *testing.T) { }), withInTx, + expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), expectBuild(func(bld database.InsertWorkspaceBuildParams) { asrt.Equal(inactiveVersionID, bld.TemplateVersionID) asrt.Equal(workspaceID, bld.WorkspaceID) @@ -132,6 +133,7 @@ func TestBuilder_Initiator(t *testing.T) { asrt.Equal(otherUserID, job.InitiatorID) }), withInTx, + expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), expectBuild(func(bld database.InsertWorkspaceBuildParams) { asrt.Equal(otherUserID, bld.InitiatorID) }), @@ -180,6 +182,7 @@ func TestBuilder_Baggage(t *testing.T) { asrt.Contains(string(job.TraceMetadata.RawMessage), "ip=127.0.0.1") }), withInTx, + expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), expectBuild(func(bld database.InsertWorkspaceBuildParams) { }), expectBuildParameters(func(params database.InsertWorkspaceBuildParametersParams) { @@ -219,6 +222,7 @@ func TestBuilder_Reason(t *testing.T) { expectProvisionerJob(func(_ database.InsertProvisionerJobParams) { }), withInTx, + expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), expectBuild(func(bld database.InsertWorkspaceBuildParams) { asrt.Equal(database.BuildReasonAutostart, bld.Reason) }), @@ -261,6 +265,7 @@ func TestBuilder_ActiveVersion(t *testing.T) { }), withInTx, + expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), expectBuild(func(bld database.InsertWorkspaceBuildParams) { asrt.Equal(activeVersionID, bld.TemplateVersionID) // no previous build... @@ -386,6 +391,7 @@ func TestWorkspaceBuildWithTags(t *testing.T) { expectBuildParameters(func(_ database.InsertWorkspaceBuildParametersParams) { }), withBuild, + expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), ) fc := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) @@ -470,6 +476,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) { } }), withBuild, + expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), ) fc := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) @@ -519,6 +526,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) { } }), withBuild, + expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), ) fc := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) @@ -661,6 +669,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) { } }), withBuild, + expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), ) fc := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) @@ -713,6 +722,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) { withProvisionerDaemons([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow{}), // Outputs + expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), expectProvisionerJob(func(job database.InsertProvisionerJobParams) {}), withInTx, expectBuild(func(bld database.InsertWorkspaceBuildParams) {}), @@ -775,6 +785,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) { withProvisionerDaemons([]database.GetEligibleProvisionerDaemonsByProvisionerJobIDsRow{}), // Outputs + expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), expectProvisionerJob(func(job database.InsertProvisionerJobParams) {}), withInTx, expectBuild(func(bld database.InsertWorkspaceBuildParams) {}), @@ -906,6 +917,7 @@ func TestWorkspaceBuildDeleteOrphan(t *testing.T) { }), withInTx, + expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), expectBuild(func(bld database.InsertWorkspaceBuildParams) { asrt.Equal(inactiveVersionID, bld.TemplateVersionID) asrt.Equal(workspaceID, bld.WorkspaceID) @@ -968,6 +980,7 @@ func TestWorkspaceBuildDeleteOrphan(t *testing.T) { }), withInTx, + expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), expectBuild(func(bld database.InsertWorkspaceBuildParams) { asrt.Equal(inactiveVersionID, bld.TemplateVersionID) asrt.Equal(workspaceID, bld.WorkspaceID) @@ -1041,6 +1054,7 @@ func TestWorkspaceBuildUsageChecker(t *testing.T) { // Outputs expectProvisionerJob(func(job database.InsertProvisionerJobParams) {}), withInTx, + expectFindMatchingPresetID(uuid.Nil, sql.ErrNoRows), expectBuild(func(bld database.InsertWorkspaceBuildParams) {}), withBuild, expectBuildParameters(func(params database.InsertWorkspaceBuildParametersParams) {}), @@ -1485,6 +1499,14 @@ func withProvisionerDaemons(provisionerDaemons []database.GetEligibleProvisioner } } +func expectFindMatchingPresetID(id uuid.UUID, err error) func(mTx *dbmock.MockStore) { + return func(mTx *dbmock.MockStore) { + mTx.EXPECT().FindMatchingPresetID(gomock.Any(), gomock.Any()). + Times(1). + Return(id, err) + } +} + type fakeUsageChecker struct { checkBuildUsageFunc func(ctx context.Context, store database.Store, templateVersion *database.TemplateVersion) (wsbuilder.UsageCheckResponse, error) } diff --git a/codersdk/aitasks.go b/codersdk/aitasks.go index 89ca9c948f272..965b0fac1d493 100644 --- a/codersdk/aitasks.go +++ b/codersdk/aitasks.go @@ -3,8 +3,10 @@ package codersdk import ( "context" "encoding/json" + "fmt" "net/http" "strings" + "time" "github.com/google/uuid" @@ -44,3 +46,130 @@ func (c *ExperimentalClient) AITaskPrompts(ctx context.Context, buildIDs []uuid. var prompts AITasksPromptsResponse return prompts, json.NewDecoder(res.Body).Decode(&prompts) } + +type CreateTaskRequest struct { + TemplateVersionID uuid.UUID `json:"template_version_id" format:"uuid"` + TemplateVersionPresetID uuid.UUID `json:"template_version_preset_id,omitempty" format:"uuid"` + Prompt string `json:"prompt"` +} + +func (c *ExperimentalClient) CreateTask(ctx context.Context, user string, request CreateTaskRequest) (Workspace, error) { + res, err := c.Request(ctx, http.MethodPost, fmt.Sprintf("/api/experimental/tasks/%s", user), request) + if err != nil { + return Workspace{}, err + } + defer res.Body.Close() + + if res.StatusCode != http.StatusCreated { + return Workspace{}, ReadBodyAsError(res) + } + + var workspace Workspace + if err := json.NewDecoder(res.Body).Decode(&workspace); err != nil { + return Workspace{}, err + } + + return workspace, nil +} + +// TaskState represents the high-level lifecycle of a task. +// +// Experimental: This type is experimental and may change in the future. +type TaskState string + +const ( + TaskStateWorking TaskState = "working" + TaskStateIdle TaskState = "idle" + TaskStateCompleted TaskState = "completed" + TaskStateFailed TaskState = "failed" +) + +// Task represents a task. +// +// Experimental: This type is experimental and may change in the future. +type Task struct { + ID uuid.UUID `json:"id" format:"uuid"` + OrganizationID uuid.UUID `json:"organization_id" format:"uuid"` + OwnerID uuid.UUID `json:"owner_id" format:"uuid"` + Name string `json:"name"` + TemplateID uuid.UUID `json:"template_id" format:"uuid"` + WorkspaceID uuid.NullUUID `json:"workspace_id" format:"uuid"` + InitialPrompt string `json:"initial_prompt"` + Status WorkspaceStatus `json:"status" enums:"pending,starting,running,stopping,stopped,failed,canceling,canceled,deleting,deleted"` + CurrentState *TaskStateEntry `json:"current_state"` + CreatedAt time.Time `json:"created_at" format:"date-time"` + UpdatedAt time.Time `json:"updated_at" format:"date-time"` +} + +// TaskStateEntry represents a single entry in the task's state history. +// +// Experimental: This type is experimental and may change in the future. +type TaskStateEntry struct { + Timestamp time.Time `json:"timestamp" format:"date-time"` + State TaskState `json:"state" enum:"working,idle,completed,failed"` + Message string `json:"message"` + URI string `json:"uri"` +} + +// TasksFilter filters the list of tasks. +// +// Experimental: This type is experimental and may change in the future. +type TasksFilter struct { + // Owner can be a username, UUID, or "me" + Owner string `json:"owner,omitempty"` +} + +// Tasks lists all tasks belonging to the user or specified owner. +// +// Experimental: This method is experimental and may change in the future. +func (c *ExperimentalClient) Tasks(ctx context.Context, filter *TasksFilter) ([]Task, error) { + if filter == nil { + filter = &TasksFilter{} + } + user := filter.Owner + if user == "" { + user = "me" + } + + res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/experimental/tasks/%s", user), nil) + if err != nil { + return nil, err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return nil, ReadBodyAsError(res) + } + + // Experimental response shape for tasks list (server returns []Task). + type tasksListResponse struct { + Tasks []Task `json:"tasks"` + Count int `json:"count"` + } + var tres tasksListResponse + if err := json.NewDecoder(res.Body).Decode(&tres); err != nil { + return nil, err + } + + return tres.Tasks, nil +} + +// TaskByID fetches a single experimental task by its ID. +// +// Experimental: This method is experimental and may change in the future. +func (c *ExperimentalClient) TaskByID(ctx context.Context, id uuid.UUID) (Task, error) { + res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/experimental/tasks/%s/%s", "me", id.String()), nil) + if err != nil { + return Task{}, err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return Task{}, ReadBodyAsError(res) + } + + var task Task + if err := json.NewDecoder(res.Body).Decode(&task); err != nil { + return Task{}, err + } + + return task, nil +} diff --git a/codersdk/cors_behavior.go b/codersdk/cors_behavior.go new file mode 100644 index 0000000000000..8de84b000994e --- /dev/null +++ b/codersdk/cors_behavior.go @@ -0,0 +1,8 @@ +package codersdk + +type CORSBehavior string + +const ( + CORSBehaviorSimple CORSBehavior = "simple" + CORSBehaviorPassthru CORSBehavior = "passthru" +) diff --git a/codersdk/deployment.go b/codersdk/deployment.go index 3844523063db7..a70a6b55500d2 100644 --- a/codersdk/deployment.go +++ b/codersdk/deployment.go @@ -88,7 +88,8 @@ const ( // ManagedAgentLimit is a usage period feature, so the value in the license // contains both a soft and hard limit. Refer to // enterprise/coderd/license/license.go for the license format. - FeatureManagedAgentLimit FeatureName = "managed_agent_limit" + FeatureManagedAgentLimit FeatureName = "managed_agent_limit" + FeatureWorkspaceExternalAgent FeatureName = "workspace_external_agent" ) var ( @@ -115,6 +116,7 @@ var ( FeatureMultipleOrganizations, FeatureWorkspacePrebuilds, FeatureManagedAgentLimit, + FeatureWorkspaceExternalAgent, } // FeatureNamesMap is a map of all feature names for quick lookups. @@ -155,6 +157,7 @@ func (n FeatureName) AlwaysEnable() bool { FeatureCustomRoles: true, FeatureMultipleOrganizations: true, FeatureWorkspacePrebuilds: true, + FeatureWorkspaceExternalAgent: true, }[n] } @@ -3432,6 +3435,7 @@ const ( ExperimentWebPush Experiment = "web-push" // Enables web push notifications through the browser. ExperimentOAuth2 Experiment = "oauth2" // Enables OAuth2 provider functionality. ExperimentMCPServerHTTP Experiment = "mcp-server-http" // Enables the MCP HTTP server functionality. + ExperimentWorkspaceSharing Experiment = "workspace-sharing" // Enables updating workspace ACLs for sharing with users and groups. ) func (e Experiment) DisplayName() string { @@ -3450,6 +3454,8 @@ func (e Experiment) DisplayName() string { return "OAuth2 Provider Functionality" case ExperimentMCPServerHTTP: return "MCP HTTP Server Functionality" + case ExperimentWorkspaceSharing: + return "Workspace Sharing" default: // Split on hyphen and convert to title case // e.g. "web-push" -> "Web Push", "mcp-server-http" -> "Mcp Server Http" @@ -3467,6 +3473,7 @@ var ExperimentsKnown = Experiments{ ExperimentWebPush, ExperimentOAuth2, ExperimentMCPServerHTTP, + ExperimentWorkspaceSharing, } // ExperimentsSafe should include all experiments that are safe for diff --git a/codersdk/initscript.go b/codersdk/initscript.go new file mode 100644 index 0000000000000..d1adbf79460f0 --- /dev/null +++ b/codersdk/initscript.go @@ -0,0 +1,28 @@ +package codersdk + +import ( + "context" + "fmt" + "io" + "net/http" +) + +func (c *Client) InitScript(ctx context.Context, os, arch string) (string, error) { + url := fmt.Sprintf("/api/v2/init-script/%s/%s", os, arch) + res, err := c.Request(ctx, http.MethodGet, url, nil) + if err != nil { + return "", err + } + defer res.Body.Close() + + if res.StatusCode != http.StatusOK { + return "", ReadBodyAsError(res) + } + + script, err := io.ReadAll(res.Body) + if err != nil { + return "", err + } + + return string(script), nil +} diff --git a/codersdk/organizations.go b/codersdk/organizations.go index 35a1e0be0a426..bca87c7bd4591 100644 --- a/codersdk/organizations.go +++ b/codersdk/organizations.go @@ -206,6 +206,9 @@ type CreateTemplateRequest struct { // true, and is why `*bool` is used here. When dynamic parameters becomes // the default, this will default to false. UseClassicParameterFlow *bool `json:"template_use_classic_parameter_flow,omitempty"` + + // CORSBehavior allows optionally specifying the CORS behavior for all shared ports. + CORSBehavior *CORSBehavior `json:"cors_behavior"` } // CreateWorkspaceRequest provides options for creating a new workspace. @@ -341,9 +344,12 @@ func (c *Client) ProvisionerDaemons(ctx context.Context) ([]ProvisionerDaemon, e } type OrganizationProvisionerDaemonsOptions struct { - Limit int - IDs []uuid.UUID - Tags map[string]string + Limit int + Offline bool + Status []ProvisionerDaemonStatus + MaxAge time.Duration + IDs []uuid.UUID + Tags map[string]string } func (c *Client) OrganizationProvisionerDaemons(ctx context.Context, organizationID uuid.UUID, opts *OrganizationProvisionerDaemonsOptions) ([]ProvisionerDaemon, error) { @@ -352,6 +358,15 @@ func (c *Client) OrganizationProvisionerDaemons(ctx context.Context, organizatio if opts.Limit > 0 { qp.Add("limit", strconv.Itoa(opts.Limit)) } + if opts.Offline { + qp.Add("offline", "true") + } + if len(opts.Status) > 0 { + qp.Add("status", joinSlice(opts.Status)) + } + if opts.MaxAge > 0 { + qp.Add("max_age", opts.MaxAge.String()) + } if len(opts.IDs) > 0 { qp.Add("ids", joinSliceStringer(opts.IDs)) } @@ -538,6 +553,7 @@ type TemplateFilter struct { OrganizationID uuid.UUID `typescript:"-"` ExactName string `typescript:"-"` FuzzyName string `typescript:"-"` + AuthorUsername string `typescript:"-"` SearchQuery string `json:"q,omitempty"` } @@ -559,6 +575,11 @@ func (f TemplateFilter) asRequestOption() RequestOption { if f.FuzzyName != "" { params = append(params, fmt.Sprintf("name:%q", f.FuzzyName)) } + + if f.AuthorUsername != "" { + params = append(params, fmt.Sprintf("author:%q", f.AuthorUsername)) + } + if f.SearchQuery != "" { params = append(params, f.SearchQuery) } diff --git a/codersdk/presets.go b/codersdk/presets.go index 2d94aa3baabb6..eba1b9216dd4b 100644 --- a/codersdk/presets.go +++ b/codersdk/presets.go @@ -16,6 +16,8 @@ type Preset struct { Parameters []PresetParameter Default bool DesiredPrebuildInstances *int + Description string + Icon string } type PresetParameter struct { diff --git a/codersdk/provisionerdaemons.go b/codersdk/provisionerdaemons.go index 5fbda371b8f3f..4bff7d7827aa1 100644 --- a/codersdk/provisionerdaemons.go +++ b/codersdk/provisionerdaemons.go @@ -49,6 +49,14 @@ const ( ProvisionerDaemonBusy ProvisionerDaemonStatus = "busy" ) +func ProvisionerDaemonStatusEnums() []ProvisionerDaemonStatus { + return []ProvisionerDaemonStatus{ + ProvisionerDaemonOffline, + ProvisionerDaemonIdle, + ProvisionerDaemonBusy, + } +} + type ProvisionerDaemon struct { ID uuid.UUID `json:"id" format:"uuid" table:"id"` OrganizationID uuid.UUID `json:"organization_id" format:"uuid" table:"organization id"` @@ -188,6 +196,7 @@ type ProvisionerJob struct { Type ProvisionerJobType `json:"type" table:"type"` AvailableWorkers []uuid.UUID `json:"available_workers,omitempty" format:"uuid" table:"available workers"` Metadata ProvisionerJobMetadata `json:"metadata" table:"metadata,recursive_inline"` + LogsOverflowed bool `json:"logs_overflowed" table:"logs overflowed"` } // ProvisionerJobLog represents the provisioner log entry annotated with source and level. diff --git a/codersdk/rbacresources_gen.go b/codersdk/rbacresources_gen.go index 3e22d29c73297..54532106a6fd1 100644 --- a/codersdk/rbacresources_gen.go +++ b/codersdk/rbacresources_gen.go @@ -35,7 +35,9 @@ const ( ResourceSystem RBACResource = "system" ResourceTailnetCoordinator RBACResource = "tailnet_coordinator" ResourceTemplate RBACResource = "template" + ResourceUsageEvent RBACResource = "usage_event" ResourceUser RBACResource = "user" + ResourceUserSecret RBACResource = "user_secret" ResourceWebpushSubscription RBACResource = "webpush_subscription" ResourceWorkspace RBACResource = "workspace" ResourceWorkspaceAgentDevcontainers RBACResource = "workspace_agent_devcontainers" @@ -99,7 +101,9 @@ var RBACResourceActions = map[RBACResource][]RBACAction{ ResourceSystem: {ActionCreate, ActionDelete, ActionRead, ActionUpdate}, ResourceTailnetCoordinator: {ActionCreate, ActionDelete, ActionRead, ActionUpdate}, ResourceTemplate: {ActionCreate, ActionDelete, ActionRead, ActionUpdate, ActionUse, ActionViewInsights}, + ResourceUsageEvent: {ActionCreate, ActionRead, ActionUpdate}, ResourceUser: {ActionCreate, ActionDelete, ActionRead, ActionReadPersonal, ActionUpdate, ActionUpdatePersonal}, + ResourceUserSecret: {ActionCreate, ActionDelete, ActionRead, ActionUpdate}, ResourceWebpushSubscription: {ActionCreate, ActionDelete, ActionRead}, ResourceWorkspace: {ActionApplicationConnect, ActionCreate, ActionCreateAgent, ActionDelete, ActionDeleteAgent, ActionRead, ActionSSH, ActionWorkspaceStart, ActionWorkspaceStop, ActionUpdate}, ResourceWorkspaceAgentDevcontainers: {ActionCreate}, diff --git a/codersdk/templates.go b/codersdk/templates.go index a7d983bc1cc6f..cc9314e44794d 100644 --- a/codersdk/templates.go +++ b/codersdk/templates.go @@ -61,6 +61,7 @@ type Template struct { // template version. RequireActiveVersion bool `json:"require_active_version"` MaxPortShareLevel WorkspaceAgentPortShareLevel `json:"max_port_share_level"` + CORSBehavior CORSBehavior `json:"cors_behavior"` UseClassicParameterFlow bool `json:"use_classic_parameter_flow"` } @@ -207,11 +208,11 @@ type ACLAvailable struct { } type UpdateTemplateMeta struct { - Name string `json:"name,omitempty" validate:"omitempty,template_name"` - DisplayName string `json:"display_name,omitempty" validate:"omitempty,template_display_name"` - Description string `json:"description,omitempty"` - Icon string `json:"icon,omitempty"` - DefaultTTLMillis int64 `json:"default_ttl_ms,omitempty"` + Name string `json:"name,omitempty" validate:"omitempty,template_name"` + DisplayName *string `json:"display_name,omitempty" validate:"omitempty,template_display_name"` + Description *string `json:"description,omitempty"` + Icon *string `json:"icon,omitempty"` + DefaultTTLMillis int64 `json:"default_ttl_ms,omitempty"` // ActivityBumpMillis allows optionally specifying the activity bump // duration for all workspaces created from this template. Defaults to 1h // but can be set to 0 to disable activity bumping. @@ -252,6 +253,7 @@ type UpdateTemplateMeta struct { // of the template. DisableEveryoneGroupAccess bool `json:"disable_everyone_group_access"` MaxPortShareLevel *WorkspaceAgentPortShareLevel `json:"max_port_share_level,omitempty"` + CORSBehavior *CORSBehavior `json:"cors_behavior,omitempty"` // UseClassicParameterFlow is a flag that switches the default behavior to use the classic // parameter flow when creating a workspace. This only affects deployments with the experiment // "dynamic-parameters" enabled. This setting will live for a period after the experiment is diff --git a/codersdk/templateversions.go b/codersdk/templateversions.go index a47cbb685898b..992797578630d 100644 --- a/codersdk/templateversions.go +++ b/codersdk/templateversions.go @@ -33,6 +33,8 @@ type TemplateVersion struct { Warnings []TemplateVersionWarning `json:"warnings,omitempty" enums:"DEPRECATED_PARAMETERS"` MatchedProvisioners *MatchedProvisioners `json:"matched_provisioners,omitempty"` + + HasExternalAgent bool `json:"has_external_agent"` } type TemplateVersionExternalAuth struct { diff --git a/codersdk/toolsdk/bash.go b/codersdk/toolsdk/bash.go index 5fb15843f1bf1..037227337bfc9 100644 --- a/codersdk/toolsdk/bash.go +++ b/codersdk/toolsdk/bash.go @@ -21,9 +21,10 @@ import ( ) type WorkspaceBashArgs struct { - Workspace string `json:"workspace"` - Command string `json:"command"` - TimeoutMs int `json:"timeout_ms,omitempty"` + Workspace string `json:"workspace"` + Command string `json:"command"` + TimeoutMs int `json:"timeout_ms,omitempty"` + Background bool `json:"background,omitempty"` } type WorkspaceBashResult struct { @@ -50,9 +51,13 @@ The workspace parameter supports various formats: The timeout_ms parameter specifies the command timeout in milliseconds (defaults to 60000ms, maximum of 300000ms). If the command times out, all output captured up to that point is returned with a cancellation message. +For background commands (background: true), output is captured until the timeout is reached, then the command +continues running in the background. The captured output is returned as the result. + Examples: - workspace: "my-workspace", command: "ls -la" - workspace: "john/dev-env", command: "git status", timeout_ms: 30000 +- workspace: "my-workspace", command: "npm run dev", background: true, timeout_ms: 10000 - workspace: "my-workspace.main", command: "docker ps"`, Schema: aisdk.Schema{ Properties: map[string]any{ @@ -70,6 +75,10 @@ Examples: "default": 60000, "minimum": 1, }, + "background": map[string]any{ + "type": "boolean", + "description": "Whether to run the command in the background. Output is captured until timeout, then the command continues running in the background.", + }, }, Required: []string{"workspace", "command"}, }, @@ -137,23 +146,35 @@ Examples: // Set default timeout if not specified (60 seconds) timeoutMs := args.TimeoutMs + defaultTimeoutMs := 60000 if timeoutMs <= 0 { - timeoutMs = 60000 + timeoutMs = defaultTimeoutMs + } + command := args.Command + if args.Background { + // For background commands, use nohup directly to ensure they survive SSH session + // termination. This captures output normally but allows the process to continue + // running even after the SSH connection closes. + command = fmt.Sprintf("nohup %s &1", args.Command) } - // Create context with timeout - ctx, cancel = context.WithTimeout(ctx, time.Duration(timeoutMs)*time.Millisecond) - defer cancel() + // Create context with command timeout (replace the broader MCP timeout) + commandCtx, commandCancel := context.WithTimeout(ctx, time.Duration(timeoutMs)*time.Millisecond) + defer commandCancel() // Execute command with timeout handling - output, err := executeCommandWithTimeout(ctx, session, args.Command) + output, err := executeCommandWithTimeout(commandCtx, session, command) outputStr := strings.TrimSpace(string(output)) // Handle command execution results if err != nil { // Check if the command timed out - if errors.Is(context.Cause(ctx), context.DeadlineExceeded) { - outputStr += "\nCommand canceled due to timeout" + if errors.Is(context.Cause(commandCtx), context.DeadlineExceeded) { + if args.Background { + outputStr += "\nCommand continues running in background" + } else { + outputStr += "\nCommand canceled due to timeout" + } return WorkspaceBashResult{ Output: outputStr, ExitCode: 124, @@ -387,21 +408,27 @@ func executeCommandWithTimeout(ctx context.Context, session *gossh.Session, comm return safeWriter.Bytes(), err case <-ctx.Done(): // Context was canceled (timeout or other cancellation) - // Close the session to stop the command - _ = session.Close() + // Close the session to stop the command, but handle errors gracefully + closeErr := session.Close() - // Give a brief moment to collect any remaining output - timer := time.NewTimer(50 * time.Millisecond) + // Give a brief moment to collect any remaining output and for goroutines to finish + timer := time.NewTimer(100 * time.Millisecond) defer timer.Stop() select { case <-timer.C: // Timer expired, return what we have + break case err := <-done: // Command finished during grace period - return safeWriter.Bytes(), err + if closeErr == nil { + return safeWriter.Bytes(), err + } + // If session close failed, prioritize the context error + break } + // Return the collected output with the context error return safeWriter.Bytes(), context.Cause(ctx) } } @@ -421,5 +448,9 @@ func (sw *syncWriter) Write(p []byte) (n int, err error) { func (sw *syncWriter) Bytes() []byte { sw.mu.Lock() defer sw.mu.Unlock() - return sw.w.Bytes() + // Return a copy to prevent race conditions with the underlying buffer + b := sw.w.Bytes() + result := make([]byte, len(b)) + copy(result, b) + return result } diff --git a/codersdk/toolsdk/bash_test.go b/codersdk/toolsdk/bash_test.go index 53ac480039278..caf54109688ea 100644 --- a/codersdk/toolsdk/bash_test.go +++ b/codersdk/toolsdk/bash_test.go @@ -2,6 +2,7 @@ package toolsdk_test import ( "context" + "runtime" "testing" "github.com/stretchr/testify/require" @@ -9,10 +10,14 @@ import ( "github.com/coder/coder/v2/agent/agenttest" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/codersdk/toolsdk" + "github.com/coder/coder/v2/testutil" ) func TestWorkspaceBash(t *testing.T) { t.Parallel() + if runtime.GOOS == "windows" { + t.Skip("Skipping on Windows: Workspace MCP bash tools rely on a Unix-like shell (bash) and POSIX/SSH semantics. Use Linux/macOS or WSL for these tests.") + } t.Run("ValidateArgs", func(t *testing.T) { t.Parallel() @@ -96,6 +101,9 @@ func TestWorkspaceBash(t *testing.T) { func TestNormalizeWorkspaceInput(t *testing.T) { t.Parallel() + if runtime.GOOS == "windows" { + t.Skip("Skipping on Windows: Workspace MCP bash tools rely on a Unix-like shell (bash) and POSIX/SSH semantics. Use Linux/macOS or WSL for these tests.") + } testCases := []struct { name string @@ -150,6 +158,9 @@ func TestNormalizeWorkspaceInput(t *testing.T) { func TestAllToolsIncludesBash(t *testing.T) { t.Parallel() + if runtime.GOOS == "windows" { + t.Skip("Skipping on Windows: Workspace MCP bash tools rely on a Unix-like shell (bash) and POSIX/SSH semantics. Use Linux/macOS or WSL for these tests.") + } // Verify that WorkspaceBash is included in the All slice found := false @@ -168,14 +179,15 @@ func TestAllToolsIncludesBash(t *testing.T) { func TestWorkspaceBashTimeout(t *testing.T) { t.Parallel() + if runtime.GOOS == "windows" { + t.Skip("Skipping on Windows: Workspace MCP bash tools rely on a Unix-like shell (bash) and POSIX/SSH semantics. Use Linux/macOS or WSL for these tests.") + } t.Run("TimeoutDefaultValue", func(t *testing.T) { t.Parallel() // Test that the TimeoutMs field can be set and read correctly args := toolsdk.WorkspaceBashArgs{ - Workspace: "test-workspace", - Command: "echo test", TimeoutMs: 0, // Should default to 60000 in handler } @@ -192,8 +204,6 @@ func TestWorkspaceBashTimeout(t *testing.T) { // Test that negative values can be set and will be handled by the default logic args := toolsdk.WorkspaceBashArgs{ - Workspace: "test-workspace", - Command: "echo test", TimeoutMs: -100, } @@ -254,6 +264,9 @@ func TestWorkspaceBashTimeout(t *testing.T) { func TestWorkspaceBashTimeoutIntegration(t *testing.T) { t.Parallel() + if runtime.GOOS == "windows" { + t.Skip("Skipping on Windows: Workspace MCP bash tools rely on a Unix-like shell (bash) and POSIX/SSH semantics. Use Linux/macOS or WSL for these tests.") + } t.Run("ActualTimeoutBehavior", func(t *testing.T) { t.Parallel() @@ -279,7 +292,7 @@ func TestWorkspaceBashTimeoutIntegration(t *testing.T) { TimeoutMs: 2000, // 2 seconds timeout - should timeout after first echo } - result, err := toolsdk.WorkspaceBash.Handler(t.Context(), deps, args) + result, err := testTool(t, toolsdk.WorkspaceBash, deps, args) // Should not error (timeout is handled gracefully) require.NoError(t, err) @@ -313,7 +326,6 @@ func TestWorkspaceBashTimeoutIntegration(t *testing.T) { deps, err := toolsdk.NewDeps(client) require.NoError(t, err) - ctx := context.Background() args := toolsdk.WorkspaceBashArgs{ Workspace: workspace.Name, @@ -321,7 +333,8 @@ func TestWorkspaceBashTimeoutIntegration(t *testing.T) { TimeoutMs: 5000, // 5 second timeout - plenty of time } - result, err := toolsdk.WorkspaceBash.Handler(ctx, deps, args) + // Use testTool to register the tool as tested and satisfy coverage validation + result, err := testTool(t, toolsdk.WorkspaceBash, deps, args) // Should not error require.NoError(t, err) @@ -338,3 +351,145 @@ func TestWorkspaceBashTimeoutIntegration(t *testing.T) { require.NotContains(t, result.Output, "Command canceled due to timeout") }) } + +func TestWorkspaceBashBackgroundIntegration(t *testing.T) { + t.Parallel() + if runtime.GOOS == "windows" { + t.Skip("Skipping on Windows: Workspace MCP bash tools rely on a Unix-like shell (bash) and POSIX/SSH semantics. Use Linux/macOS or WSL for these tests.") + } + + t.Run("BackgroundCommandCapturesOutput", func(t *testing.T) { + t.Parallel() + + client, workspace, agentToken := setupWorkspaceForAgent(t) + + // Start the agent and wait for it to be fully ready + _ = agenttest.New(t, client.URL, agentToken) + + // Wait for workspace agents to be ready + coderdtest.NewWorkspaceAgentWaiter(t, client, workspace.ID).Wait() + + deps, err := toolsdk.NewDeps(client) + require.NoError(t, err) + + args := toolsdk.WorkspaceBashArgs{ + Workspace: workspace.Name, + Command: `echo "started" && sleep 60 && echo "completed"`, // Command that would take 60+ seconds + Background: true, // Run in background + TimeoutMs: 2000, // 2 second timeout + } + + result, err := testTool(t, toolsdk.WorkspaceBash, deps, args) + + // Should not error + require.NoError(t, err) + + t.Logf("Background result: exitCode=%d, output=%q", result.ExitCode, result.Output) + + // Should have exit code 124 (timeout) since command times out + require.Equal(t, 124, result.ExitCode) + + // Should capture output up to timeout point + require.Contains(t, result.Output, "started", "Should contain output captured before timeout") + + // Should NOT contain the second echo (it never executed due to timeout) + require.NotContains(t, result.Output, "completed", "Should not contain output after timeout") + + // Should contain background continuation message + require.Contains(t, result.Output, "Command continues running in background") + }) + + t.Run("BackgroundVsNormalExecution", func(t *testing.T) { + t.Parallel() + + client, workspace, agentToken := setupWorkspaceForAgent(t) + + // Start the agent and wait for it to be fully ready + _ = agenttest.New(t, client.URL, agentToken) + + // Wait for workspace agents to be ready + coderdtest.NewWorkspaceAgentWaiter(t, client, workspace.ID).Wait() + + deps, err := toolsdk.NewDeps(client) + require.NoError(t, err) + + // First run the same command in normal mode + normalArgs := toolsdk.WorkspaceBashArgs{ + Workspace: workspace.Name, + Command: `echo "hello world"`, + Background: false, + } + + normalResult, err := toolsdk.WorkspaceBash.Handler(t.Context(), deps, normalArgs) + require.NoError(t, err) + + // Normal mode should return the actual output + require.Equal(t, 0, normalResult.ExitCode) + require.Equal(t, "hello world", normalResult.Output) + + // Now run the same command in background mode + backgroundArgs := toolsdk.WorkspaceBashArgs{ + Workspace: workspace.Name, + Command: `echo "hello world"`, + Background: true, + } + + backgroundResult, err := testTool(t, toolsdk.WorkspaceBash, deps, backgroundArgs) + require.NoError(t, err) + + t.Logf("Normal result: %q", normalResult.Output) + t.Logf("Background result: %q", backgroundResult.Output) + + // Background mode should also return the actual output since command completes quickly + require.Equal(t, 0, backgroundResult.ExitCode) + require.Equal(t, "hello world", backgroundResult.Output) + }) + + t.Run("BackgroundCommandContinuesAfterTimeout", func(t *testing.T) { + t.Parallel() + + client, workspace, agentToken := setupWorkspaceForAgent(t) + + // Start the agent and wait for it to be fully ready + _ = agenttest.New(t, client.URL, agentToken) + + // Wait for workspace agents to be ready + coderdtest.NewWorkspaceAgentWaiter(t, client, workspace.ID).Wait() + + deps, err := toolsdk.NewDeps(client) + require.NoError(t, err) + + args := toolsdk.WorkspaceBashArgs{ + Workspace: workspace.Name, + Command: `echo "started" && sleep 4 && echo "done" > /tmp/bg-test-done`, // Command that will timeout but continue + TimeoutMs: 2000, // 2000ms timeout (shorter than command duration) + Background: true, // Run in background + } + + result, err := testTool(t, toolsdk.WorkspaceBash, deps, args) + + // Should not error but should timeout + require.NoError(t, err) + + t.Logf("Background with timeout result: exitCode=%d, output=%q", result.ExitCode, result.Output) + + // Should have timeout exit code + require.Equal(t, 124, result.ExitCode) + + // Should capture output before timeout + require.Contains(t, result.Output, "started", "Should contain output captured before timeout") + + // Should contain background continuation message + require.Contains(t, result.Output, "Command continues running in background") + + // Wait for the background command to complete (even though SSH session timed out) + require.Eventually(t, func() bool { + checkArgs := toolsdk.WorkspaceBashArgs{ + Workspace: workspace.Name, + Command: `cat /tmp/bg-test-done 2>/dev/null || echo "not found"`, + } + checkResult, err := toolsdk.WorkspaceBash.Handler(t.Context(), deps, checkArgs) + return err == nil && checkResult.Output == "done" + }, testutil.WaitMedium, testutil.IntervalMedium, "Background command should continue running and complete after timeout") + }) +} diff --git a/codersdk/toolsdk/chatgpt.go b/codersdk/toolsdk/chatgpt.go new file mode 100644 index 0000000000000..c4bf5b5d4c174 --- /dev/null +++ b/codersdk/toolsdk/chatgpt.go @@ -0,0 +1,436 @@ +package toolsdk + +import ( + "context" + "encoding/json" + "fmt" + "strings" + + "golang.org/x/xerrors" + + "github.com/google/uuid" + + "github.com/coder/aisdk-go" + "github.com/coder/coder/v2/codersdk" +) + +type ObjectType string + +const ( + ObjectTypeTemplate ObjectType = "template" + ObjectTypeWorkspace ObjectType = "workspace" +) + +type ObjectID struct { + Type ObjectType + ID string +} + +func (o ObjectID) String() string { + return fmt.Sprintf("%s:%s", o.Type, o.ID) +} + +func parseObjectID(id string) (ObjectID, error) { + parts := strings.Split(id, ":") + if len(parts) != 2 || (parts[0] != "template" && parts[0] != "workspace") { + return ObjectID{}, xerrors.Errorf("invalid ID: %s", id) + } + return ObjectID{ + Type: ObjectType(parts[0]), + ID: parts[1], + }, nil +} + +func createObjectID(objectType ObjectType, id string) ObjectID { + return ObjectID{ + Type: objectType, + ID: id, + } +} + +func searchTemplates(ctx context.Context, deps Deps, query string) ([]SearchResultItem, error) { + serverURL := deps.ServerURL() + templates, err := deps.coderClient.Templates(ctx, codersdk.TemplateFilter{ + SearchQuery: query, + }) + if err != nil { + return nil, err + } + results := make([]SearchResultItem, len(templates)) + for i, template := range templates { + results[i] = SearchResultItem{ + ID: createObjectID(ObjectTypeTemplate, template.ID.String()).String(), + Title: template.DisplayName, + Text: template.Description, + URL: fmt.Sprintf("%s/templates/%s/%s", serverURL, template.OrganizationName, template.Name), + } + } + return results, nil +} + +func searchWorkspaces(ctx context.Context, deps Deps, query string) ([]SearchResultItem, error) { + serverURL := deps.ServerURL() + workspaces, err := deps.coderClient.Workspaces(ctx, codersdk.WorkspaceFilter{ + FilterQuery: query, + }) + if err != nil { + return nil, err + } + results := make([]SearchResultItem, len(workspaces.Workspaces)) + for i, workspace := range workspaces.Workspaces { + results[i] = SearchResultItem{ + ID: createObjectID(ObjectTypeWorkspace, workspace.ID.String()).String(), + Title: workspace.Name, + Text: fmt.Sprintf("Owner: %s\nTemplate: %s\nLatest transition: %s", workspace.OwnerName, workspace.TemplateDisplayName, workspace.LatestBuild.Transition), + URL: fmt.Sprintf("%s/%s/%s", serverURL, workspace.OwnerName, workspace.Name), + } + } + return results, nil +} + +type SearchQueryType string + +const ( + SearchQueryTypeTemplates SearchQueryType = "templates" + SearchQueryTypeWorkspaces SearchQueryType = "workspaces" +) + +type SearchQuery struct { + Type SearchQueryType + Query string +} + +func parseSearchQuery(query string) (SearchQuery, error) { + parts := strings.Split(query, "/") + queryType := SearchQueryType(parts[0]) + if !(queryType == SearchQueryTypeTemplates || queryType == SearchQueryTypeWorkspaces) { + return SearchQuery{}, xerrors.Errorf("invalid query: %s", query) + } + queryString := "" + if len(parts) > 1 { + queryString = strings.Join(parts[1:], "/") + } + return SearchQuery{ + Type: queryType, + Query: queryString, + }, nil +} + +type SearchArgs struct { + Query string `json:"query"` +} + +type SearchResultItem struct { + ID string `json:"id"` + Title string `json:"title"` + Text string `json:"text"` + URL string `json:"url"` +} + +type SearchResult struct { + Results []SearchResultItem `json:"results"` +} + +// Implements the "search" tool as described in https://platform.openai.com/docs/mcp#search-tool. +// From my experiments with ChatGPT, it has access to the description that is provided in the +// tool definition. This is in contrast to the "fetch" tool, where ChatGPT does not have access +// to the description. +var ChatGPTSearch = Tool[SearchArgs, SearchResult]{ + Tool: aisdk.Tool{ + Name: ToolNameChatGPTSearch, + // Note: the queries are passed directly to the list workspaces and list templates + // endpoints. The list of accepted parameters below is not exhaustive - some are omitted + // because they are not as useful in ChatGPT. + Description: `Search for templates, workspaces, and files in workspaces. + +To pick what you want to search for, use the following query formats: + +- ` + "`" + `templates/` + "`" + `: List templates. The query accepts the following, optional parameters delineated by whitespace: + - "name:" - Fuzzy search by template name (substring matching). Example: "name:docker" + - "organization:" - Filter by organization ID or name. Example: "organization:coder" + - "deprecated:" - Filter by deprecated status. Example: "deprecated:true" + - "deleted:" - Filter by deleted status. Example: "deleted:true" + - "has-ai-task:" - Filter by whether the template has an AI task. Example: "has-ai-task:true" +- ` + "`" + `workspaces/` + "`" + `: List workspaces. The query accepts the following, optional parameters delineated by whitespace: + - "owner:" - Filter by workspace owner (username or "me"). Example: "owner:alice" or "owner:me" + - "template:" - Filter by template name. Example: "template:web-development" + - "name:" - Filter by workspace name (substring matching). Example: "name:project" + - "organization:" - Filter by organization ID or name. Example: "organization:engineering" + - "status:" - Filter by workspace/build status. Values: starting, stopping, deleting, deleted, stopped, started, running, pending, canceling, canceled, failed. Example: "status:running" + - "has-agent:" - Filter by agent connectivity status. Values: connecting, connected, disconnected, timeout. Example: "has-agent:connected" + - "dormant:" - Filter dormant workspaces. Example: "dormant:true" + - "outdated:" - Filter workspaces using outdated template versions. Example: "outdated:true" + - "last_used_after:" - Filter workspaces last used after a specific date. Example: "last_used_after:2023-12-01T00:00:00Z" + - "last_used_before:" - Filter workspaces last used before a specific date. Example: "last_used_before:2023-12-31T23:59:59Z" + - "has-ai-task:" - Filter workspaces with AI tasks. Example: "has-ai-task:true" + - "param:" or "param:=" - Match workspaces by build parameters. Example: "param:environment=production" or "param:gpu" + +# Examples + +## Listing templates + +List all templates without any filters. + +` + "```" + `json +{ + "query": "templates" +} +` + "```" + ` + +List all templates with a "docker" substring in the name. + +` + "```" + `json +{ + "query": "templates/name:docker" +} +` + "```" + ` + +List templates in a specific organization. + +` + "```" + `json +{ + "query": "templates/organization:engineering" +} +` + "```" + ` + +List deprecated templates. + +` + "```" + `json +{ + "query": "templates/deprecated:true" +} +` + "```" + ` + +List templates that have AI tasks. + +` + "```" + `json +{ + "query": "templates/has-ai-task:true" +} +` + "```" + ` + +List templates with multiple filters - non-deprecated templates with "web" in the name. + +` + "```" + `json +{ + "query": "templates/name:web deprecated:false" +} +` + "```" + ` + +List deleted templates (requires appropriate permissions). + +` + "```" + `json +{ + "query": "templates/deleted:true" +} +` + "```" + ` + +## Listing workspaces + +List all workspaces belonging to the current user. + +` + "```" + `json +{ + "query": "workspaces/owner:me" +} +` + "```" + ` + +or + +` + "```" + `json +{ + "query": "workspaces" +} +` + "```" + ` + +List all workspaces belonging to a user with username "josh". + +` + "```" + `json +{ + "query": "workspaces/owner:josh" +} +` + "```" + ` + +List all running workspaces. + +` + "```" + `json +{ + "query": "workspaces/status:running" +} +` + "```" + ` + +List workspaces using a specific template. + +` + "```" + `json +{ + "query": "workspaces/template:web-development" +} +` + "```" + ` + +List dormant workspaces. + +` + "```" + `json +{ + "query": "workspaces/dormant:true" +} +` + "```" + ` + +List workspaces with connected agents. + +` + "```" + `json +{ + "query": "workspaces/has-agent:connected" +} +` + "```" + ` + +List workspaces with multiple filters - running workspaces owned by "alice". + +` + "```" + `json +{ + "query": "workspaces/owner:alice status:running" +} +` + "```" + ` +`, + Schema: aisdk.Schema{ + Properties: map[string]any{ + "query": map[string]any{ + "type": "string", + }, + }, + Required: []string{"query"}, + }, + }, + Handler: func(ctx context.Context, deps Deps, args SearchArgs) (SearchResult, error) { + query, err := parseSearchQuery(args.Query) + if err != nil { + return SearchResult{}, err + } + switch query.Type { + case SearchQueryTypeTemplates: + results, err := searchTemplates(ctx, deps, query.Query) + if err != nil { + return SearchResult{}, err + } + return SearchResult{Results: results}, nil + case SearchQueryTypeWorkspaces: + searchQuery := query.Query + if searchQuery == "" { + searchQuery = "owner:me" + } + results, err := searchWorkspaces(ctx, deps, searchQuery) + if err != nil { + return SearchResult{}, err + } + return SearchResult{Results: results}, nil + } + return SearchResult{}, xerrors.Errorf("reached unreachable code with query: %s", args.Query) + }, +} + +func fetchWorkspace(ctx context.Context, deps Deps, workspaceID string) (FetchResult, error) { + parsedID, err := uuid.Parse(workspaceID) + if err != nil { + return FetchResult{}, xerrors.Errorf("invalid workspace ID, must be a valid UUID: %w", err) + } + workspace, err := deps.coderClient.Workspace(ctx, parsedID) + if err != nil { + return FetchResult{}, err + } + workspaceJSON, err := json.Marshal(workspace) + if err != nil { + return FetchResult{}, xerrors.Errorf("failed to marshal workspace: %w", err) + } + return FetchResult{ + ID: workspace.ID.String(), + Title: workspace.Name, + Text: string(workspaceJSON), + URL: fmt.Sprintf("%s/%s/%s", deps.ServerURL(), workspace.OwnerName, workspace.Name), + }, nil +} + +func fetchTemplate(ctx context.Context, deps Deps, templateID string) (FetchResult, error) { + parsedID, err := uuid.Parse(templateID) + if err != nil { + return FetchResult{}, xerrors.Errorf("invalid template ID, must be a valid UUID: %w", err) + } + template, err := deps.coderClient.Template(ctx, parsedID) + if err != nil { + return FetchResult{}, err + } + templateJSON, err := json.Marshal(template) + if err != nil { + return FetchResult{}, xerrors.Errorf("failed to marshal template: %w", err) + } + return FetchResult{ + ID: template.ID.String(), + Title: template.DisplayName, + Text: string(templateJSON), + URL: fmt.Sprintf("%s/templates/%s/%s", deps.ServerURL(), template.OrganizationName, template.Name), + }, nil +} + +type FetchArgs struct { + ID string `json:"id"` +} + +type FetchResult struct { + ID string `json:"id"` + Title string `json:"title"` + Text string `json:"text"` + URL string `json:"url"` + Metadata map[string]string `json:"metadata,omitempty"` +} + +// Implements the "fetch" tool as described in https://platform.openai.com/docs/mcp#fetch-tool. +// From my experiments with ChatGPT, it seems that it does not see the description that is +// provided in the tool definition. ChatGPT sees "fetch" as a very simple tool that can take +// an ID returned by the "search" tool and return the full details of the object. +var ChatGPTFetch = Tool[FetchArgs, FetchResult]{ + Tool: aisdk.Tool{ + Name: ToolNameChatGPTFetch, + Description: `Fetch a template or workspace. + + ID is a unique identifier for the template or workspace. It is a combination of the type and the ID. + + # Examples + + Fetch a template with ID "56f13b5e-be0f-4a17-bdb2-aaacc3353ea7". + + ` + "```" + `json + { + "id": "template:56f13b5e-be0f-4a17-bdb2-aaacc3353ea7" + } + ` + "```" + ` + + Fetch a workspace with ID "fcb6fc42-ba88-4175-9508-88e6a554a61a". + + ` + "```" + `json + { + "id": "workspace:fcb6fc42-ba88-4175-9508-88e6a554a61a" + } + ` + "```" + ` + `, + + Schema: aisdk.Schema{ + Properties: map[string]any{ + "id": map[string]any{ + "type": "string", + }, + }, + Required: []string{"id"}, + }, + }, + Handler: func(ctx context.Context, deps Deps, args FetchArgs) (FetchResult, error) { + objectID, err := parseObjectID(args.ID) + if err != nil { + return FetchResult{}, err + } + switch objectID.Type { + case ObjectTypeTemplate: + return fetchTemplate(ctx, deps, objectID.ID) + case ObjectTypeWorkspace: + return fetchWorkspace(ctx, deps, objectID.ID) + } + return FetchResult{}, xerrors.Errorf("reached unreachable code with object ID: %s", args.ID) + }, +} diff --git a/codersdk/toolsdk/chatgpt_test.go b/codersdk/toolsdk/chatgpt_test.go new file mode 100644 index 0000000000000..c8a05ba41411b --- /dev/null +++ b/codersdk/toolsdk/chatgpt_test.go @@ -0,0 +1,566 @@ +// nolint:gocritic // This is a test package, so database types do not end up in the build +package toolsdk_test + +import ( + "encoding/json" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbfake" + "github.com/coder/coder/v2/coderd/database/dbgen" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/codersdk/toolsdk" +) + +func TestChatGPTSearch_TemplateSearch(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + query string + setupTemplates int + expectError bool + errorContains string + }{ + { + name: "ValidTemplatesQuery_MultipleTemplates", + query: "templates", + setupTemplates: 3, + expectError: false, + }, + { + name: "ValidTemplatesQuery_NoTemplates", + query: "templates", + setupTemplates: 0, + expectError: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + // Setup + client, store := coderdtest.NewWithDatabase(t, nil) + owner := coderdtest.CreateFirstUser(t, client) + + // Create templates as needed + var expectedTemplates []database.Template + for i := 0; i < tt.setupTemplates; i++ { + template := dbfake.TemplateVersion(t, store). + Seed(database.TemplateVersion{ + OrganizationID: owner.OrganizationID, + CreatedBy: owner.UserID, + }).Do() + expectedTemplates = append(expectedTemplates, template.Template) + } + + // Create tool dependencies + deps, err := toolsdk.NewDeps(client) + require.NoError(t, err) + + // Execute tool + args := toolsdk.SearchArgs{Query: tt.query} + result, err := testTool(t, toolsdk.ChatGPTSearch, deps, args) + + // Verify results + if tt.expectError { + require.Error(t, err) + if tt.errorContains != "" { + require.Contains(t, err.Error(), tt.errorContains) + } + return + } + + require.NoError(t, err) + require.Len(t, result.Results, tt.setupTemplates) + + // Validate result format for each template + templateIDsFound := make(map[string]bool) + for _, item := range result.Results { + require.NotEmpty(t, item.ID) + require.Contains(t, item.ID, "template:") + require.NotEmpty(t, item.Title) + require.Contains(t, item.URL, "/templates/") + + // Track that we found this template ID + templateIDsFound[item.ID] = true + } + + // Verify all expected templates are present + for _, expectedTemplate := range expectedTemplates { + expectedID := "template:" + expectedTemplate.ID.String() + require.True(t, templateIDsFound[expectedID], "Expected template %s not found in results", expectedID) + } + }) + } +} + +func TestChatGPTSearch_TemplateMultipleFilters(t *testing.T) { + t.Parallel() + + // Setup + client, store := coderdtest.NewWithDatabase(t, nil) + owner := coderdtest.CreateFirstUser(t, client) + org2 := dbgen.Organization(t, store, database.Organization{ + Name: "org2", + }) + + dbgen.Template(t, store, database.Template{ + OrganizationID: owner.OrganizationID, + CreatedBy: owner.UserID, + Name: "docker-development", // Name contains "docker" + DisplayName: "Docker Development", + Description: "A Docker-based development template", + }) + + // Create another template that doesn't contain "docker" + dbgen.Template(t, store, database.Template{ + OrganizationID: org2.ID, + CreatedBy: owner.UserID, + Name: "python-web", // Name doesn't contain "docker" + DisplayName: "Python Web", + Description: "A Python web development template", + }) + + // Create third template with "docker" in name + dockerTemplate2 := dbgen.Template(t, store, database.Template{ + OrganizationID: org2.ID, + CreatedBy: owner.UserID, + Name: "old-docker-template", // Name contains "docker" + DisplayName: "Old Docker Template", + Description: "An old Docker template", + }) + + // Create tool dependencies + deps, err := toolsdk.NewDeps(client) + require.NoError(t, err) + + args := toolsdk.SearchArgs{Query: "templates/name:docker organization:org2"} + result, err := testTool(t, toolsdk.ChatGPTSearch, deps, args) + + // Verify results + require.NoError(t, err) + require.Len(t, result.Results, 1, "Should match only the docker template in org2") + + expectedID := "template:" + dockerTemplate2.ID.String() + require.Equal(t, expectedID, result.Results[0].ID, "Should match the docker template in org2") +} + +func TestChatGPTSearch_WorkspaceSearch(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + query string + setupOwner string // "self" or "other" + setupWorkspace bool + expectError bool + errorContains string + }{ + { + name: "ValidWorkspacesQuery_CurrentUser", + query: "workspaces", + setupOwner: "self", + setupWorkspace: true, + expectError: false, + }, + { + name: "ValidWorkspacesQuery_CurrentUserMe", + query: "workspaces/owner:me", + setupOwner: "self", + setupWorkspace: true, + expectError: false, + }, + { + name: "ValidWorkspacesQuery_NoWorkspaces", + query: "workspaces", + setupOwner: "self", + setupWorkspace: false, + expectError: false, + }, + { + name: "ValidWorkspacesQuery_SpecificUser", + query: "workspaces/owner:otheruser", + setupOwner: "other", + setupWorkspace: true, + expectError: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + // Setup + client, store := coderdtest.NewWithDatabase(t, nil) + owner := coderdtest.CreateFirstUser(t, client) + + var workspaceOwnerID uuid.UUID + var workspaceClient *codersdk.Client + if tt.setupOwner == "self" { + workspaceOwnerID = owner.UserID + workspaceClient = client + } else { + var workspaceOwner codersdk.User + workspaceClient, workspaceOwner = coderdtest.CreateAnotherUserMutators(t, client, owner.OrganizationID, nil, func(r *codersdk.CreateUserRequestWithOrgs) { + r.Username = "otheruser" + }) + workspaceOwnerID = workspaceOwner.ID + } + + // Create workspace if needed + var expectedWorkspace database.WorkspaceTable + if tt.setupWorkspace { + workspace := dbfake.WorkspaceBuild(t, store, database.WorkspaceTable{ + Name: "test-workspace", + OrganizationID: owner.OrganizationID, + OwnerID: workspaceOwnerID, + }).Do() + expectedWorkspace = workspace.Workspace + } + + // Create tool dependencies + deps, err := toolsdk.NewDeps(workspaceClient) + require.NoError(t, err) + + // Execute tool + args := toolsdk.SearchArgs{Query: tt.query} + result, err := testTool(t, toolsdk.ChatGPTSearch, deps, args) + + // Verify results + if tt.expectError { + require.Error(t, err) + if tt.errorContains != "" { + require.Contains(t, err.Error(), tt.errorContains) + } + return + } + + require.NoError(t, err) + + if tt.setupWorkspace { + require.Len(t, result.Results, 1) + item := result.Results[0] + require.NotEmpty(t, item.ID) + require.Contains(t, item.ID, "workspace:") + require.Equal(t, expectedWorkspace.Name, item.Title) + require.Contains(t, item.Text, "Owner:") + require.Contains(t, item.Text, "Template:") + require.Contains(t, item.Text, "Latest transition:") + require.Contains(t, item.URL, expectedWorkspace.Name) + } else { + require.Len(t, result.Results, 0) + } + }) + } +} + +func TestChatGPTSearch_QueryParsing(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + query string + expectError bool + errorMsg string + }{ + { + name: "ValidTemplatesQuery", + query: "templates", + expectError: false, + }, + { + name: "ValidWorkspacesQuery", + query: "workspaces", + expectError: false, + }, + { + name: "ValidWorkspacesMeQuery", + query: "workspaces/owner:me", + expectError: false, + }, + { + name: "ValidWorkspacesUserQuery", + query: "workspaces/owner:testuser", + expectError: false, + }, + { + name: "InvalidQueryType", + query: "users", + expectError: true, + errorMsg: "invalid query", + }, + { + name: "EmptyQuery", + query: "", + expectError: true, + errorMsg: "invalid query", + }, + { + name: "MalformedQuery", + query: "invalidtype/somequery", + expectError: true, + errorMsg: "invalid query", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + // Setup minimal environment + client, _ := coderdtest.NewWithDatabase(t, nil) + coderdtest.CreateFirstUser(t, client) + + deps, err := toolsdk.NewDeps(client) + require.NoError(t, err) + + // Execute tool + args := toolsdk.SearchArgs{Query: tt.query} + _, err = testTool(t, toolsdk.ChatGPTSearch, deps, args) + + // Verify results + if tt.expectError { + require.Error(t, err) + require.Contains(t, err.Error(), tt.errorMsg) + } else { + require.NoError(t, err) + } + }) + } +} + +func TestChatGPTFetch_TemplateFetch(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + setupTemplate bool + objectID string // if empty, will use real template ID + expectError bool + errorContains string + }{ + { + name: "ValidTemplateFetch", + setupTemplate: true, + expectError: false, + }, + { + name: "NonExistentTemplateID", + setupTemplate: false, + objectID: "template:" + uuid.NewString(), + expectError: true, + errorContains: "Resource not found", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + // Setup + client, store := coderdtest.NewWithDatabase(t, nil) + owner := coderdtest.CreateFirstUser(t, client) + + var templateID string + var expectedTemplate database.Template + if tt.setupTemplate { + template := dbfake.TemplateVersion(t, store). + Seed(database.TemplateVersion{ + OrganizationID: owner.OrganizationID, + CreatedBy: owner.UserID, + }).Do() + expectedTemplate = template.Template + templateID = "template:" + template.Template.ID.String() + } else if tt.objectID != "" { + templateID = tt.objectID + } + + // Create tool dependencies + deps, err := toolsdk.NewDeps(client) + require.NoError(t, err) + + // Execute tool + args := toolsdk.FetchArgs{ID: templateID} + result, err := testTool(t, toolsdk.ChatGPTFetch, deps, args) + + // Verify results + if tt.expectError { + require.Error(t, err) + if tt.errorContains != "" { + require.Contains(t, err.Error(), tt.errorContains) + } + return + } + + require.NoError(t, err) + require.Equal(t, expectedTemplate.ID.String(), result.ID) + require.Equal(t, expectedTemplate.DisplayName, result.Title) + require.NotEmpty(t, result.Text) + require.Contains(t, result.URL, "/templates/") + require.Contains(t, result.URL, expectedTemplate.Name) + + // Validate JSON marshaling + var templateData codersdk.Template + err = json.Unmarshal([]byte(result.Text), &templateData) + require.NoError(t, err) + require.Equal(t, expectedTemplate.ID, templateData.ID) + }) + } +} + +func TestChatGPTFetch_WorkspaceFetch(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + setupWorkspace bool + objectID string // if empty, will use real workspace ID + expectError bool + errorContains string + }{ + { + name: "ValidWorkspaceFetch", + setupWorkspace: true, + expectError: false, + }, + { + name: "NonExistentWorkspaceID", + setupWorkspace: false, + objectID: "workspace:" + uuid.NewString(), + expectError: true, + errorContains: "Resource not found", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + // Setup + client, store := coderdtest.NewWithDatabase(t, nil) + owner := coderdtest.CreateFirstUser(t, client) + + var workspaceID string + var expectedWorkspace database.WorkspaceTable + if tt.setupWorkspace { + workspace := dbfake.WorkspaceBuild(t, store, database.WorkspaceTable{ + OrganizationID: owner.OrganizationID, + OwnerID: owner.UserID, + }).Do() + expectedWorkspace = workspace.Workspace + workspaceID = "workspace:" + workspace.Workspace.ID.String() + } else if tt.objectID != "" { + workspaceID = tt.objectID + } + + // Create tool dependencies + deps, err := toolsdk.NewDeps(client) + require.NoError(t, err) + + // Execute tool + args := toolsdk.FetchArgs{ID: workspaceID} + result, err := testTool(t, toolsdk.ChatGPTFetch, deps, args) + + // Verify results + if tt.expectError { + require.Error(t, err) + if tt.errorContains != "" { + require.Contains(t, err.Error(), tt.errorContains) + } + return + } + + require.NoError(t, err) + require.Equal(t, expectedWorkspace.ID.String(), result.ID) + require.Equal(t, expectedWorkspace.Name, result.Title) + require.NotEmpty(t, result.Text) + require.Contains(t, result.URL, expectedWorkspace.Name) + + // Validate JSON marshaling + var workspaceData codersdk.Workspace + err = json.Unmarshal([]byte(result.Text), &workspaceData) + require.NoError(t, err) + require.Equal(t, expectedWorkspace.ID, workspaceData.ID) + }) + } +} + +func TestChatGPTFetch_ObjectIDParsing(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + objectID string + expectError bool + errorMsg string + }{ + { + name: "ValidTemplateID", + objectID: "template:" + uuid.NewString(), + expectError: false, + }, + { + name: "ValidWorkspaceID", + objectID: "workspace:" + uuid.NewString(), + expectError: false, + }, + { + name: "MissingColon", + objectID: "template" + uuid.NewString(), + expectError: true, + errorMsg: "invalid ID", + }, + { + name: "InvalidUUID", + objectID: "template:invalid-uuid", + expectError: true, + errorMsg: "invalid template ID, must be a valid UUID", + }, + { + name: "UnsupportedType", + objectID: "user:" + uuid.NewString(), + expectError: true, + errorMsg: "invalid ID", + }, + { + name: "EmptyID", + objectID: "", + expectError: true, + errorMsg: "invalid ID", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + // Setup minimal environment + client, _ := coderdtest.NewWithDatabase(t, nil) + coderdtest.CreateFirstUser(t, client) + + deps, err := toolsdk.NewDeps(client) + require.NoError(t, err) + + // Execute tool + args := toolsdk.FetchArgs{ID: tt.objectID} + _, err = testTool(t, toolsdk.ChatGPTFetch, deps, args) + + // Verify results + if tt.expectError { + require.Error(t, err) + require.Contains(t, err.Error(), tt.errorMsg) + } else { + // For valid formats, we expect it to fail on API call since IDs don't exist + // but parsing should succeed + require.Error(t, err) + require.Contains(t, err.Error(), "Resource not found") + } + }) + } +} diff --git a/codersdk/toolsdk/toolsdk.go b/codersdk/toolsdk/toolsdk.go index 862d0c34a5316..7cb8cecb25234 100644 --- a/codersdk/toolsdk/toolsdk.go +++ b/codersdk/toolsdk/toolsdk.go @@ -36,6 +36,8 @@ const ( ToolNameCreateTemplate = "coder_create_template" ToolNameDeleteTemplate = "coder_delete_template" ToolNameWorkspaceBash = "coder_workspace_bash" + ToolNameChatGPTSearch = "search" + ToolNameChatGPTFetch = "fetch" ) func NewDeps(client *codersdk.Client, opts ...func(*Deps)) (Deps, error) { @@ -56,6 +58,13 @@ type Deps struct { report func(ReportTaskArgs) error } +func (d Deps) ServerURL() string { + serverURLCopy := *d.coderClient.URL + serverURLCopy.Path = "" + serverURLCopy.RawQuery = "" + return serverURLCopy.String() +} + func WithTaskReporter(fn func(ReportTaskArgs) error) func(*Deps) { return func(d *Deps) { d.report = fn @@ -194,6 +203,8 @@ var All = []GenericTool{ UploadTarFile.Generic(), UpdateTemplateActiveVersion.Generic(), WorkspaceBash.Generic(), + ChatGPTSearch.Generic(), + ChatGPTFetch.Generic(), } type ReportTaskArgs struct { @@ -229,7 +240,7 @@ ONLY report an "idle" or "failure" state if you have FULLY completed the task. Properties: map[string]any{ "summary": map[string]any{ "type": "string", - "description": "A concise summary of your current progress on the task. This must be less than 160 characters in length.", + "description": "A concise summary of your current progress on the task. This must be less than 160 characters in length and must not include newlines or other control characters.", }, "link": map[string]any{ "type": "string", diff --git a/codersdk/toolsdk/toolsdk_test.go b/codersdk/toolsdk/toolsdk_test.go index c201190bd3456..fb321e90e7dee 100644 --- a/codersdk/toolsdk/toolsdk_test.go +++ b/codersdk/toolsdk/toolsdk_test.go @@ -4,6 +4,7 @@ import ( "context" "encoding/json" "os" + "runtime" "sort" "sync" "testing" @@ -397,6 +398,9 @@ func TestTools(t *testing.T) { }) t.Run("WorkspaceSSHExec", func(t *testing.T) { + if runtime.GOOS == "windows" { + t.Skip("WorkspaceSSHExec is not supported on Windows") + } // Setup workspace exactly like main SSH tests client, workspace, agentToken := setupWorkspaceForAgent(t) @@ -456,7 +460,7 @@ var testedTools sync.Map // This is to mimic how we expect external callers to use the tool. func testTool[Arg, Ret any](t *testing.T, tool toolsdk.Tool[Arg, Ret], tb toolsdk.Deps, args Arg) (Ret, error) { t.Helper() - defer func() { testedTools.Store(tool.Tool.Name, true) }() + defer func() { testedTools.Store(tool.Name, true) }() toolArgs, err := json.Marshal(args) require.NoError(t, err, "failed to marshal args") result, err := tool.Generic().Handler(t.Context(), tb, toolArgs) @@ -625,23 +629,23 @@ func TestToolSchemaFields(t *testing.T) { // Test that all tools have the required Schema fields (Properties and Required) for _, tool := range toolsdk.All { - t.Run(tool.Tool.Name, func(t *testing.T) { + t.Run(tool.Name, func(t *testing.T) { t.Parallel() // Check that Properties is not nil - require.NotNil(t, tool.Tool.Schema.Properties, - "Tool %q missing Schema.Properties", tool.Tool.Name) + require.NotNil(t, tool.Schema.Properties, + "Tool %q missing Schema.Properties", tool.Name) // Check that Required is not nil - require.NotNil(t, tool.Tool.Schema.Required, - "Tool %q missing Schema.Required", tool.Tool.Name) + require.NotNil(t, tool.Schema.Required, + "Tool %q missing Schema.Required", tool.Name) // Ensure Properties has entries for all required fields - for _, requiredField := range tool.Tool.Schema.Required { - _, exists := tool.Tool.Schema.Properties[requiredField] + for _, requiredField := range tool.Schema.Required { + _, exists := tool.Schema.Properties[requiredField] require.True(t, exists, "Tool %q requires field %q but it is not defined in Properties", - tool.Tool.Name, requiredField) + tool.Name, requiredField) } }) } @@ -652,7 +656,7 @@ func TestToolSchemaFields(t *testing.T) { func TestMain(m *testing.M) { // Initialize testedTools for _, tool := range toolsdk.All { - testedTools.Store(tool.Tool.Name, false) + testedTools.Store(tool.Name, false) } code := m.Run() @@ -660,8 +664,12 @@ func TestMain(m *testing.M) { // Ensure all tools have been tested var untested []string for _, tool := range toolsdk.All { - if tested, ok := testedTools.Load(tool.Tool.Name); !ok || !tested.(bool) { - untested = append(untested, tool.Tool.Name) + if tested, ok := testedTools.Load(tool.Name); !ok || !tested.(bool) { + // Test is skipped on Windows + if runtime.GOOS == "windows" && tool.Name == "coder_workspace_bash" { + continue + } + untested = append(untested, tool.Name) } } diff --git a/codersdk/workspaceagents.go b/codersdk/workspaceagents.go index 1eb37bb07c989..4f3faedb534fc 100644 --- a/codersdk/workspaceagents.go +++ b/codersdk/workspaceagents.go @@ -550,7 +550,9 @@ func (c *Client) WatchWorkspaceAgentContainers(ctx context.Context, agentID uuid }}) conn, res, err := websocket.Dial(ctx, reqURL.String(), &websocket.DialOptions{ - CompressionMode: websocket.CompressionDisabled, + // We want `NoContextTakeover` compression to balance improving + // bandwidth cost/latency with minimal memory usage overhead. + CompressionMode: websocket.CompressionNoContextTakeover, HTTPClient: &http.Client{ Jar: jar, Transport: c.HTTPClient.Transport, @@ -563,6 +565,12 @@ func (c *Client) WatchWorkspaceAgentContainers(ctx context.Context, agentID uuid return nil, nil, ReadBodyAsError(res) } + // When a workspace has a few devcontainers running, or a single devcontainer + // has a large amount of apps, then each payload can easily exceed 32KiB. + // We up the limit to 4MiB to give us plenty of headroom for workspaces that + // have lots of dev containers with lots of apps. + conn.SetReadLimit(1 << 22) // 4MiB + d := wsjson.NewDecoder[WorkspaceAgentListContainersResponse](conn, websocket.MessageText, c.logger) return d.Chan(), d, nil } diff --git a/codersdk/workspacebuilds.go b/codersdk/workspacebuilds.go index 53d2a89290bca..bb9511178c7f4 100644 --- a/codersdk/workspacebuilds.go +++ b/codersdk/workspacebuilds.go @@ -90,6 +90,7 @@ type WorkspaceBuild struct { TemplateVersionPresetID *uuid.UUID `json:"template_version_preset_id" format:"uuid"` HasAITask *bool `json:"has_ai_task,omitempty"` AITaskSidebarAppID *uuid.UUID `json:"ai_task_sidebar_app_id,omitempty" format:"uuid"` + HasExternalAgent *bool `json:"has_external_agent,omitempty"` } // WorkspaceResource describes resources used to create a workspace, for instance: diff --git a/codersdk/workspaces.go b/codersdk/workspaces.go index dee2e1b838cb9..39d52325df448 100644 --- a/codersdk/workspaces.go +++ b/codersdk/workspaces.go @@ -662,3 +662,50 @@ func (c *Client) WorkspaceTimings(ctx context.Context, id uuid.UUID) (WorkspaceB var timings WorkspaceBuildTimings return timings, json.NewDecoder(res.Body).Decode(&timings) } + +type UpdateWorkspaceACL struct { + // Keys must be valid UUIDs. To remove a user/group from the ACL use "" as the + // role name (available as a constant named `codersdk.WorkspaceRoleDeleted`) + UserRoles map[string]WorkspaceRole `json:"user_roles,omitempty"` + GroupRoles map[string]WorkspaceRole `json:"group_roles,omitempty"` +} + +type WorkspaceRole string + +const ( + WorkspaceRoleAdmin WorkspaceRole = "admin" + WorkspaceRoleUse WorkspaceRole = "use" + WorkspaceRoleDeleted WorkspaceRole = "" +) + +func (c *Client) UpdateWorkspaceACL(ctx context.Context, workspaceID uuid.UUID, req UpdateWorkspaceACL) error { + res, err := c.Request(ctx, http.MethodPatch, fmt.Sprintf("/api/v2/workspaces/%s/acl", workspaceID), req) + if err != nil { + return err + } + defer res.Body.Close() + if res.StatusCode != http.StatusNoContent { + return ReadBodyAsError(res) + } + return nil +} + +// ExternalAgentCredentials contains the credentials needed for an external agent to connect to Coder. +type ExternalAgentCredentials struct { + Command string `json:"command"` + AgentToken string `json:"agent_token"` +} + +func (c *Client) WorkspaceExternalAgentCredentials(ctx context.Context, workspaceID uuid.UUID, agentName string) (ExternalAgentCredentials, error) { + path := fmt.Sprintf("/api/v2/workspaces/%s/external-agent/%s/credentials", workspaceID.String(), agentName) + res, err := c.Request(ctx, http.MethodGet, path, nil) + if err != nil { + return ExternalAgentCredentials{}, err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return ExternalAgentCredentials{}, ReadBodyAsError(res) + } + var credentials ExternalAgentCredentials + return credentials, json.NewDecoder(res.Body).Decode(&credentials) +} diff --git a/codersdk/workspacesdk/agentconn.go b/codersdk/workspacesdk/agentconn.go index ce66d5e1b8a70..bb929c9ba2a04 100644 --- a/codersdk/workspacesdk/agentconn.go +++ b/codersdk/workspacesdk/agentconn.go @@ -34,8 +34,8 @@ import ( // to the WorkspaceAgentConn, or it may be shared in the case of coderd. If the // conn is shared and closing it is undesirable, you may return ErrNoClose from // opts.CloseFunc. This will ensure the underlying conn is not closed. -func NewAgentConn(conn *tailnet.Conn, opts AgentConnOptions) *AgentConn { - return &AgentConn{ +func NewAgentConn(conn *tailnet.Conn, opts AgentConnOptions) AgentConn { + return &agentConn{ Conn: conn, opts: opts, } @@ -43,23 +43,54 @@ func NewAgentConn(conn *tailnet.Conn, opts AgentConnOptions) *AgentConn { // AgentConn represents a connection to a workspace agent. // @typescript-ignore AgentConn -type AgentConn struct { +type AgentConn interface { + TailnetConn() *tailnet.Conn + + AwaitReachable(ctx context.Context) bool + Close() error + DebugLogs(ctx context.Context) ([]byte, error) + DebugMagicsock(ctx context.Context) ([]byte, error) + DebugManifest(ctx context.Context) ([]byte, error) + DialContext(ctx context.Context, network string, addr string) (net.Conn, error) + GetPeerDiagnostics() tailnet.PeerDiagnostics + ListContainers(ctx context.Context) (codersdk.WorkspaceAgentListContainersResponse, error) + ListeningPorts(ctx context.Context) (codersdk.WorkspaceAgentListeningPortsResponse, error) + Netcheck(ctx context.Context) (healthsdk.AgentNetcheckReport, error) + Ping(ctx context.Context) (time.Duration, bool, *ipnstate.PingResult, error) + PrometheusMetrics(ctx context.Context) ([]byte, error) + ReconnectingPTY(ctx context.Context, id uuid.UUID, height uint16, width uint16, command string, initOpts ...AgentReconnectingPTYInitOption) (net.Conn, error) + RecreateDevcontainer(ctx context.Context, devcontainerID string) (codersdk.Response, error) + SSH(ctx context.Context) (*gonet.TCPConn, error) + SSHClient(ctx context.Context) (*ssh.Client, error) + SSHClientOnPort(ctx context.Context, port uint16) (*ssh.Client, error) + SSHOnPort(ctx context.Context, port uint16) (*gonet.TCPConn, error) + Speedtest(ctx context.Context, direction speedtest.Direction, duration time.Duration) ([]speedtest.Result, error) + WatchContainers(ctx context.Context, logger slog.Logger) (<-chan codersdk.WorkspaceAgentListContainersResponse, io.Closer, error) +} + +// AgentConn represents a connection to a workspace agent. +// @typescript-ignore AgentConn +type agentConn struct { *tailnet.Conn opts AgentConnOptions } +func (c *agentConn) TailnetConn() *tailnet.Conn { + return c.Conn +} + // @typescript-ignore AgentConnOptions type AgentConnOptions struct { AgentID uuid.UUID CloseFunc func() error } -func (c *AgentConn) agentAddress() netip.Addr { +func (c *agentConn) agentAddress() netip.Addr { return tailnet.TailscaleServicePrefix.AddrFromUUID(c.opts.AgentID) } // AwaitReachable waits for the agent to be reachable. -func (c *AgentConn) AwaitReachable(ctx context.Context) bool { +func (c *agentConn) AwaitReachable(ctx context.Context) bool { ctx, span := tracing.StartSpan(ctx) defer span.End() @@ -68,7 +99,7 @@ func (c *AgentConn) AwaitReachable(ctx context.Context) bool { // Ping pings the agent and returns the round-trip time. // The bool returns true if the ping was made P2P. -func (c *AgentConn) Ping(ctx context.Context) (time.Duration, bool, *ipnstate.PingResult, error) { +func (c *agentConn) Ping(ctx context.Context) (time.Duration, bool, *ipnstate.PingResult, error) { ctx, span := tracing.StartSpan(ctx) defer span.End() @@ -76,7 +107,7 @@ func (c *AgentConn) Ping(ctx context.Context) (time.Duration, bool, *ipnstate.Pi } // Close ends the connection to the workspace agent. -func (c *AgentConn) Close() error { +func (c *agentConn) Close() error { var cerr error if c.opts.CloseFunc != nil { cerr = c.opts.CloseFunc() @@ -131,7 +162,7 @@ type ReconnectingPTYRequest struct { // ReconnectingPTY spawns a new reconnecting terminal session. // `ReconnectingPTYRequest` should be JSON marshaled and written to the returned net.Conn. // Raw terminal output will be read from the returned net.Conn. -func (c *AgentConn) ReconnectingPTY(ctx context.Context, id uuid.UUID, height, width uint16, command string, initOpts ...AgentReconnectingPTYInitOption) (net.Conn, error) { +func (c *agentConn) ReconnectingPTY(ctx context.Context, id uuid.UUID, height, width uint16, command string, initOpts ...AgentReconnectingPTYInitOption) (net.Conn, error) { ctx, span := tracing.StartSpan(ctx) defer span.End() @@ -171,13 +202,13 @@ func (c *AgentConn) ReconnectingPTY(ctx context.Context, id uuid.UUID, height, w // SSH pipes the SSH protocol over the returned net.Conn. // This connects to the built-in SSH server in the workspace agent. -func (c *AgentConn) SSH(ctx context.Context) (*gonet.TCPConn, error) { +func (c *agentConn) SSH(ctx context.Context) (*gonet.TCPConn, error) { return c.SSHOnPort(ctx, AgentSSHPort) } // SSHOnPort pipes the SSH protocol over the returned net.Conn. // This connects to the built-in SSH server in the workspace agent on the specified port. -func (c *AgentConn) SSHOnPort(ctx context.Context, port uint16) (*gonet.TCPConn, error) { +func (c *agentConn) SSHOnPort(ctx context.Context, port uint16) (*gonet.TCPConn, error) { ctx, span := tracing.StartSpan(ctx) defer span.End() @@ -190,12 +221,12 @@ func (c *AgentConn) SSHOnPort(ctx context.Context, port uint16) (*gonet.TCPConn, } // SSHClient calls SSH to create a client -func (c *AgentConn) SSHClient(ctx context.Context) (*ssh.Client, error) { +func (c *agentConn) SSHClient(ctx context.Context) (*ssh.Client, error) { return c.SSHClientOnPort(ctx, AgentSSHPort) } // SSHClientOnPort calls SSH to create a client on a specific port -func (c *AgentConn) SSHClientOnPort(ctx context.Context, port uint16) (*ssh.Client, error) { +func (c *agentConn) SSHClientOnPort(ctx context.Context, port uint16) (*ssh.Client, error) { ctx, span := tracing.StartSpan(ctx) defer span.End() @@ -218,7 +249,7 @@ func (c *AgentConn) SSHClientOnPort(ctx context.Context, port uint16) (*ssh.Clie } // Speedtest runs a speedtest against the workspace agent. -func (c *AgentConn) Speedtest(ctx context.Context, direction speedtest.Direction, duration time.Duration) ([]speedtest.Result, error) { +func (c *agentConn) Speedtest(ctx context.Context, direction speedtest.Direction, duration time.Duration) ([]speedtest.Result, error) { ctx, span := tracing.StartSpan(ctx) defer span.End() @@ -242,7 +273,7 @@ func (c *AgentConn) Speedtest(ctx context.Context, direction speedtest.Direction // DialContext dials the address provided in the workspace agent. // The network must be "tcp" or "udp". -func (c *AgentConn) DialContext(ctx context.Context, network string, addr string) (net.Conn, error) { +func (c *agentConn) DialContext(ctx context.Context, network string, addr string) (net.Conn, error) { ctx, span := tracing.StartSpan(ctx) defer span.End() @@ -265,7 +296,7 @@ func (c *AgentConn) DialContext(ctx context.Context, network string, addr string } // ListeningPorts lists the ports that are currently in use by the workspace. -func (c *AgentConn) ListeningPorts(ctx context.Context) (codersdk.WorkspaceAgentListeningPortsResponse, error) { +func (c *agentConn) ListeningPorts(ctx context.Context) (codersdk.WorkspaceAgentListeningPortsResponse, error) { ctx, span := tracing.StartSpan(ctx) defer span.End() res, err := c.apiRequest(ctx, http.MethodGet, "/api/v0/listening-ports", nil) @@ -282,7 +313,7 @@ func (c *AgentConn) ListeningPorts(ctx context.Context) (codersdk.WorkspaceAgent } // Netcheck returns a network check report from the workspace agent. -func (c *AgentConn) Netcheck(ctx context.Context) (healthsdk.AgentNetcheckReport, error) { +func (c *agentConn) Netcheck(ctx context.Context) (healthsdk.AgentNetcheckReport, error) { ctx, span := tracing.StartSpan(ctx) defer span.End() res, err := c.apiRequest(ctx, http.MethodGet, "/api/v0/netcheck", nil) @@ -299,7 +330,7 @@ func (c *AgentConn) Netcheck(ctx context.Context) (healthsdk.AgentNetcheckReport } // DebugMagicsock makes a request to the workspace agent's magicsock debug endpoint. -func (c *AgentConn) DebugMagicsock(ctx context.Context) ([]byte, error) { +func (c *agentConn) DebugMagicsock(ctx context.Context) ([]byte, error) { ctx, span := tracing.StartSpan(ctx) defer span.End() res, err := c.apiRequest(ctx, http.MethodGet, "/debug/magicsock", nil) @@ -319,7 +350,7 @@ func (c *AgentConn) DebugMagicsock(ctx context.Context) ([]byte, error) { // DebugManifest returns the agent's in-memory manifest. Unfortunately this must // be returns as a []byte to avoid an import cycle. -func (c *AgentConn) DebugManifest(ctx context.Context) ([]byte, error) { +func (c *agentConn) DebugManifest(ctx context.Context) ([]byte, error) { ctx, span := tracing.StartSpan(ctx) defer span.End() res, err := c.apiRequest(ctx, http.MethodGet, "/debug/manifest", nil) @@ -338,7 +369,7 @@ func (c *AgentConn) DebugManifest(ctx context.Context) ([]byte, error) { } // DebugLogs returns up to the last 10MB of `/tmp/coder-agent.log` -func (c *AgentConn) DebugLogs(ctx context.Context) ([]byte, error) { +func (c *agentConn) DebugLogs(ctx context.Context) ([]byte, error) { ctx, span := tracing.StartSpan(ctx) defer span.End() res, err := c.apiRequest(ctx, http.MethodGet, "/debug/logs", nil) @@ -357,7 +388,7 @@ func (c *AgentConn) DebugLogs(ctx context.Context) ([]byte, error) { } // PrometheusMetrics returns a response from the agent's prometheus metrics endpoint -func (c *AgentConn) PrometheusMetrics(ctx context.Context) ([]byte, error) { +func (c *agentConn) PrometheusMetrics(ctx context.Context) ([]byte, error) { ctx, span := tracing.StartSpan(ctx) defer span.End() res, err := c.apiRequest(ctx, http.MethodGet, "/debug/prometheus", nil) @@ -376,7 +407,7 @@ func (c *AgentConn) PrometheusMetrics(ctx context.Context) ([]byte, error) { } // ListContainers returns a response from the agent's containers endpoint -func (c *AgentConn) ListContainers(ctx context.Context) (codersdk.WorkspaceAgentListContainersResponse, error) { +func (c *agentConn) ListContainers(ctx context.Context) (codersdk.WorkspaceAgentListContainersResponse, error) { ctx, span := tracing.StartSpan(ctx) defer span.End() res, err := c.apiRequest(ctx, http.MethodGet, "/api/v0/containers", nil) @@ -391,7 +422,7 @@ func (c *AgentConn) ListContainers(ctx context.Context) (codersdk.WorkspaceAgent return resp, json.NewDecoder(res.Body).Decode(&resp) } -func (c *AgentConn) WatchContainers(ctx context.Context, logger slog.Logger) (<-chan codersdk.WorkspaceAgentListContainersResponse, io.Closer, error) { +func (c *agentConn) WatchContainers(ctx context.Context, logger slog.Logger) (<-chan codersdk.WorkspaceAgentListContainersResponse, io.Closer, error) { ctx, span := tracing.StartSpan(ctx) defer span.End() @@ -400,6 +431,10 @@ func (c *AgentConn) WatchContainers(ctx context.Context, logger slog.Logger) (<- conn, res, err := websocket.Dial(ctx, url, &websocket.DialOptions{ HTTPClient: c.apiClient(), + + // We want `NoContextTakeover` compression to balance improving + // bandwidth cost/latency with minimal memory usage overhead. + CompressionMode: websocket.CompressionNoContextTakeover, }) if err != nil { if res == nil { @@ -411,13 +446,19 @@ func (c *AgentConn) WatchContainers(ctx context.Context, logger slog.Logger) (<- defer res.Body.Close() } + // When a workspace has a few devcontainers running, or a single devcontainer + // has a large amount of apps, then each payload can easily exceed 32KiB. + // We up the limit to 4MiB to give us plenty of headroom for workspaces that + // have lots of dev containers with lots of apps. + conn.SetReadLimit(1 << 22) // 4MiB + d := wsjson.NewDecoder[codersdk.WorkspaceAgentListContainersResponse](conn, websocket.MessageText, logger) return d.Chan(), d, nil } // RecreateDevcontainer recreates a devcontainer with the given container. // This is a blocking call and will wait for the container to be recreated. -func (c *AgentConn) RecreateDevcontainer(ctx context.Context, devcontainerID string) (codersdk.Response, error) { +func (c *agentConn) RecreateDevcontainer(ctx context.Context, devcontainerID string) (codersdk.Response, error) { ctx, span := tracing.StartSpan(ctx) defer span.End() res, err := c.apiRequest(ctx, http.MethodPost, "/api/v0/containers/devcontainers/"+devcontainerID+"/recreate", nil) @@ -436,7 +477,7 @@ func (c *AgentConn) RecreateDevcontainer(ctx context.Context, devcontainerID str } // apiRequest makes a request to the workspace agent's HTTP API server. -func (c *AgentConn) apiRequest(ctx context.Context, method, path string, body io.Reader) (*http.Response, error) { +func (c *agentConn) apiRequest(ctx context.Context, method, path string, body io.Reader) (*http.Response, error) { ctx, span := tracing.StartSpan(ctx) defer span.End() @@ -453,7 +494,7 @@ func (c *AgentConn) apiRequest(ctx context.Context, method, path string, body io // apiClient returns an HTTP client that can be used to make // requests to the workspace agent's HTTP API server. -func (c *AgentConn) apiClient() *http.Client { +func (c *agentConn) apiClient() *http.Client { return &http.Client{ Transport: &http.Transport{ // Disable keep alives as we're usually only making a single @@ -494,6 +535,6 @@ func (c *AgentConn) apiClient() *http.Client { } } -func (c *AgentConn) GetPeerDiagnostics() tailnet.PeerDiagnostics { +func (c *agentConn) GetPeerDiagnostics() tailnet.PeerDiagnostics { return c.Conn.GetPeerDiagnostics(c.opts.AgentID) } diff --git a/codersdk/workspacesdk/agentconnmock/agentconnmock.go b/codersdk/workspacesdk/agentconnmock/agentconnmock.go new file mode 100644 index 0000000000000..eb55bb27938c0 --- /dev/null +++ b/codersdk/workspacesdk/agentconnmock/agentconnmock.go @@ -0,0 +1,373 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: .. (interfaces: AgentConn) +// +// Generated by this command: +// +// mockgen -destination ./agentconnmock.go -package agentconnmock .. AgentConn +// + +// Package agentconnmock is a generated GoMock package. +package agentconnmock + +import ( + context "context" + io "io" + net "net" + reflect "reflect" + time "time" + + slog "cdr.dev/slog" + codersdk "github.com/coder/coder/v2/codersdk" + healthsdk "github.com/coder/coder/v2/codersdk/healthsdk" + workspacesdk "github.com/coder/coder/v2/codersdk/workspacesdk" + tailnet "github.com/coder/coder/v2/tailnet" + uuid "github.com/google/uuid" + gomock "go.uber.org/mock/gomock" + ssh "golang.org/x/crypto/ssh" + gonet "gvisor.dev/gvisor/pkg/tcpip/adapters/gonet" + ipnstate "tailscale.com/ipn/ipnstate" + speedtest "tailscale.com/net/speedtest" +) + +// MockAgentConn is a mock of AgentConn interface. +type MockAgentConn struct { + ctrl *gomock.Controller + recorder *MockAgentConnMockRecorder + isgomock struct{} +} + +// MockAgentConnMockRecorder is the mock recorder for MockAgentConn. +type MockAgentConnMockRecorder struct { + mock *MockAgentConn +} + +// NewMockAgentConn creates a new mock instance. +func NewMockAgentConn(ctrl *gomock.Controller) *MockAgentConn { + mock := &MockAgentConn{ctrl: ctrl} + mock.recorder = &MockAgentConnMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockAgentConn) EXPECT() *MockAgentConnMockRecorder { + return m.recorder +} + +// AwaitReachable mocks base method. +func (m *MockAgentConn) AwaitReachable(ctx context.Context) bool { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "AwaitReachable", ctx) + ret0, _ := ret[0].(bool) + return ret0 +} + +// AwaitReachable indicates an expected call of AwaitReachable. +func (mr *MockAgentConnMockRecorder) AwaitReachable(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AwaitReachable", reflect.TypeOf((*MockAgentConn)(nil).AwaitReachable), ctx) +} + +// Close mocks base method. +func (m *MockAgentConn) Close() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Close") + ret0, _ := ret[0].(error) + return ret0 +} + +// Close indicates an expected call of Close. +func (mr *MockAgentConnMockRecorder) Close() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockAgentConn)(nil).Close)) +} + +// DebugLogs mocks base method. +func (m *MockAgentConn) DebugLogs(ctx context.Context) ([]byte, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DebugLogs", ctx) + ret0, _ := ret[0].([]byte) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// DebugLogs indicates an expected call of DebugLogs. +func (mr *MockAgentConnMockRecorder) DebugLogs(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DebugLogs", reflect.TypeOf((*MockAgentConn)(nil).DebugLogs), ctx) +} + +// DebugMagicsock mocks base method. +func (m *MockAgentConn) DebugMagicsock(ctx context.Context) ([]byte, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DebugMagicsock", ctx) + ret0, _ := ret[0].([]byte) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// DebugMagicsock indicates an expected call of DebugMagicsock. +func (mr *MockAgentConnMockRecorder) DebugMagicsock(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DebugMagicsock", reflect.TypeOf((*MockAgentConn)(nil).DebugMagicsock), ctx) +} + +// DebugManifest mocks base method. +func (m *MockAgentConn) DebugManifest(ctx context.Context) ([]byte, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DebugManifest", ctx) + ret0, _ := ret[0].([]byte) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// DebugManifest indicates an expected call of DebugManifest. +func (mr *MockAgentConnMockRecorder) DebugManifest(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DebugManifest", reflect.TypeOf((*MockAgentConn)(nil).DebugManifest), ctx) +} + +// DialContext mocks base method. +func (m *MockAgentConn) DialContext(ctx context.Context, network, addr string) (net.Conn, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DialContext", ctx, network, addr) + ret0, _ := ret[0].(net.Conn) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// DialContext indicates an expected call of DialContext. +func (mr *MockAgentConnMockRecorder) DialContext(ctx, network, addr any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DialContext", reflect.TypeOf((*MockAgentConn)(nil).DialContext), ctx, network, addr) +} + +// GetPeerDiagnostics mocks base method. +func (m *MockAgentConn) GetPeerDiagnostics() tailnet.PeerDiagnostics { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetPeerDiagnostics") + ret0, _ := ret[0].(tailnet.PeerDiagnostics) + return ret0 +} + +// GetPeerDiagnostics indicates an expected call of GetPeerDiagnostics. +func (mr *MockAgentConnMockRecorder) GetPeerDiagnostics() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetPeerDiagnostics", reflect.TypeOf((*MockAgentConn)(nil).GetPeerDiagnostics)) +} + +// ListContainers mocks base method. +func (m *MockAgentConn) ListContainers(ctx context.Context) (codersdk.WorkspaceAgentListContainersResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListContainers", ctx) + ret0, _ := ret[0].(codersdk.WorkspaceAgentListContainersResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListContainers indicates an expected call of ListContainers. +func (mr *MockAgentConnMockRecorder) ListContainers(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListContainers", reflect.TypeOf((*MockAgentConn)(nil).ListContainers), ctx) +} + +// ListeningPorts mocks base method. +func (m *MockAgentConn) ListeningPorts(ctx context.Context) (codersdk.WorkspaceAgentListeningPortsResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListeningPorts", ctx) + ret0, _ := ret[0].(codersdk.WorkspaceAgentListeningPortsResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListeningPorts indicates an expected call of ListeningPorts. +func (mr *MockAgentConnMockRecorder) ListeningPorts(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListeningPorts", reflect.TypeOf((*MockAgentConn)(nil).ListeningPorts), ctx) +} + +// Netcheck mocks base method. +func (m *MockAgentConn) Netcheck(ctx context.Context) (healthsdk.AgentNetcheckReport, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Netcheck", ctx) + ret0, _ := ret[0].(healthsdk.AgentNetcheckReport) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Netcheck indicates an expected call of Netcheck. +func (mr *MockAgentConnMockRecorder) Netcheck(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Netcheck", reflect.TypeOf((*MockAgentConn)(nil).Netcheck), ctx) +} + +// Ping mocks base method. +func (m *MockAgentConn) Ping(ctx context.Context) (time.Duration, bool, *ipnstate.PingResult, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Ping", ctx) + ret0, _ := ret[0].(time.Duration) + ret1, _ := ret[1].(bool) + ret2, _ := ret[2].(*ipnstate.PingResult) + ret3, _ := ret[3].(error) + return ret0, ret1, ret2, ret3 +} + +// Ping indicates an expected call of Ping. +func (mr *MockAgentConnMockRecorder) Ping(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Ping", reflect.TypeOf((*MockAgentConn)(nil).Ping), ctx) +} + +// PrometheusMetrics mocks base method. +func (m *MockAgentConn) PrometheusMetrics(ctx context.Context) ([]byte, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "PrometheusMetrics", ctx) + ret0, _ := ret[0].([]byte) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// PrometheusMetrics indicates an expected call of PrometheusMetrics. +func (mr *MockAgentConnMockRecorder) PrometheusMetrics(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PrometheusMetrics", reflect.TypeOf((*MockAgentConn)(nil).PrometheusMetrics), ctx) +} + +// ReconnectingPTY mocks base method. +func (m *MockAgentConn) ReconnectingPTY(ctx context.Context, id uuid.UUID, height, width uint16, command string, initOpts ...workspacesdk.AgentReconnectingPTYInitOption) (net.Conn, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, id, height, width, command} + for _, a := range initOpts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "ReconnectingPTY", varargs...) + ret0, _ := ret[0].(net.Conn) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ReconnectingPTY indicates an expected call of ReconnectingPTY. +func (mr *MockAgentConnMockRecorder) ReconnectingPTY(ctx, id, height, width, command any, initOpts ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, id, height, width, command}, initOpts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ReconnectingPTY", reflect.TypeOf((*MockAgentConn)(nil).ReconnectingPTY), varargs...) +} + +// RecreateDevcontainer mocks base method. +func (m *MockAgentConn) RecreateDevcontainer(ctx context.Context, devcontainerID string) (codersdk.Response, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RecreateDevcontainer", ctx, devcontainerID) + ret0, _ := ret[0].(codersdk.Response) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// RecreateDevcontainer indicates an expected call of RecreateDevcontainer. +func (mr *MockAgentConnMockRecorder) RecreateDevcontainer(ctx, devcontainerID any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RecreateDevcontainer", reflect.TypeOf((*MockAgentConn)(nil).RecreateDevcontainer), ctx, devcontainerID) +} + +// SSH mocks base method. +func (m *MockAgentConn) SSH(ctx context.Context) (*gonet.TCPConn, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SSH", ctx) + ret0, _ := ret[0].(*gonet.TCPConn) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// SSH indicates an expected call of SSH. +func (mr *MockAgentConnMockRecorder) SSH(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SSH", reflect.TypeOf((*MockAgentConn)(nil).SSH), ctx) +} + +// SSHClient mocks base method. +func (m *MockAgentConn) SSHClient(ctx context.Context) (*ssh.Client, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SSHClient", ctx) + ret0, _ := ret[0].(*ssh.Client) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// SSHClient indicates an expected call of SSHClient. +func (mr *MockAgentConnMockRecorder) SSHClient(ctx any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SSHClient", reflect.TypeOf((*MockAgentConn)(nil).SSHClient), ctx) +} + +// SSHClientOnPort mocks base method. +func (m *MockAgentConn) SSHClientOnPort(ctx context.Context, port uint16) (*ssh.Client, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SSHClientOnPort", ctx, port) + ret0, _ := ret[0].(*ssh.Client) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// SSHClientOnPort indicates an expected call of SSHClientOnPort. +func (mr *MockAgentConnMockRecorder) SSHClientOnPort(ctx, port any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SSHClientOnPort", reflect.TypeOf((*MockAgentConn)(nil).SSHClientOnPort), ctx, port) +} + +// SSHOnPort mocks base method. +func (m *MockAgentConn) SSHOnPort(ctx context.Context, port uint16) (*gonet.TCPConn, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SSHOnPort", ctx, port) + ret0, _ := ret[0].(*gonet.TCPConn) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// SSHOnPort indicates an expected call of SSHOnPort. +func (mr *MockAgentConnMockRecorder) SSHOnPort(ctx, port any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SSHOnPort", reflect.TypeOf((*MockAgentConn)(nil).SSHOnPort), ctx, port) +} + +// Speedtest mocks base method. +func (m *MockAgentConn) Speedtest(ctx context.Context, direction speedtest.Direction, duration time.Duration) ([]speedtest.Result, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Speedtest", ctx, direction, duration) + ret0, _ := ret[0].([]speedtest.Result) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Speedtest indicates an expected call of Speedtest. +func (mr *MockAgentConnMockRecorder) Speedtest(ctx, direction, duration any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Speedtest", reflect.TypeOf((*MockAgentConn)(nil).Speedtest), ctx, direction, duration) +} + +// TailnetConn mocks base method. +func (m *MockAgentConn) TailnetConn() *tailnet.Conn { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "TailnetConn") + ret0, _ := ret[0].(*tailnet.Conn) + return ret0 +} + +// TailnetConn indicates an expected call of TailnetConn. +func (mr *MockAgentConnMockRecorder) TailnetConn() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "TailnetConn", reflect.TypeOf((*MockAgentConn)(nil).TailnetConn)) +} + +// WatchContainers mocks base method. +func (m *MockAgentConn) WatchContainers(ctx context.Context, logger slog.Logger) (<-chan codersdk.WorkspaceAgentListContainersResponse, io.Closer, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "WatchContainers", ctx, logger) + ret0, _ := ret[0].(<-chan codersdk.WorkspaceAgentListContainersResponse) + ret1, _ := ret[1].(io.Closer) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 +} + +// WatchContainers indicates an expected call of WatchContainers. +func (mr *MockAgentConnMockRecorder) WatchContainers(ctx, logger any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "WatchContainers", reflect.TypeOf((*MockAgentConn)(nil).WatchContainers), ctx, logger) +} diff --git a/codersdk/workspacesdk/agentconnmock/doc.go b/codersdk/workspacesdk/agentconnmock/doc.go new file mode 100644 index 0000000000000..a795b21a4a89d --- /dev/null +++ b/codersdk/workspacesdk/agentconnmock/doc.go @@ -0,0 +1,4 @@ +// Package agentconnmock contains a mock implementation of workspacesdk.AgentConn for use in tests. +package agentconnmock + +//go:generate mockgen -destination ./agentconnmock.go -package agentconnmock .. AgentConn diff --git a/codersdk/workspacesdk/dialer.go b/codersdk/workspacesdk/dialer.go index 71cac0c5f04b1..39d02931e6ae1 100644 --- a/codersdk/workspacesdk/dialer.go +++ b/codersdk/workspacesdk/dialer.go @@ -24,6 +24,10 @@ var permanentErrorStatuses = []int{ http.StatusBadRequest, // returned if API mismatch http.StatusNotFound, // returned if user doesn't have permission or agent doesn't exist http.StatusInternalServerError, // returned if database is not reachable, + http.StatusForbidden, // returned if user is not authorized + // StatusUnauthorized is only a permanent error if the error is not due to + // an invalid resume token. See `checkResumeTokenFailure`. + http.StatusUnauthorized, } type WebsocketDialer struct { @@ -39,6 +43,24 @@ type WebsocketDialer struct { isFirst bool } +// checkResumeTokenFailure checks if the parsed error indicates a resume token failure +// and updates the resumeTokenFailed flag accordingly. Returns true if a resume token +// failure was detected. +func (w *WebsocketDialer) checkResumeTokenFailure(ctx context.Context, sdkErr *codersdk.Error) bool { + if sdkErr == nil { + return false + } + + for _, v := range sdkErr.Validations { + if v.Field == "resume_token" { + w.logger.Warn(ctx, "failed to dial tailnet v2+ API: server replied invalid resume token; unsetting for next connection attempt") + w.resumeTokenFailed = true + return true + } + } + return false +} + type WebsocketDialerOption func(*WebsocketDialer) func WithWorkspaceUpdates(req *proto.WorkspaceUpdatesRequest) WebsocketDialerOption { @@ -82,9 +104,14 @@ func (w *WebsocketDialer) Dial(ctx context.Context, r tailnet.ResumeTokenControl if w.isFirst { if res != nil && slices.Contains(permanentErrorStatuses, res.StatusCode) { err = codersdk.ReadBodyAsError(res) - // A bit more human-readable help in the case the API version was rejected var sdkErr *codersdk.Error if xerrors.As(err, &sdkErr) { + // Check for resume token failure first + if w.checkResumeTokenFailure(ctx, sdkErr) { + return tailnet.ControlProtocolClients{}, err + } + + // A bit more human-readable help in the case the API version was rejected if sdkErr.Message == AgentAPIMismatchMessage && sdkErr.StatusCode() == http.StatusBadRequest { sdkErr.Helper = fmt.Sprintf( @@ -107,13 +134,8 @@ func (w *WebsocketDialer) Dial(ctx context.Context, r tailnet.ResumeTokenControl bodyErr := codersdk.ReadBodyAsError(res) var sdkErr *codersdk.Error if xerrors.As(bodyErr, &sdkErr) { - for _, v := range sdkErr.Validations { - if v.Field == "resume_token" { - // Unset the resume token for the next attempt - w.logger.Warn(ctx, "failed to dial tailnet v2+ API: server replied invalid resume token; unsetting for next connection attempt") - w.resumeTokenFailed = true - return tailnet.ControlProtocolClients{}, err - } + if w.checkResumeTokenFailure(ctx, sdkErr) { + return tailnet.ControlProtocolClients{}, err } } if !errors.Is(err, context.Canceled) { diff --git a/codersdk/workspacesdk/dialer_test.go b/codersdk/workspacesdk/dialer_test.go index dbe351e4e492c..227299d43afda 100644 --- a/codersdk/workspacesdk/dialer_test.go +++ b/codersdk/workspacesdk/dialer_test.go @@ -270,6 +270,46 @@ func TestWebsocketDialer_ResumeTokenFailure(t *testing.T) { require.Error(t, err) } +func TestWebsocketDialer_UnauthenticatedFailFast(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) + logger := slogtest.Make(t, &slogtest.Options{ + IgnoreErrors: true, + }).Leveled(slog.LevelDebug) + + svr := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + httpapi.Write(ctx, w, http.StatusUnauthorized, codersdk.Response{}) + })) + defer svr.Close() + svrURL, err := url.Parse(svr.URL) + require.NoError(t, err) + + uut := workspacesdk.NewWebsocketDialer(logger, svrURL, &websocket.DialOptions{}) + + _, err = uut.Dial(ctx, nil) + require.Error(t, err) +} + +func TestWebsocketDialer_UnauthorizedFailFast(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) + logger := slogtest.Make(t, &slogtest.Options{ + IgnoreErrors: true, + }).Leveled(slog.LevelDebug) + + svr := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + httpapi.Write(ctx, w, http.StatusUnauthorized, codersdk.Response{}) + })) + defer svr.Close() + svrURL, err := url.Parse(svr.URL) + require.NoError(t, err) + + uut := workspacesdk.NewWebsocketDialer(logger, svrURL, &websocket.DialOptions{}) + + _, err = uut.Dial(ctx, nil) + require.Error(t, err) +} + func TestWebsocketDialer_UplevelVersion(t *testing.T) { t.Parallel() ctx := testutil.Context(t, testutil.WaitShort) diff --git a/codersdk/workspacesdk/workspacesdk.go b/codersdk/workspacesdk/workspacesdk.go index 9f587cf5267a8..ddaec06388238 100644 --- a/codersdk/workspacesdk/workspacesdk.go +++ b/codersdk/workspacesdk/workspacesdk.go @@ -202,7 +202,7 @@ func (c *Client) RewriteDERPMap(derpMap *tailcfg.DERPMap) { tailnet.RewriteDERPMapDefaultRelay(context.Background(), c.client.Logger(), derpMap, c.client.URL) } -func (c *Client) DialAgent(dialCtx context.Context, agentID uuid.UUID, options *DialAgentOptions) (agentConn *AgentConn, err error) { +func (c *Client) DialAgent(dialCtx context.Context, agentID uuid.UUID, options *DialAgentOptions) (agentConn AgentConn, err error) { if options == nil { options = &DialAgentOptions{} } diff --git a/cryptorand/numbers.go b/cryptorand/numbers.go index d6a4889b80562..ea1e522a37b0a 100644 --- a/cryptorand/numbers.go +++ b/cryptorand/numbers.go @@ -47,6 +47,12 @@ func Int63() (int64, error) { return rng.Int63(), cs.err } +// Int63n returns a non-negative integer in [0,maxVal) as an int64. +func Int63n(maxVal int64) (int64, error) { + rng, cs := secureRand() + return rng.Int63n(maxVal), cs.err +} + // Intn returns a non-negative integer in [0,maxVal) as an int. func Intn(maxVal int) (int, error) { rng, cs := secureRand() diff --git a/cryptorand/numbers_test.go b/cryptorand/numbers_test.go index aec9c89a7476c..dd47d942dc4e4 100644 --- a/cryptorand/numbers_test.go +++ b/cryptorand/numbers_test.go @@ -19,6 +19,27 @@ func TestInt63(t *testing.T) { } } +func TestInt63n(t *testing.T) { + t.Parallel() + + for i := 0; i < 20; i++ { + v, err := cryptorand.Int63n(100) + require.NoError(t, err, "unexpected error from Int63n") + t.Logf("value: %v <- random?", v) + require.GreaterOrEqual(t, v, int64(0), "values must be positive") + require.Less(t, v, int64(100), "values must be less than 100") + } + + // Ensure Int63n works for int larger than 32 bits + _, err := cryptorand.Int63n(1 << 35) + require.NoError(t, err, "expected Int63n to work for 64-bit int") + + // Expect a panic if max is negative + require.PanicsWithValue(t, "invalid argument to Int63n", func() { + cryptorand.Int63n(0) + }) +} + func TestIntn(t *testing.T) { t.Parallel() diff --git a/docs/about/contributing/AI_CONTRIBUTING.md b/docs/about/contributing/AI_CONTRIBUTING.md new file mode 100644 index 0000000000000..8771528f0c1ce --- /dev/null +++ b/docs/about/contributing/AI_CONTRIBUTING.md @@ -0,0 +1,32 @@ +# AI Contribution Guidelines + +This document defines rules for contributions where an AI system is the primary author of the code (i.e., most of the pull request was generated by AI). +It applies to all Coder repositories and is a supplement to the [existing contributing guidelines](./CONTRIBUTING.md), not a replacement. + +For minor AI-assisted edits, suggestions, or completions where the human contributor is clearly the primary author, these rules do not apply — standard contributing guidelines are sufficient. + +## Disclosure + +Contributors must **disclose AI involvement** in the pull request description whenever these guidelines apply. + +## Human Ownership & Attribution + +- All pull requests must be opened under **user accounts linked to a human**, and not an application ("bot account"). +- Contributors are personally accountable for the content of their PRs, regardless of how it was generated. + +## Verification & Evidence + +All AI-assisted contributions require **manual verification**. +Contributions without verification evidence will be rejected. + +- Test your changes yourself. Don’t assume AI is correct. +- Provide screenshots showing that the change works as intended. + - For visual/UI changes: include before/after screenshots. + - For CLI or backend changes: include terminal or api output. + +## Why These Rules Exist + +Traditionally, maintainers assumed that producing a pull request required more effort than reviewing it. +With AI-assisted tools, the balance has shifted: generating code is often faster than reviewing it. + +Our guidelines exist to safeguard maintainers’ time, uphold contributor accountability, and preserve the overall quality of the project. diff --git a/docs/about/contributing/CONTRIBUTING.md b/docs/about/contributing/CONTRIBUTING.md index 8f4eb518bae76..98243d3790f77 100644 --- a/docs/about/contributing/CONTRIBUTING.md +++ b/docs/about/contributing/CONTRIBUTING.md @@ -96,6 +96,7 @@ Use the following `make` commands and scripts in development: This should return an empty list of workspaces. If you encounter an error, review the output from the [develop.sh](https://github.com/coder/coder/blob/main/scripts/develop.sh) script for issues. + > [!NOTE] > `coder-dev.sh` is a helper script that behaves like the regular coder CLI, but uses the binary built from your local source and shares the same configuration directory set up by `develop.sh`. This ensures your local changes are reflected when testing. > > The default user is `admin@coder.com` and the default password is `SomeSecurePassword!` @@ -235,6 +236,11 @@ Breaking changes can be triggered in two ways: [`release/breaking`](https://github.com/coder/coder/issues?q=sort%3Aupdated-desc+label%3Arelease%2Fbreaking) label to a PR that has, or will be, merged into `main`. +### Generative AI + +Using AI to help with contributions is acceptable, but only if the [AI Contribution Guidelines](./AI_CONTRIBUTING.md) +are followed. If most of your PR was generated by AI, please read and comply with these rules before submitting. + ### Security > [!CAUTION] diff --git a/docs/admin/integrations/jfrog-xray.md b/docs/admin/integrations/jfrog-xray.md deleted file mode 100644 index 194ea25bf8b6b..0000000000000 --- a/docs/admin/integrations/jfrog-xray.md +++ /dev/null @@ -1,68 +0,0 @@ -# Integrating JFrog Xray with Coder Kubernetes Workspaces - - -March 17, 2024 - ---- - -This guide describes the process of integrating [JFrog Xray](https://jfrog.com/xray/) to Coder Kubernetes-backed -workspaces using Coder's [JFrog Xray Integration](https://github.com/coder/coder-xray). - -## Prerequisites - -- A self-hosted JFrog Platform instance. -- Kubernetes workspaces running on Coder. - -## Deploy the **Coder - JFrog Xray** Integration - -1. Create a JFrog Platform [Access Token](https://jfrog.com/help/r/jfrog-platform-administration-documentation/access-tokens) with a user that has the `read` [permission](https://jfrog.com/help/r/jfrog-platform-administration-documentation/permissions) - for the repositories you want to scan. - -1. Create a Coder [token](../../reference/cli/tokens_create.md#tokens-create) with a user that has the [`owner`](../users/index.md#roles) role. - -1. Create Kubernetes secrets for the JFrog Xray and Coder tokens. - - ```bash - kubectl create secret generic coder-token \ - --from-literal=coder-token='' - ``` - - ```bash - kubectl create secret generic jfrog-token \ - --from-literal=user='' \ - --from-literal=token='' - ``` - -1. Deploy the **Coder - JFrog Xray** integration. - - ```bash - helm repo add coder-xray https://helm.coder.com/coder-xray - ``` - - ```bash - helm upgrade --install coder-xray coder-xray/coder-xray \ - --namespace coder-xray \ - --create-namespace \ - --set namespace="" \ - --set coder.url="https://" \ - --set coder.secretName="coder-token" \ - --set artifactory.url="https://" \ - --set artifactory.secretName="jfrog-token" - ``` - -> [!IMPORTANT] -> To authenticate with the Artifactory registry, you may need to -> create a [Docker config](https://jfrog.com/help/r/jfrog-artifactory-documentation/docker-advanced-topics) and use it in the -> `imagePullSecrets` field of the Kubernetes Pod. -> See the [Defining ImagePullSecrets for Coder workspaces](../../tutorials/image-pull-secret.md) guide for more information. - -## Validate your installation - -Once installed, configured workspaces will now have a banner appear on any -workspace with vulnerabilities reported by JFrog Xray. - -JFrog Xray Integration diff --git a/docs/admin/integrations/oauth2-provider.md b/docs/admin/integrations/oauth2-provider.md new file mode 100644 index 0000000000000..e5264904293f7 --- /dev/null +++ b/docs/admin/integrations/oauth2-provider.md @@ -0,0 +1,236 @@ +# OAuth2 Provider (Experimental) + +> [!WARNING] +> The OAuth2 provider functionality is currently **experimental and unstable**. This feature: +> +> - Is subject to breaking changes without notice +> - May have incomplete functionality +> - Is not recommended for production use +> - Requires the `oauth2` experiment flag to be enabled +> +> Use this feature for development and testing purposes only. + +Coder can act as an OAuth2 authorization server, allowing third-party applications to authenticate users through Coder and access the Coder API on their behalf. This enables integrations where external applications can leverage Coder's authentication and user management. + +## Requirements + +- Admin privileges in Coder +- OAuth2 experiment flag enabled +- HTTPS recommended for production deployments + +## Enable OAuth2 Provider + +Add the `oauth2` experiment flag to your Coder server: + +```bash +coder server --experiments oauth2 +``` + +Or set the environment variable: + +```env +CODER_EXPERIMENTS=oauth2 +``` + +## Creating OAuth2 Applications + +### Method 1: Web UI + +1. Navigate to **Deployment Settings** → **OAuth2 Applications** +2. Click **Create Application** +3. Fill in the application details: + - **Name**: Your application name + - **Callback URL**: `https://yourapp.example.com/callback` + - **Icon**: Optional icon URL + +### Method 2: Management API + +Create an application using the Coder API: + +```bash +curl -X POST \ + -H "Authorization: Bearer $CODER_SESSION_TOKEN" \ + -H "Content-Type: application/json" \ + -d '{ + "name": "My Application", + "callback_url": "https://myapp.example.com/callback", + "icon": "https://myapp.example.com/icon.png" + }' \ + "$CODER_URL/api/v2/oauth2-provider/apps" +``` + +Generate a client secret: + +```bash +curl -X POST \ + -H "Authorization: Bearer $CODER_SESSION_TOKEN" \ + "$CODER_URL/api/v2/oauth2-provider/apps/$APP_ID/secrets" +``` + +## Integration Patterns + +### Standard OAuth2 Flow + +1. **Authorization Request**: Redirect users to Coder's authorization endpoint: + + ```url + https://coder.example.com/oauth2/authorize? + client_id=your-client-id& + response_type=code& + redirect_uri=https://yourapp.example.com/callback& + state=random-string + ``` + +2. **Token Exchange**: Exchange the authorization code for an access token: + + ```bash + curl -X POST \ + -H "Content-Type: application/x-www-form-urlencoded" \ + -d "grant_type=authorization_code" \ + -d "code=$AUTH_CODE" \ + -d "client_id=$CLIENT_ID" \ + -d "client_secret=$CLIENT_SECRET" \ + -d "redirect_uri=https://yourapp.example.com/callback" \ + "$CODER_URL/oauth2/tokens" + ``` + +3. **API Access**: Use the access token to call Coder's API: + + ```bash + curl -H "Authorization: Bearer $ACCESS_TOKEN" \ + "$CODER_URL/api/v2/users/me" + ``` + +### PKCE Flow (Public Clients) + +For mobile apps and single-page applications, use PKCE for enhanced security: + +1. Generate a code verifier and challenge: + + ```bash + CODE_VERIFIER=$(openssl rand -base64 96 | tr -d "=+/" | cut -c1-128) + CODE_CHALLENGE=$(echo -n $CODE_VERIFIER | openssl dgst -sha256 -binary | base64 | tr -d "=+/" | cut -c1-43) + ``` + +2. Include PKCE parameters in the authorization request: + + ```url + https://coder.example.com/oauth2/authorize? + client_id=your-client-id& + response_type=code& + code_challenge=$CODE_CHALLENGE& + code_challenge_method=S256& + redirect_uri=https://yourapp.example.com/callback + ``` + +3. Include the code verifier in the token exchange: + + ```bash + curl -X POST \ + -d "grant_type=authorization_code" \ + -d "code=$AUTH_CODE" \ + -d "client_id=$CLIENT_ID" \ + -d "code_verifier=$CODE_VERIFIER" \ + "$CODER_URL/oauth2/tokens" + ``` + +## Discovery Endpoints + +Coder provides OAuth2 discovery endpoints for programmatic integration: + +- **Authorization Server Metadata**: `GET /.well-known/oauth-authorization-server` +- **Protected Resource Metadata**: `GET /.well-known/oauth-protected-resource` + +These endpoints return server capabilities and endpoint URLs according to [RFC 8414](https://datatracker.ietf.org/doc/html/rfc8414) and [RFC 9728](https://datatracker.ietf.org/doc/html/rfc9728). + +## Token Management + +### Refresh Tokens + +Refresh an expired access token: + +```bash +curl -X POST \ + -H "Content-Type: application/x-www-form-urlencoded" \ + -d "grant_type=refresh_token" \ + -d "refresh_token=$REFRESH_TOKEN" \ + -d "client_id=$CLIENT_ID" \ + -d "client_secret=$CLIENT_SECRET" \ + "$CODER_URL/oauth2/tokens" +``` + +### Revoke Access + +Revoke all tokens for an application: + +```bash +curl -X DELETE \ + -H "Authorization: Bearer $CODER_SESSION_TOKEN" \ + "$CODER_URL/oauth2/tokens?client_id=$CLIENT_ID" +``` + +## Testing and Development + +Coder provides comprehensive test scripts for OAuth2 development: + +```bash +# Navigate to the OAuth2 test scripts +cd scripts/oauth2/ + +# Run the full automated test suite +./test-mcp-oauth2.sh + +# Create a test application for manual testing +eval $(./setup-test-app.sh) + +# Run an interactive browser-based test +./test-manual-flow.sh + +# Clean up when done +./cleanup-test-app.sh +``` + +For more details on testing, see the [OAuth2 test scripts README](../../../scripts/oauth2/README.md). + +## Common Issues + +### "OAuth2 experiment not enabled" + +Add `oauth2` to your experiment flags: `coder server --experiments oauth2` + +### "Invalid redirect_uri" + +Ensure the redirect URI in your request exactly matches the one registered for your application. + +### "PKCE verification failed" + +Verify that the `code_verifier` used in the token request matches the one used to generate the `code_challenge`. + +## Security Considerations + +- **Use HTTPS**: Always use HTTPS in production to protect tokens in transit +- **Implement PKCE**: Use PKCE for all public clients (mobile apps, SPAs) +- **Validate redirect URLs**: Only register trusted redirect URIs for your applications +- **Rotate secrets**: Periodically rotate client secrets using the management API + +## Limitations + +As an experimental feature, the current implementation has limitations: + +- No scope system - all tokens have full API access +- No client credentials grant support +- Limited to opaque access tokens (no JWT support) + +## Standards Compliance + +This implementation follows established OAuth2 standards including [RFC 6749](https://datatracker.ietf.org/doc/html/rfc6749) (OAuth2 core), [RFC 7636](https://datatracker.ietf.org/doc/html/rfc7636) (PKCE), and related specifications for discovery and client registration. + +## Next Steps + +- Review the [API Reference](../../reference/api/index.md) for complete endpoint documentation +- Check [External Authentication](../external-auth/index.md) for configuring Coder as an OAuth2 client +- See [Security Best Practices](../security/index.md) for deployment security guidance + +## Feedback + +This is an experimental feature under active development. Please report issues and feedback through [GitHub Issues](https://github.com/coder/coder/issues) with the `oauth2` label. diff --git a/docs/admin/monitoring/logs.md b/docs/admin/monitoring/logs.md index 02e175795ae1f..8b9f5e747d5fd 100644 --- a/docs/admin/monitoring/logs.md +++ b/docs/admin/monitoring/logs.md @@ -17,7 +17,7 @@ machine/VM. options. - To only display certain types of logs, use the[`CODER_LOG_FILTER`](../../reference/cli/server.md#-l---log-filter) server - config. + config. Using `.*` will result in the `DEBUG` log level being used. Events such as server errors, audit logs, user activities, and SSO & OpenID Connect logs are all captured in the `coderd` logs. diff --git a/docs/admin/monitoring/notifications/index.md b/docs/admin/monitoring/notifications/index.md index fc2bc41968d78..70279dcb16bf1 100644 --- a/docs/admin/monitoring/notifications/index.md +++ b/docs/admin/monitoring/notifications/index.md @@ -282,7 +282,7 @@ troubleshoot: 1. Review the logs. Search for the term `notifications` for diagnostic information. - If you do not see any relevant logs, set - `CODER_VERBOSE=true` or `--verbose` to output debug logs. + `CODER_LOG_FILTER=".*notifications.*"` to filter for notification-related logs. 1. If you are on version 2.15.x, notifications must be enabled using the `notifications` [experiment](../../../install/releases/feature-stages.md#early-access-features). diff --git a/docs/admin/security/audit-logs.md b/docs/admin/security/audit-logs.md index 9aca854e46b85..69d85b0d67f72 100644 --- a/docs/admin/security/audit-logs.md +++ b/docs/admin/security/audit-logs.md @@ -13,31 +13,31 @@ We track the following resources: -| Resource | | | -|----------------------------------------------------------|----------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| APIKey
login, logout, register, create, delete | |
FieldTracked
created_attrue
expires_attrue
hashed_secretfalse
idfalse
ip_addressfalse
last_usedtrue
lifetime_secondsfalse
login_typefalse
scopefalse
token_namefalse
updated_atfalse
user_idtrue
| -| AuditOAuthConvertState
| |
FieldTracked
created_attrue
expires_attrue
from_login_typetrue
to_login_typetrue
user_idtrue
| -| Group
create, write, delete | |
FieldTracked
avatar_urltrue
display_nametrue
idtrue
memberstrue
nametrue
organization_idfalse
quota_allowancetrue
sourcefalse
| -| AuditableOrganizationMember
| |
FieldTracked
created_attrue
organization_idfalse
rolestrue
updated_attrue
user_idtrue
usernametrue
| -| CustomRole
| |
FieldTracked
created_atfalse
display_nametrue
idfalse
nametrue
org_permissionstrue
organization_idfalse
site_permissionstrue
updated_atfalse
user_permissionstrue
| -| GitSSHKey
create | |
FieldTracked
created_atfalse
private_keytrue
public_keytrue
updated_atfalse
user_idtrue
| -| GroupSyncSettings
| |
FieldTracked
auto_create_missing_groupstrue
fieldtrue
legacy_group_name_mappingfalse
mappingtrue
regex_filtertrue
| -| HealthSettings
| |
FieldTracked
dismissed_healthcheckstrue
idfalse
| -| License
create, delete | |
FieldTracked
exptrue
idfalse
jwtfalse
uploaded_attrue
uuidtrue
| -| NotificationTemplate
| |
FieldTracked
actionstrue
body_templatetrue
enabled_by_defaulttrue
grouptrue
idfalse
kindtrue
methodtrue
nametrue
title_templatetrue
| -| NotificationsSettings
| |
FieldTracked
idfalse
notifier_pausedtrue
| -| OAuth2ProviderApp
| |
FieldTracked
callback_urltrue
client_id_issued_atfalse
client_secret_expires_attrue
client_typetrue
client_uritrue
contactstrue
created_atfalse
dynamically_registeredtrue
grant_typestrue
icontrue
idfalse
jwkstrue
jwks_uritrue
logo_uritrue
nametrue
policy_uritrue
redirect_uristrue
registration_access_tokentrue
registration_client_uritrue
response_typestrue
scopetrue
software_idtrue
software_versiontrue
token_endpoint_auth_methodtrue
tos_uritrue
updated_atfalse
| -| OAuth2ProviderAppSecret
| |
FieldTracked
app_idfalse
created_atfalse
display_secretfalse
hashed_secretfalse
idfalse
last_used_atfalse
secret_prefixfalse
| -| Organization
| |
FieldTracked
created_atfalse
deletedtrue
descriptiontrue
display_nametrue
icontrue
idfalse
is_defaulttrue
nametrue
updated_attrue
| -| OrganizationSyncSettings
| |
FieldTracked
assign_defaulttrue
fieldtrue
mappingtrue
| -| PrebuildsSettings
| |
FieldTracked
idfalse
reconciliation_pausedtrue
| -| RoleSyncSettings
| |
FieldTracked
fieldtrue
mappingtrue
| -| Template
write, delete | |
FieldTracked
active_version_idtrue
activity_bumptrue
allow_user_autostarttrue
allow_user_autostoptrue
allow_user_cancel_workspace_jobstrue
autostart_block_days_of_weektrue
autostop_requirement_days_of_weektrue
autostop_requirement_weekstrue
created_atfalse
created_bytrue
created_by_avatar_urlfalse
created_by_namefalse
created_by_usernamefalse
default_ttltrue
deletedfalse
deprecatedtrue
descriptiontrue
display_nametrue
failure_ttltrue
group_acltrue
icontrue
idtrue
max_port_sharing_leveltrue
nametrue
organization_display_namefalse
organization_iconfalse
organization_idfalse
organization_namefalse
provisionertrue
require_active_versiontrue
time_til_dormanttrue
time_til_dormant_autodeletetrue
updated_atfalse
use_classic_parameter_flowtrue
user_acltrue
| -| TemplateVersion
create, write | |
FieldTracked
archivedtrue
created_atfalse
created_bytrue
created_by_avatar_urlfalse
created_by_namefalse
created_by_usernamefalse
external_auth_providersfalse
has_ai_taskfalse
idtrue
job_idfalse
messagefalse
nametrue
organization_idfalse
readmetrue
source_example_idfalse
template_idtrue
updated_atfalse
| -| User
create, write, delete | |
FieldTracked
avatar_urlfalse
created_atfalse
deletedtrue
emailtrue
github_com_user_idfalse
hashed_one_time_passcodefalse
hashed_passwordtrue
idtrue
is_systemtrue
last_seen_atfalse
login_typetrue
nametrue
one_time_passcode_expires_attrue
quiet_hours_scheduletrue
rbac_rolestrue
statustrue
updated_atfalse
usernametrue
| -| WorkspaceBuild
start, stop | |
FieldTracked
ai_task_sidebar_app_idfalse
build_numberfalse
created_atfalse
daily_costfalse
deadlinefalse
has_ai_taskfalse
idfalse
initiator_by_avatar_urlfalse
initiator_by_namefalse
initiator_by_usernamefalse
initiator_idfalse
job_idfalse
max_deadlinefalse
provisioner_statefalse
reasonfalse
template_version_idtrue
template_version_preset_idfalse
transitionfalse
updated_atfalse
workspace_idfalse
| -| WorkspaceProxy
| |
FieldTracked
created_attrue
deletedfalse
derp_enabledtrue
derp_onlytrue
display_nametrue
icontrue
idtrue
nametrue
region_idtrue
token_hashed_secrettrue
updated_atfalse
urltrue
versiontrue
wildcard_hostnametrue
| -| WorkspaceTable
| |
FieldTracked
automatic_updatestrue
autostart_scheduletrue
created_atfalse
deletedfalse
deleting_attrue
dormant_attrue
favoritetrue
idtrue
last_used_atfalse
nametrue
next_start_attrue
organization_idfalse
owner_idtrue
template_idtrue
ttltrue
updated_atfalse
| +| Resource | | | +|----------------------------------------------------------|----------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| APIKey
login, logout, register, create, delete | |
FieldTracked
created_attrue
expires_attrue
hashed_secretfalse
idfalse
ip_addressfalse
last_usedtrue
lifetime_secondsfalse
login_typefalse
scopefalse
token_namefalse
updated_atfalse
user_idtrue
| +| AuditOAuthConvertState
| |
FieldTracked
created_attrue
expires_attrue
from_login_typetrue
to_login_typetrue
user_idtrue
| +| Group
create, write, delete | |
FieldTracked
avatar_urltrue
display_nametrue
idtrue
memberstrue
nametrue
organization_idfalse
quota_allowancetrue
sourcefalse
| +| AuditableOrganizationMember
| |
FieldTracked
created_attrue
organization_idfalse
rolestrue
updated_attrue
user_idtrue
usernametrue
| +| CustomRole
| |
FieldTracked
created_atfalse
display_nametrue
idfalse
nametrue
org_permissionstrue
organization_idfalse
site_permissionstrue
updated_atfalse
user_permissionstrue
| +| GitSSHKey
create | |
FieldTracked
created_atfalse
private_keytrue
public_keytrue
updated_atfalse
user_idtrue
| +| GroupSyncSettings
| |
FieldTracked
auto_create_missing_groupstrue
fieldtrue
legacy_group_name_mappingfalse
mappingtrue
regex_filtertrue
| +| HealthSettings
| |
FieldTracked
dismissed_healthcheckstrue
idfalse
| +| License
create, delete | |
FieldTracked
exptrue
idfalse
jwtfalse
uploaded_attrue
uuidtrue
| +| NotificationTemplate
| |
FieldTracked
actionstrue
body_templatetrue
enabled_by_defaulttrue
grouptrue
idfalse
kindtrue
methodtrue
nametrue
title_templatetrue
| +| NotificationsSettings
| |
FieldTracked
idfalse
notifier_pausedtrue
| +| OAuth2ProviderApp
| |
FieldTracked
callback_urltrue
client_id_issued_atfalse
client_secret_expires_attrue
client_typetrue
client_uritrue
contactstrue
created_atfalse
dynamically_registeredtrue
grant_typestrue
icontrue
idfalse
jwkstrue
jwks_uritrue
logo_uritrue
nametrue
policy_uritrue
redirect_uristrue
registration_access_tokentrue
registration_client_uritrue
response_typestrue
scopetrue
software_idtrue
software_versiontrue
token_endpoint_auth_methodtrue
tos_uritrue
updated_atfalse
| +| OAuth2ProviderAppSecret
| |
FieldTracked
app_idfalse
created_atfalse
display_secretfalse
hashed_secretfalse
idfalse
last_used_atfalse
secret_prefixfalse
| +| Organization
| |
FieldTracked
created_atfalse
deletedtrue
descriptiontrue
display_nametrue
icontrue
idfalse
is_defaulttrue
nametrue
updated_attrue
| +| OrganizationSyncSettings
| |
FieldTracked
assign_defaulttrue
fieldtrue
mappingtrue
| +| PrebuildsSettings
| |
FieldTracked
idfalse
reconciliation_pausedtrue
| +| RoleSyncSettings
| |
FieldTracked
fieldtrue
mappingtrue
| +| Template
write, delete | |
FieldTracked
active_version_idtrue
activity_bumptrue
allow_user_autostarttrue
allow_user_autostoptrue
allow_user_cancel_workspace_jobstrue
autostart_block_days_of_weektrue
autostop_requirement_days_of_weektrue
autostop_requirement_weekstrue
cors_behaviortrue
created_atfalse
created_bytrue
created_by_avatar_urlfalse
created_by_namefalse
created_by_usernamefalse
default_ttltrue
deletedfalse
deprecatedtrue
descriptiontrue
display_nametrue
failure_ttltrue
group_acltrue
icontrue
idtrue
max_port_sharing_leveltrue
nametrue
organization_display_namefalse
organization_iconfalse
organization_idfalse
organization_namefalse
provisionertrue
require_active_versiontrue
time_til_dormanttrue
time_til_dormant_autodeletetrue
updated_atfalse
use_classic_parameter_flowtrue
user_acltrue
| +| TemplateVersion
create, write | |
FieldTracked
archivedtrue
created_atfalse
created_bytrue
created_by_avatar_urlfalse
created_by_namefalse
created_by_usernamefalse
external_auth_providersfalse
has_ai_taskfalse
has_external_agentfalse
idtrue
job_idfalse
messagefalse
nametrue
organization_idfalse
readmetrue
source_example_idfalse
template_idtrue
updated_atfalse
| +| User
create, write, delete | |
FieldTracked
avatar_urlfalse
created_atfalse
deletedtrue
emailtrue
github_com_user_idfalse
hashed_one_time_passcodefalse
hashed_passwordtrue
idtrue
is_systemtrue
last_seen_atfalse
login_typetrue
nametrue
one_time_passcode_expires_attrue
quiet_hours_scheduletrue
rbac_rolestrue
statustrue
updated_atfalse
usernametrue
| +| WorkspaceBuild
start, stop | |
FieldTracked
ai_task_sidebar_app_idfalse
build_numberfalse
created_atfalse
daily_costfalse
deadlinefalse
has_ai_taskfalse
has_external_agentfalse
idfalse
initiator_by_avatar_urlfalse
initiator_by_namefalse
initiator_by_usernamefalse
initiator_idfalse
job_idfalse
max_deadlinefalse
provisioner_statefalse
reasonfalse
template_version_idtrue
template_version_preset_idfalse
transitionfalse
updated_atfalse
workspace_idfalse
| +| WorkspaceProxy
| |
FieldTracked
created_attrue
deletedfalse
derp_enabledtrue
derp_onlytrue
display_nametrue
icontrue
idtrue
nametrue
region_idtrue
token_hashed_secrettrue
updated_atfalse
urltrue
versiontrue
wildcard_hostnametrue
| +| WorkspaceTable
| |
FieldTracked
automatic_updatestrue
autostart_scheduletrue
created_atfalse
deletedfalse
deleting_attrue
dormant_attrue
favoritetrue
group_acltrue
idtrue
last_used_atfalse
nametrue
next_start_attrue
organization_idfalse
owner_idtrue
template_idtrue
ttltrue
updated_atfalse
user_acltrue
| diff --git a/docs/admin/templates/extending-templates/dynamic-parameters.md b/docs/admin/templates/extending-templates/dynamic-parameters.md index d676c3bcf3148..8ca1f4efd4149 100644 --- a/docs/admin/templates/extending-templates/dynamic-parameters.md +++ b/docs/admin/templates/extending-templates/dynamic-parameters.md @@ -38,11 +38,11 @@ They allow you to set resource guardrails by referencing Coder identity in the ` ## How to enable Dynamic Parameters -In Coder v2.24.0, you can opt-in to Dynamic Parameters on a per-template basis. +In Coder v2.25.0, Dynamic Parameters are automatically enabled for new templates. You can opt-in to Dynamic Parameters for individual existing templates via template settings. 1. Go to your template's settings and enable the **Enable dynamic parameters for workspace creation** option. - ![Enable dynamic parameters for workspace creation](../../../images/admin/templates/extend-templates/dyn-params/enable-dynamic-parameters.png) + ![Enable dynamic parameters for workspace creation](../../../images/admin/templates/extend-templates/dyn-params/dynamic-parameters-ga-settings.png) 1. Update your template to use version >=2.4.0 of the Coder provider with the following Terraform block. @@ -784,9 +784,9 @@ data "coder_parameter" "your_groups" { ## Troubleshooting -Dynamic Parameters is still in Beta as we continue to polish and improve the workflow. +Dynamic Parameters is now in general availability. We're tracking a list of known issues [here in Github](https://github.com/coder/coder/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20label%3Aparameters) as we continue to polish and improve the workflow. If you have any issues during upgrade, please file an issue in our -[GitHub repository](https://github.com/coder/coder/issues/new?labels=parameters) and include a +[GitHub repository](https://github.com/coder/coder/issues/new?labels=parameters) with the `parameters` label and include a [Playground link](https://playground.coder.app/parameters) where applicable. We appreciate the feedback and look forward to what the community creates with this system! @@ -798,7 +798,7 @@ You can share anything you build with Dynamic Parameters in our [Discord](https: Ensure that the following version requirements are met: -- `coder/coder`: >= [v2.24.0](https://github.com/coder/coder/releases/tag/v2.24.0) +- `coder/coder`: >= [v2.25.0](https://github.com/coder/coder/releases/tag/v2.25.0) - `coder/terraform-provider-coder`: >= [v2.5.3](https://github.com/coder/terraform-provider-coder/releases/tag/v2.5.3) Enabling Dynamic Parameters on an existing template requires administrators to publish a new template version. @@ -818,10 +818,9 @@ To revert Dynamic Parameters on a template: ### Template variables not showing up -In beta, template variables are not supported in Dynamic Parameters. +Dynamic Parameters is GA as of [v2.25.0](https://github.com/coder/coder/releases/tag/v2.25.0), and this issue has been resolved. In beta ([v2.24.0](https://github.com/coder/coder/releases/tag/v2.24.0)), template variables were not supported in Dynamic Parameters. -This issue will be resolved by the next minor release of `coder/coder`. -If this is issue is blocking your usage of Dynamic Parameters, please let us know in [this thread](https://github.com/coder/coder/issues/18671). +If you are experiencing issues with template variables, try upgrading to the latest version with dynamic parameters in GA. Otherwise, please file an issue in our Github. ### Can I use registry modules with Dynamic Parameters? diff --git a/docs/admin/templates/extending-templates/modules.md b/docs/admin/templates/extending-templates/modules.md index d7ed472831662..1495dfce1f2da 100644 --- a/docs/admin/templates/extending-templates/modules.md +++ b/docs/admin/templates/extending-templates/modules.md @@ -40,14 +40,14 @@ in the Terraform documentation. Coder publishes plenty of modules that can be used to simplify some common tasks across templates. Some of the modules we publish are, -1. [`code-server`](https://registry.coder.com/modules/code-server) and - [`vscode-web`](https://registry.coder.com/modules/vscode-web) -2. [`git-clone`](https://registry.coder.com/modules/git-clone) -3. [`dotfiles`](https://registry.coder.com/modules/dotfiles) -4. [`jetbrains-gateway`](https://registry.coder.com/modules/jetbrains-gateway) -5. [`jfrog-oauth`](https://registry.coder.com/modules/jfrog-oauth) and - [`jfrog-token`](https://registry.coder.com/modules/jfrog-token) -6. [`vault-github`](https://registry.coder.com/modules/vault-github) +1. [`code-server`](https://registry.coder.com/modules/coder/code-server) and + [`vscode-web`](https://registry.coder.com/modules/coder/vscode-web) +2. [`git-clone`](https://registry.coder.com/modules/coder/git-clone) +3. [`dotfiles`](https://registry.coder.com/modules/coder/dotfiles) +4. [`jetbrains-gateway`](https://registry.coder.com/modules/coder/jetbrains-gateway) +5. [`jfrog-oauth`](https://registry.coder.com/modules/coder/jfrog-oauth) and + [`jfrog-token`](https://registry.coder.com/modules/coder/jfrog-token) +6. [`vault-github`](https://registry.coder.com/modules/coder/vault-github) For a full list of available modules please check [Coder module registry](https://registry.coder.com/modules). diff --git a/docs/admin/templates/extending-templates/parameters.md b/docs/admin/templates/extending-templates/parameters.md index 5b380645c1b36..43a477632e7db 100644 --- a/docs/admin/templates/extending-templates/parameters.md +++ b/docs/admin/templates/extending-templates/parameters.md @@ -391,7 +391,7 @@ parameters in one of two ways: Or set the [environment variable](../../setup/index.md), `CODER_EXPERIMENTS=auto-fill-parameters` -## Dynamic Parameters (beta) +## Dynamic Parameters Coder v2.24.0 introduces [Dynamic Parameters](./dynamic-parameters.md) to extend the existing parameter system with conditional form controls, enriched input types, and user identity awareness. diff --git a/docs/admin/templates/extending-templates/prebuilt-workspaces.md b/docs/admin/templates/extending-templates/prebuilt-workspaces.md index 8e61687ce0f01..739e13d9130e5 100644 --- a/docs/admin/templates/extending-templates/prebuilt-workspaces.md +++ b/docs/admin/templates/extending-templates/prebuilt-workspaces.md @@ -1,18 +1,12 @@ # Prebuilt workspaces -> [!WARNING] -> Prebuilds Compatibility Limitations: -> Prebuilt workspaces currently do not work reliably with [DevContainers feature](../managing-templates/devcontainers/index.md). -> If your project relies on DevContainer configuration, we recommend disabling prebuilds or carefully testing behavior before enabling them. -> -> We’re actively working to improve compatibility, but for now, please avoid using prebuilds with this feature to ensure stability and expected behavior. +Prebuilt workspaces (prebuilds) reduce workspace creation time with an automatically-maintained pool of +ready-to-use workspaces for specific parameter presets. -Prebuilt workspaces allow template administrators to improve the developer experience by reducing workspace -creation time with an automatically maintained pool of ready-to-use workspaces for specific parameter presets. - -The template administrator configures a template to provision prebuilt workspaces in the background, and then when a developer creates -a new workspace that matches the preset, Coder assigns them an existing prebuilt instance. -Prebuilt workspaces significantly reduce wait times, especially for templates with complex provisioning or lengthy startup procedures. +The template administrator defines the prebuilt workspace's parameters and number of instances to keep provisioned. +The desired number of workspaces are then provisioned transparently. +When a developer creates a new workspace that matches the definition, Coder assigns them an existing prebuilt workspace. +This significantly reduces wait times, especially for templates with complex provisioning or lengthy startup procedures. Prebuilt workspaces are: @@ -21,6 +15,9 @@ Prebuilt workspaces are: - Monitored and replaced automatically to maintain your desired pool size. - Automatically scaled based on time-based schedules to optimize resource usage. +Prebuilt workspaces are a special type of workspace that don't follow the +[regular workspace scheduling features](../../../user-guides/workspace-scheduling.md) like autostart and autostop. Instead, they have their own reconciliation loop that handles prebuild-specific scheduling features such as TTL and prebuild scheduling. + ## Relationship to workspace presets Prebuilt workspaces are tightly integrated with [workspace presets](./parameters.md#workspace-presets): @@ -29,6 +26,7 @@ Prebuilt workspaces are tightly integrated with [workspace presets](./parameters 1. The preset must define all required parameters needed to build the workspace. 1. The preset parameters define the base configuration and are immutable once a prebuilt workspace is provisioned. 1. Parameters that are not defined in the preset can still be customized by users when they claim a workspace. +1. If a user does not select a preset but provides parameters that match one or more presets, Coder will automatically select the most specific matching preset and assign a prebuilt workspace if one is available. ## Prerequisites @@ -52,7 +50,7 @@ instances your Coder deployment should maintain, and optionally configure a `exp prebuilds { instances = 3 # Number of prebuilt workspaces to maintain expiration_policy { - ttl = 86400 # Time (in seconds) after which unclaimed prebuilds are expired (1 day) + ttl = 86400 # Time (in seconds) after which unclaimed prebuilds are expired (86400 = 1 day) } } } @@ -158,17 +156,17 @@ data "coder_workspace_preset" "goland" { **Scheduling configuration:** -- **`timezone`**: The timezone for all cron expressions (required). Only a single timezone is supported per scheduling configuration. -- **`schedule`**: One or more schedule blocks defining when to scale to specific instance counts. - - **`cron`**: Cron expression interpreted as continuous time ranges (required). - - **`instances`**: Number of prebuilt workspaces to maintain during this schedule (required). +- `timezone`: (Required) The timezone for all cron expressions. Only a single timezone is supported per scheduling configuration. +- `schedule`: One or more schedule blocks defining when to scale to specific instance counts. + - `cron`: (Required) Cron expression interpreted as continuous time ranges. + - `instances`: (Required) Number of prebuilt workspaces to maintain during this schedule. **How scheduling works:** 1. The reconciliation loop evaluates all active schedules every reconciliation interval (`CODER_WORKSPACE_PREBUILDS_RECONCILIATION_INTERVAL`). -2. The schedule that matches the current time becomes active. Overlapping schedules are disallowed by validation rules. -3. If no schedules match the current time, the base `instances` count is used. -4. The reconciliation loop automatically creates or destroys prebuilt workspaces to match the target count. +1. The schedule that matches the current time becomes active. Overlapping schedules are disallowed by validation rules. +1. If no schedules match the current time, the base `instances` count is used. +1. The reconciliation loop automatically creates or destroys prebuilt workspaces to match the target count. **Cron expression format:** @@ -226,7 +224,7 @@ When a template's active version is updated: 1. Prebuilt workspaces for old versions are automatically deleted. 1. New prebuilt workspaces are created for the active template version. 1. If dependencies change (e.g., an [AMI](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/AMIs.html) update) without a template version change: - - You may delete the existing prebuilt workspaces manually. + - You can delete the existing prebuilt workspaces manually. - Coder will automatically create new prebuilt workspaces with the updated dependencies. The system always maintains the desired number of prebuilt workspaces for the active template version. @@ -284,23 +282,6 @@ For example, the [`ami`](https://registry.terraform.io/providers/hashicorp/aws/l has [`ForceNew`](https://github.com/hashicorp/terraform-provider-aws/blob/main/internal/service/ec2/ec2_instance.go#L75-L81) set, since the AMI cannot be changed in-place._ -#### Updating claimed prebuilt workspace templates - -Once a prebuilt workspace has been claimed, and if its template uses `ignore_changes`, users may run into an issue where the agent -does not reconnect after a template update. This shortcoming is described in [this issue](https://github.com/coder/coder/issues/17840) -and will be addressed before the next release (v2.23). In the interim, a simple workaround is to restart the workspace -when it is in this problematic state. - -### Current limitations - -The prebuilt workspaces feature has these current limitations: - -- **Organizations** - - Prebuilt workspaces can only be used with the default organization. - - [View issue](https://github.com/coder/internal/issues/364) - ### Monitoring and observability #### Available metrics diff --git a/docs/admin/users/idp-sync.md b/docs/admin/users/idp-sync.md index e893bf91bb8ef..3c7ec708be3f9 100644 --- a/docs/admin/users/idp-sync.md +++ b/docs/admin/users/idp-sync.md @@ -203,7 +203,7 @@ Visit the Coder UI to confirm these changes: ### Group allowlist You can limit which groups from your identity provider can log in to Coder with -[CODER_OIDC_ALLOWED_GROUPS](https://coder.com/docs/cli/server#--oidc-allowed-groups). +[CODER_OIDC_ALLOWED_GROUPS](../../reference/cli/server.md#--oidc-allowed-groups). Users who are not in a matching group will see the following error: Unauthorized group error @@ -419,7 +419,6 @@ If you are running into issues with a sync: 1. To reduce noise, you can filter for only logs related to group/role sync: ```sh - CODER_VERBOSE=true CODER_LOG_FILTER=".*userauth.*|.*groups returned.*" ``` diff --git a/docs/admin/users/index.md b/docs/admin/users/index.md index e86d40a5a1b1f..4f6f5049d34ee 100644 --- a/docs/admin/users/index.md +++ b/docs/admin/users/index.md @@ -173,7 +173,7 @@ coder reset-password ### Resetting a password on Kubernetes ```shell -kubectl exec -it deployment/coder /bin/bash -n coder +kubectl exec -it deployment/coder -n coder -- /bin/bash coder reset-password ``` diff --git a/docs/admin/users/oidc-auth/google.md b/docs/admin/users/oidc-auth/google.md new file mode 100644 index 0000000000000..298497b27bebc --- /dev/null +++ b/docs/admin/users/oidc-auth/google.md @@ -0,0 +1,62 @@ +# Google authentication (OIDC) + +This guide shows how to configure Coder to authenticate users with Google using OpenID Connect (OIDC). + +## Prerequisites + +- A Google Cloud project with the OAuth consent screen configured +- Permission to create OAuth 2.0 Client IDs in Google Cloud + +## Step 1: Create an OAuth client in Google Cloud + +1. Open Google Cloud Console → APIs & Services → Credentials → Create Credentials → OAuth client ID. +2. Application type: Web application. +3. Authorized redirect URIs: add your Coder callback URL: + - `https://coder.example.com/api/v2/users/oidc/callback` +4. Save and note the Client ID and Client secret. + +## Step 2: Configure Coder OIDC for Google + +Set the following environment variables on your Coder deployment and restart Coder: + +```env +CODER_OIDC_ISSUER_URL=https://accounts.google.com +CODER_OIDC_CLIENT_ID= +CODER_OIDC_CLIENT_SECRET= +# Restrict to one or more email domains (comma-separated) +CODER_OIDC_EMAIL_DOMAIN="example.com" +# Standard OIDC scopes for Google +CODER_OIDC_SCOPES=openid,profile,email +# Optional: customize the login button +CODER_OIDC_SIGN_IN_TEXT="Sign in with Google" +CODER_OIDC_ICON_URL=/icon/google.svg +``` + +> [!NOTE] +> The redirect URI must exactly match what you configured in Google Cloud. + +## Enable refresh tokens (recommended) + +Google uses auth URL parameters to issue refresh tokens. Configure: + +```env +# Keep standard scopes +CODER_OIDC_SCOPES=openid,profile,email +# Add Google-specific auth URL params +CODER_OIDC_AUTH_URL_PARAMS='{"access_type": "offline", "prompt": "consent"}' +``` + +After changing settings, users must log out and back in once to obtain refresh tokens. + +Learn more in [Configure OIDC refresh tokens](./refresh-tokens.md). + +## Troubleshooting + +- "invalid redirect_uri": ensure the redirect URI in Google Cloud matches `https:///api/v2/users/oidc/callback`. +- Domain restriction: if users from unexpected domains can log in, verify `CODER_OIDC_EMAIL_DOMAIN`. +- Claims: to inspect claims returned by Google, see guidance in the [OIDC overview](./index.md#oidc-claims). + +## See also + +- [OIDC overview](./index.md) +- [Configure OIDC refresh tokens](./refresh-tokens.md) diff --git a/docs/admin/users/oidc-auth/index.md b/docs/admin/users/oidc-auth/index.md index dd674d21606f5..ae225d66ca0be 100644 --- a/docs/admin/users/oidc-auth/index.md +++ b/docs/admin/users/oidc-auth/index.md @@ -27,7 +27,7 @@ claims from the ID token and the claims obtained from hitting the upstream provider's `userinfo` endpoint, and use the resulting data as a basis for creating a new user or looking up an existing user. -To troubleshoot claims, set `CODER_VERBOSE=true` and follow the logs while +To troubleshoot claims, set `CODER_LOG_FILTER=".*got oidc claims.*"` and follow the logs while signing in via OIDC as a new user. Coder will log the claim fields returned by the upstream identity provider in a message containing the string `got oidc claims`, as well as the user info returned. diff --git a/docs/ai-coder/custom-agents.md b/docs/ai-coder/custom-agents.md index 6709251049efa..6ab68d949a69b 100644 --- a/docs/ai-coder/custom-agents.md +++ b/docs/ai-coder/custom-agents.md @@ -1,6 +1,6 @@ # Custom Agents -Custom agents beyond the ones listed in the [Coder registry](https://registry.coder.com/modules?tag=agent) can be used with Coder Tasks. +Custom agents beyond the ones listed in the [Coder registry](https://registry.coder.com/modules?search=tag%3Aagent) can be used with Coder Tasks. ## Prerequisites diff --git a/docs/ai-coder/mcp-server.md b/docs/ai-coder/mcp-server.md index 29d602030ab58..fdfadb4117d36 100644 --- a/docs/ai-coder/mcp-server.md +++ b/docs/ai-coder/mcp-server.md @@ -1,6 +1,6 @@ # MCP Server -Power users can configure Claude Desktop, Cursor, or other external agents to interact with Coder in order to: +Power users can configure [claude.ai](https://claude.ai), Claude Desktop, Cursor, or other external agents to interact with Coder in order to: - List workspaces - Create/start/stop workspaces @@ -12,6 +12,8 @@ Power users can configure Claude Desktop, Cursor, or other external agents to in In this model, any custom agent could interact with a remote Coder workspace, or Coder can be used in a remote pipeline or a larger workflow. +## Local MCP server + The Coder CLI has options to automatically configure MCP servers for you. On your local machine, run the following command: ```sh @@ -30,4 +32,27 @@ coder exp mcp server ``` > [!NOTE] -> The MCP server is authenticated with the same identity as your Coder CLI and can perform any action on the user's behalf. Fine-grained permissions and a remote MCP server are in development. [Contact us](https://coder.com/contact) if this use case is important to you. +> The MCP server is authenticated with the same identity as your Coder CLI and can perform any action on the user's behalf. Fine-grained permissions are in development. [Contact us](https://coder.com/contact) if this use case is important to you. + +## Remote MCP server + +Coder can expose an MCP server via HTTP. This is useful for connecting web-based agents, like https://claude.ai/, to Coder. This is an experimental feature and is subject to change. + +To enable this feature, activate the `oauth2` and `mcp-server-http` experiments using an environment variable or a CLI flag: + +```sh +CODER_EXPERIMENTS="oauth2,mcp-server-http" coder server +# or +coder server --experiments=oauth2,mcp-server-http +``` + +The Coder server will expose the MCP server at: + +```txt +https://coder.example.com/api/experimental/mcp/http +``` + +> [!NOTE] +> At this time, the remote MCP server is not compatible with web-based ChatGPT. + +Users can authenticate applications to use the remote MCP server with [OAuth2](../admin/integrations/oauth2-provider.md). An authenticated application can perform any action on the user's behalf. Fine-grained permissions are in development. diff --git a/docs/images/admin/templates/extend-templates/dyn-params/dynamic-parameters-ga-settings.png b/docs/images/admin/templates/extend-templates/dyn-params/dynamic-parameters-ga-settings.png new file mode 100644 index 0000000000000..14e84ccdef6dc Binary files /dev/null and b/docs/images/admin/templates/extend-templates/dyn-params/dynamic-parameters-ga-settings.png differ diff --git a/docs/images/architecture-multi-region.png b/docs/images/architecture-multi-region.png index 41205f401b64c..904b769d64237 100644 Binary files a/docs/images/architecture-multi-region.png and b/docs/images/architecture-multi-region.png differ diff --git a/docs/images/architecture-single-region.png b/docs/images/architecture-single-region.png index 3400a6ebc2809..cdca579fa5e12 100644 Binary files a/docs/images/architecture-single-region.png and b/docs/images/architecture-single-region.png differ diff --git a/docs/images/guides/xray-integration/example.png b/docs/images/guides/xray-integration/example.png deleted file mode 100644 index 58c28d332feb5..0000000000000 Binary files a/docs/images/guides/xray-integration/example.png and /dev/null differ diff --git a/docs/images/icons/ai_intelligence.svg b/docs/images/icons/ai_intelligence.svg new file mode 100644 index 0000000000000..bcef647bf3c3a --- /dev/null +++ b/docs/images/icons/ai_intelligence.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/install/cloud/compute-engine.md b/docs/install/cloud/compute-engine.md index 49572059afc60..671a890125392 100644 --- a/docs/install/cloud/compute-engine.md +++ b/docs/install/cloud/compute-engine.md @@ -10,9 +10,12 @@ Google Cloud Platform project. ## Launch a Coder instance from the Google Cloud Marketplace -We publish an Ubuntu 22.04 VM image with Coder and Docker pre-installed. Search -for `Coder v2` in the GCP Marketplace or -[use direct link](https://console.cloud.google.com/marketplace/product/coder-enterprise-market-public/coder-v2). +We publish an Ubuntu 22.04 VM image with Coder and Docker pre-installed. + +Two SKU's are available via the Google Cloud Marketplace: + +1. [License purchase via Google Cloud Marketplace](https://console.cloud.google.com/marketplace/product/coder-enterprise-market-public/coder-gcmp?inv=1&invt=Ab45rg&project=secret-beacon-468405-p5) +2. [A solution to deploy VM's on GCP (Bring Your Own License)](https://console.cloud.google.com/marketplace/product/workspan-public-422119/coder?inv=1&invt=Ab45rg&project=secret-beacon-468405-p5) ![Coder on GCP Marketplace](../../images/platforms/gcp/marketplace.png) diff --git a/docs/install/kubernetes.md b/docs/install/kubernetes.md index 72c51e0da3e8c..0b9c506878517 100644 --- a/docs/install/kubernetes.md +++ b/docs/install/kubernetes.md @@ -135,9 +135,9 @@ We support two release channels: mainline and stable - read the helm install coder coder-v2/coder \ --namespace coder \ --values values.yaml \ - --version 2.23.1 + --version 2.25.0 ``` - + - **OCI Registry** @@ -146,7 +146,7 @@ We support two release channels: mainline and stable - read the helm install coder oci://ghcr.io/coder/chart/coder \ --namespace coder \ --values values.yaml \ - --version 2.23.1 + --version 2.25.0 ``` - **Stable** Coder release: @@ -159,9 +159,9 @@ We support two release channels: mainline and stable - read the helm install coder coder-v2/coder \ --namespace coder \ --values values.yaml \ - --version 2.22.1 + --version 2.25.0 ``` - + - **OCI Registry** @@ -170,7 +170,7 @@ We support two release channels: mainline and stable - read the helm install coder oci://ghcr.io/coder/chart/coder \ --namespace coder \ --values values.yaml \ - --version 2.22.1 + --version 2.25.0 ``` You can watch Coder start up by running `kubectl get pods -n coder`. Once Coder diff --git a/docs/install/releases/index.md b/docs/install/releases/index.md index 577939c05dde9..83efc16aefe17 100644 --- a/docs/install/releases/index.md +++ b/docs/install/releases/index.md @@ -55,15 +55,15 @@ pages. ## Release schedule -| Release name | Release Date | Status | Latest Release | -|------------------------------------------------|-------------------|------------------|----------------------------------------------------------------| -| [2.19](https://coder.com/changelog/coder-2-19) | February 04, 2025 | Not Supported | [v2.19.3](https://github.com/coder/coder/releases/tag/v2.19.3) | -| [2.20](https://coder.com/changelog/coder-2-20) | March 04, 2025 | Not Supported | [v2.20.3](https://github.com/coder/coder/releases/tag/v2.20.3) | -| [2.21](https://coder.com/changelog/coder-2-21) | April 02, 2025 | Not Supported | [v2.21.3](https://github.com/coder/coder/releases/tag/v2.21.3) | -| [2.22](https://coder.com/changelog/coder-2-22) | May 16, 2025 | Security Support | [v2.22.1](https://github.com/coder/coder/releases/tag/v2.22.1) | -| [2.23](https://coder.com/changelog/coder-2-23) | June 03, 2025 | Stable | [v2.23.2](https://github.com/coder/coder/releases/tag/v2.23.2) | -| [2.24](https://coder.com/changelog/coder-2-24) | July 01, 2025 | Mainline | [v2.24.1](https://github.com/coder/coder/releases/tag/v2.24.1) | -| 2.25 | | Not Released | N/A | +| Release name | Release Date | Status | Latest Release | +|------------------------------------------------|-----------------|------------------|----------------------------------------------------------------| +| [2.20](https://coder.com/changelog/coder-2-20) | March 04, 2025 | Not Supported | [v2.20.3](https://github.com/coder/coder/releases/tag/v2.20.3) | +| [2.21](https://coder.com/changelog/coder-2-21) | April 02, 2025 | Not Supported | [v2.21.3](https://github.com/coder/coder/releases/tag/v2.21.3) | +| [2.22](https://coder.com/changelog/coder-2-22) | May 16, 2025 | Security Support | [v2.22.1](https://github.com/coder/coder/releases/tag/v2.22.1) | +| [2.23](https://coder.com/changelog/coder-2-23) | June 03, 2025 | Security Support | [v2.23.2](https://github.com/coder/coder/releases/tag/v2.23.4) | +| [2.24](https://coder.com/changelog/coder-2-24) | July 01, 2025 | Stable | [v2.24.2](https://github.com/coder/coder/releases/tag/v2.24.2) | +| [2.24](https://coder.com/changelog/coder-2-24) | August 05, 2025 | Mainline | [v2.25.0](https://github.com/coder/coder/releases/tag/v2.25.0) | +| 2.25 | | Not Released | N/A | > [!TIP] diff --git a/docs/install/uninstall.md b/docs/install/uninstall.md index 7a94b22b25f6c..c04bd6e9c2723 100644 --- a/docs/install/uninstall.md +++ b/docs/install/uninstall.md @@ -74,17 +74,17 @@ performing the following step or copying the directory to another location.
-## macOS +## Linux ```shell -rm -rf ~/Library/Application\ Support/coderv2 +rm -rf ~/.config/coderv2 +rm -rf ~/.cache/coder ``` -## Linux +## macOS ```shell -rm -rf ~/.config/coderv2 -rm -rf ~/.cache/coder +rm -rf ~/Library/Application\ Support/coderv2 ``` ## Windows diff --git a/docs/manifest.json b/docs/manifest.json index 0305105c029fd..4a382da8ec25a 100644 --- a/docs/manifest.json +++ b/docs/manifest.json @@ -76,6 +76,12 @@ "description": "Security vulnerability disclosure policy", "path": "./about/contributing/SECURITY.md", "icon_path": "./images/icons/lock.svg" + }, + { + "title": "AI Contribution Guidelines", + "description": "Guidelines for AI-generated contributions.", + "path": "./about/contributing/AI_CONTRIBUTING.md", + "icon_path": "./images/icons/ai_intelligence.svg" } ] } @@ -251,6 +257,11 @@ "description": "Access your workspace with IDEs in the browser", "path": "./user-guides/workspace-access/web-ides.md" }, + { + "title": "code-server", + "description": "Access your workspace with code-server", + "path": "./user-guides/workspace-access/code-server.md" + }, { "title": "Zed", "description": "Access your workspace with Zed", @@ -411,6 +422,11 @@ "description": "Configure OpenID Connect authentication with identity providers like Okta or Active Directory", "path": "./admin/users/oidc-auth/index.md", "children": [ + { + "title": "Google", + "description": "Configure Google as an OIDC provider", + "path": "./admin/users/oidc-auth/google.md" + }, { "title": "Configure OIDC refresh tokens", "description": "How to configure OIDC refresh tokens", @@ -542,8 +558,7 @@ { "title": "Dynamic Parameters", "description": "Conditional, identity-aware parameter syntax for advanced users.", - "path": "./admin/templates/extending-templates/dynamic-parameters.md", - "state": ["beta"] + "path": "./admin/templates/extending-templates/dynamic-parameters.md" }, { "title": "Prebuilt workspaces", @@ -694,11 +709,6 @@ "description": "Integrate Coder with JFrog Artifactory", "path": "./admin/integrations/jfrog-artifactory.md" }, - { - "title": "JFrog Xray", - "description": "Integrate Coder with JFrog Xray", - "path": "./admin/integrations/jfrog-xray.md" - }, { "title": "Island Secure Browser", "description": "Integrate Coder with Island's Secure Browser", @@ -710,14 +720,19 @@ "path": "./admin/integrations/platformx.md" }, { - "title": "DX", - "description": "Tag Coder Users with DX", + "title": "DX Data Cloud", + "description": "Tag Coder Users with DX Data Cloud", "path": "./admin/integrations/dx-data-cloud.md" }, { "title": "Hashicorp Vault", "description": "Integrate Coder with Hashicorp Vault", "path": "./admin/integrations/vault.md" + }, + { + "title": "OAuth2 Provider", + "description": "Use Coder as an OAuth2 provider", + "path": "./admin/integrations/oauth2-provider.md" } ] }, @@ -949,11 +964,6 @@ "description": "Deploy Coder on Azure with an Application Gateway", "path": "./install/kubernetes/kubernetes-azure-app-gateway.md" }, - { - "title": "Scanning Workspaces with JFrog Xray", - "description": "Integrate Coder with JFrog Xray", - "path": "./admin/integrations/jfrog-xray.md" - }, { "title": "Cloning Git Repositories", "description": "Learn how to clone Git repositories in Coder", @@ -1167,6 +1177,26 @@ "description": "Print auth for an external provider", "path": "reference/cli/external-auth_access-token.md" }, + { + "title": "external-workspaces", + "description": "Create or manage external workspaces", + "path": "reference/cli/external-workspaces.md" + }, + { + "title": "external-workspaces agent-instructions", + "description": "Get the instructions for an external agent", + "path": "reference/cli/external-workspaces_agent-instructions.md" + }, + { + "title": "external-workspaces create", + "description": "Create a new external workspace", + "path": "reference/cli/external-workspaces_create.md" + }, + { + "title": "external-workspaces list", + "description": "List external workspaces", + "path": "reference/cli/external-workspaces_list.md" + }, { "title": "favorite", "description": "Add a workspace to your favorites", diff --git a/docs/reference/api/builds.md b/docs/reference/api/builds.md index fb491405df362..526f5bfd25ff1 100644 --- a/docs/reference/api/builds.md +++ b/docs/reference/api/builds.md @@ -33,6 +33,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam "daily_cost": 0, "deadline": "2019-08-24T14:15:22Z", "has_ai_task": true, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "initiator_name": "string", @@ -52,6 +53,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -270,6 +272,7 @@ curl -X GET http://coder-server:8080/api/v2/workspacebuilds/{workspacebuild} \ "daily_cost": 0, "deadline": "2019-08-24T14:15:22Z", "has_ai_task": true, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "initiator_name": "string", @@ -289,6 +292,7 @@ curl -X GET http://coder-server:8080/api/v2/workspacebuilds/{workspacebuild} \ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -996,6 +1000,7 @@ curl -X GET http://coder-server:8080/api/v2/workspacebuilds/{workspacebuild}/sta "daily_cost": 0, "deadline": "2019-08-24T14:15:22Z", "has_ai_task": true, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "initiator_name": "string", @@ -1015,6 +1020,7 @@ curl -X GET http://coder-server:8080/api/v2/workspacebuilds/{workspacebuild}/sta "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -1306,6 +1312,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace}/builds \ "daily_cost": 0, "deadline": "2019-08-24T14:15:22Z", "has_ai_task": true, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "initiator_name": "string", @@ -1325,6 +1332,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace}/builds \ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -1524,6 +1532,7 @@ Status Code **200** | `» daily_cost` | integer | false | | | | `» deadline` | string(date-time) | false | | | | `» has_ai_task` | boolean | false | | | +| `» has_external_agent` | boolean | false | | | | `» id` | string(uuid) | false | | | | `» initiator_id` | string(uuid) | false | | | | `» initiator_name` | string | false | | | @@ -1540,6 +1549,7 @@ Status Code **200** | `»»» error` | string | false | | | | `»»» template_version_id` | string(uuid) | false | | | | `»»» workspace_build_id` | string(uuid) | false | | | +| `»» logs_overflowed` | boolean | false | | | | `»» metadata` | [codersdk.ProvisionerJobMetadata](schemas.md#codersdkprovisionerjobmetadata) | false | | | | `»»» template_display_name` | string | false | | | | `»»» template_icon` | string | false | | | @@ -1797,6 +1807,7 @@ curl -X POST http://coder-server:8080/api/v2/workspaces/{workspace}/builds \ "daily_cost": 0, "deadline": "2019-08-24T14:15:22Z", "has_ai_task": true, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "initiator_name": "string", @@ -1816,6 +1827,7 @@ curl -X POST http://coder-server:8080/api/v2/workspaces/{workspace}/builds \ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", diff --git a/docs/reference/api/enterprise.md b/docs/reference/api/enterprise.md index c9b65a97d2f03..b6043544d4766 100644 --- a/docs/reference/api/enterprise.md +++ b/docs/reference/api/enterprise.md @@ -3582,10 +3582,10 @@ curl -X PATCH http://coder-server:8080/api/v2/templates/{template}/acl \ ### Parameters -| Name | In | Type | Required | Description | -|------------|------|--------------------------------------------------------------------|----------|-------------------------| -| `template` | path | string(uuid) | true | Template ID | -| `body` | body | [codersdk.UpdateTemplateACL](schemas.md#codersdkupdatetemplateacl) | true | Update template request | +| Name | In | Type | Required | Description | +|------------|------|--------------------------------------------------------------------|----------|-----------------------------| +| `template` | path | string(uuid) | true | Template ID | +| `body` | body | [codersdk.UpdateTemplateACL](schemas.md#codersdkupdatetemplateacl) | true | Update template ACL request | ### Example responses @@ -4254,3 +4254,42 @@ curl -X PATCH http://coder-server:8080/api/v2/workspaceproxies/{workspaceproxy} | 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.WorkspaceProxy](schemas.md#codersdkworkspaceproxy) | To perform this operation, you must be authenticated. [Learn more](authentication.md). + +## Get workspace external agent credentials + +### Code samples + +```shell +# Example request using curl +curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace}/external-agent/{agent}/credentials \ + -H 'Accept: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`GET /workspaces/{workspace}/external-agent/{agent}/credentials` + +### Parameters + +| Name | In | Type | Required | Description | +|-------------|------|--------------|----------|--------------| +| `workspace` | path | string(uuid) | true | Workspace ID | +| `agent` | path | string | true | Agent name | + +### Example responses + +> 200 Response + +```json +{ + "agent_token": "string", + "command": "string" +} +``` + +### Responses + +| Status | Meaning | Description | Schema | +|--------|---------------------------------------------------------|-------------|----------------------------------------------------------------------------------| +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.ExternalAgentCredentials](schemas.md#codersdkexternalagentcredentials) | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). diff --git a/docs/reference/api/initscript.md b/docs/reference/api/initscript.md new file mode 100644 index 0000000000000..ecd8c8008a6a4 --- /dev/null +++ b/docs/reference/api/initscript.md @@ -0,0 +1,26 @@ +# InitScript + +## Get agent init script + +### Code samples + +```shell +# Example request using curl +curl -X GET http://coder-server:8080/api/v2/init-script/{os}/{arch} + +``` + +`GET /init-script/{os}/{arch}` + +### Parameters + +| Name | In | Type | Required | Description | +|--------|------|--------|----------|------------------| +| `os` | path | string | true | Operating system | +| `arch` | path | string | true | Architecture | + +### Responses + +| Status | Meaning | Description | Schema | +|--------|---------------------------------------------------------|-------------|--------| +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | Success | | diff --git a/docs/reference/api/members.md b/docs/reference/api/members.md index 4b0adbf45e338..5a6bd2c861bac 100644 --- a/docs/reference/api/members.md +++ b/docs/reference/api/members.md @@ -213,7 +213,9 @@ Status Code **200** | `resource_type` | `system` | | `resource_type` | `tailnet_coordinator` | | `resource_type` | `template` | +| `resource_type` | `usage_event` | | `resource_type` | `user` | +| `resource_type` | `user_secret` | | `resource_type` | `webpush_subscription` | | `resource_type` | `workspace` | | `resource_type` | `workspace_agent_devcontainers` | @@ -383,7 +385,9 @@ Status Code **200** | `resource_type` | `system` | | `resource_type` | `tailnet_coordinator` | | `resource_type` | `template` | +| `resource_type` | `usage_event` | | `resource_type` | `user` | +| `resource_type` | `user_secret` | | `resource_type` | `webpush_subscription` | | `resource_type` | `workspace` | | `resource_type` | `workspace_agent_devcontainers` | @@ -553,7 +557,9 @@ Status Code **200** | `resource_type` | `system` | | `resource_type` | `tailnet_coordinator` | | `resource_type` | `template` | +| `resource_type` | `usage_event` | | `resource_type` | `user` | +| `resource_type` | `user_secret` | | `resource_type` | `webpush_subscription` | | `resource_type` | `workspace` | | `resource_type` | `workspace_agent_devcontainers` | @@ -692,7 +698,9 @@ Status Code **200** | `resource_type` | `system` | | `resource_type` | `tailnet_coordinator` | | `resource_type` | `template` | +| `resource_type` | `usage_event` | | `resource_type` | `user` | +| `resource_type` | `user_secret` | | `resource_type` | `webpush_subscription` | | `resource_type` | `workspace` | | `resource_type` | `workspace_agent_devcontainers` | @@ -1053,7 +1061,9 @@ Status Code **200** | `resource_type` | `system` | | `resource_type` | `tailnet_coordinator` | | `resource_type` | `template` | +| `resource_type` | `usage_event` | | `resource_type` | `user` | +| `resource_type` | `user_secret` | | `resource_type` | `webpush_subscription` | | `resource_type` | `workspace` | | `resource_type` | `workspace_agent_devcontainers` | diff --git a/docs/reference/api/organizations.md b/docs/reference/api/organizations.md index 497e3f56d4e47..d418a1fcba106 100644 --- a/docs/reference/api/organizations.md +++ b/docs/reference/api/organizations.md @@ -407,6 +407,7 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/provisi "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -457,6 +458,7 @@ Status Code **200** | `»» error` | string | false | | | | `»» template_version_id` | string(uuid) | false | | | | `»» workspace_build_id` | string(uuid) | false | | | +| `» logs_overflowed` | boolean | false | | | | `» metadata` | [codersdk.ProvisionerJobMetadata](schemas.md#codersdkprovisionerjobmetadata) | false | | | | `»» template_display_name` | string | false | | | | `»» template_icon` | string | false | | | @@ -534,6 +536,7 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/provisi "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md index 8370233d2bdd5..c5e99fcdbfc72 100644 --- a/docs/reference/api/schemas.md +++ b/docs/reference/api/schemas.md @@ -1056,6 +1056,21 @@ AuthorizationObject can represent a "set" of objects, such as: all workspaces in | `vscode_connection` | | `jetbrains_connection` | +## codersdk.CORSBehavior + +```json +"simple" +``` + +### Properties + +#### Enumerated Values + +| Value | +|------------| +| `simple` | +| `passthru` | + ## codersdk.ChangePasswordWithOneTimePasscodeRequest ```json @@ -1475,6 +1490,7 @@ AuthorizationObject can represent a "set" of objects, such as: all workspaces in ], "weeks": 0 }, + "cors_behavior": "simple", "default_ttl_ms": 0, "delete_ttl_ms": 0, "description": "string", @@ -1501,6 +1517,7 @@ AuthorizationObject can represent a "set" of objects, such as: all workspaces in | `allow_user_cancel_workspace_jobs` | boolean | false | | Allow users to cancel in-progress workspace jobs. *bool as the default value is "true". | | `autostart_requirement` | [codersdk.TemplateAutostartRequirement](#codersdktemplateautostartrequirement) | false | | Autostart requirement allows optionally specifying the autostart allowed days for workspaces created from this template. This is an enterprise feature. | | `autostop_requirement` | [codersdk.TemplateAutostopRequirement](#codersdktemplateautostoprequirement) | false | | Autostop requirement allows optionally specifying the autostop requirement for workspaces created from this template. This is an enterprise feature. | +| `cors_behavior` | [codersdk.CORSBehavior](#codersdkcorsbehavior) | false | | Cors behavior allows optionally specifying the CORS behavior for all shared ports. | | `default_ttl_ms` | integer | false | | Default ttl ms allows optionally specifying the default TTL for all workspaces created from this template. | | `delete_ttl_ms` | integer | false | | Delete ttl ms allows optionally specifying the max lifetime before Coder permanently deletes dormant workspaces created from this template. | | `description` | string | false | | Description is a description of what the template contains. It must be less than 128 bytes. | @@ -3303,6 +3320,23 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o | `web-push` | | `oauth2` | | `mcp-server-http` | +| `workspace-sharing` | + +## codersdk.ExternalAgentCredentials + +```json +{ + "agent_token": "string", + "command": "string" +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|---------------|--------|----------|--------------|-------------| +| `agent_token` | string | false | | | +| `command` | string | false | | | ## codersdk.ExternalAuth @@ -5513,7 +5547,9 @@ Only certain features set these fields: - FeatureManagedAgentLimit| ```json { "default": true, + "description": "string", "desiredPrebuildInstances": 0, + "icon": "string", "id": "string", "name": "string", "parameters": [ @@ -5530,7 +5566,9 @@ Only certain features set these fields: - FeatureManagedAgentLimit| | Name | Type | Required | Restrictions | Description | |----------------------------|---------------------------------------------------------------|----------|--------------|-------------| | `default` | boolean | false | | | +| `description` | string | false | | | | `desiredPrebuildInstances` | integer | false | | | +| `icon` | string | false | | | | `id` | string | false | | | | `name` | string | false | | | | `parameters` | array of [codersdk.PresetParameter](#codersdkpresetparameter) | false | | | @@ -5885,6 +5923,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -5922,6 +5961,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| | `file_id` | string | false | | | | `id` | string | false | | | | `input` | [codersdk.ProvisionerJobInput](#codersdkprovisionerjobinput) | false | | | +| `logs_overflowed` | boolean | false | | | | `metadata` | [codersdk.ProvisionerJobMetadata](#codersdkprovisionerjobmetadata) | false | | | | `organization_id` | string | false | | | | `queue_position` | integer | false | | | @@ -6354,7 +6394,9 @@ Only certain features set these fields: - FeatureManagedAgentLimit| | `system` | | `tailnet_coordinator` | | `template` | +| `usage_event` | | `user` | +| `user_secret` | | `webpush_subscription` | | `workspace` | | `workspace_agent_devcontainers` | @@ -6966,6 +7008,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| "p95": 146 } }, + "cors_behavior": "simple", "created_at": "2019-08-24T14:15:22Z", "created_by_id": "9377d689-01fb-4abf-8450-3368d2c1924f", "created_by_name": "string", @@ -7005,6 +7048,7 @@ Only certain features set these fields: - FeatureManagedAgentLimit| | `autostart_requirement` | [codersdk.TemplateAutostartRequirement](#codersdktemplateautostartrequirement) | false | | | | `autostop_requirement` | [codersdk.TemplateAutostopRequirement](#codersdktemplateautostoprequirement) | false | | Autostop requirement and AutostartRequirement are enterprise features. Its value is only used if your license is entitled to use the advanced template scheduling feature. | | `build_time_stats` | [codersdk.TemplateBuildTimeStats](#codersdktemplatebuildtimestats) | false | | | +| `cors_behavior` | [codersdk.CORSBehavior](#codersdkcorsbehavior) | false | | | | `created_at` | string | false | | | | `created_by_id` | string | false | | | | `created_by_name` | string | false | | | @@ -7586,6 +7630,7 @@ Restarts will only happen on weekdays in this list on weeks which line up with W "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "username": "string" }, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "job": { "available_workers": [ @@ -7603,6 +7648,7 @@ Restarts will only happen on weekdays in this list on weeks which line up with W "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -7649,6 +7695,7 @@ Restarts will only happen on weekdays in this list on weeks which line up with W | `archived` | boolean | false | | | | `created_at` | string | false | | | | `created_by` | [codersdk.MinimalUser](#codersdkminimaluser) | false | | | +| `has_external_agent` | boolean | false | | | | `id` | string | false | | | | `job` | [codersdk.ProvisionerJob](#codersdkprovisionerjob) | false | | | | `matched_provisioners` | [codersdk.MatchedProvisioners](#codersdkmatchedprovisioners) | false | | | @@ -8040,6 +8087,71 @@ Restarts will only happen on weekdays in this list on weeks which line up with W | `user_perms` | object | false | | User perms should be a mapping of user ID to role. The user ID must be the uuid of the user, not a username or email address. | | » `[any property]` | [codersdk.TemplateRole](#codersdktemplaterole) | false | | | +## codersdk.UpdateTemplateMeta + +```json +{ + "activity_bump_ms": 0, + "allow_user_autostart": true, + "allow_user_autostop": true, + "allow_user_cancel_workspace_jobs": true, + "autostart_requirement": { + "days_of_week": [ + "monday" + ] + }, + "autostop_requirement": { + "days_of_week": [ + "monday" + ], + "weeks": 0 + }, + "cors_behavior": "simple", + "default_ttl_ms": 0, + "deprecation_message": "string", + "description": "string", + "disable_everyone_group_access": true, + "display_name": "string", + "failure_ttl_ms": 0, + "icon": "string", + "max_port_share_level": "owner", + "name": "string", + "require_active_version": true, + "time_til_dormant_autodelete_ms": 0, + "time_til_dormant_ms": 0, + "update_workspace_dormant_at": true, + "update_workspace_last_used_at": true, + "use_classic_parameter_flow": true +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|------------------------------------|--------------------------------------------------------------------------------|----------|--------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `activity_bump_ms` | integer | false | | Activity bump ms allows optionally specifying the activity bump duration for all workspaces created from this template. Defaults to 1h but can be set to 0 to disable activity bumping. | +| `allow_user_autostart` | boolean | false | | | +| `allow_user_autostop` | boolean | false | | | +| `allow_user_cancel_workspace_jobs` | boolean | false | | | +| `autostart_requirement` | [codersdk.TemplateAutostartRequirement](#codersdktemplateautostartrequirement) | false | | | +| `autostop_requirement` | [codersdk.TemplateAutostopRequirement](#codersdktemplateautostoprequirement) | false | | Autostop requirement and AutostartRequirement can only be set if your license includes the advanced template scheduling feature. If you attempt to set this value while unlicensed, it will be ignored. | +| `cors_behavior` | [codersdk.CORSBehavior](#codersdkcorsbehavior) | false | | | +| `default_ttl_ms` | integer | false | | | +| `deprecation_message` | string | false | | Deprecation message if set, will mark the template as deprecated and block any new workspaces from using this template. If passed an empty string, will remove the deprecated message, making the template usable for new workspaces again. | +| `description` | string | false | | | +| `disable_everyone_group_access` | boolean | false | | Disable everyone group access allows optionally disabling the default behavior of granting the 'everyone' group access to use the template. If this is set to true, the template will not be available to all users, and must be explicitly granted to users or groups in the permissions settings of the template. | +| `display_name` | string | false | | | +| `failure_ttl_ms` | integer | false | | | +| `icon` | string | false | | | +| `max_port_share_level` | [codersdk.WorkspaceAgentPortShareLevel](#codersdkworkspaceagentportsharelevel) | false | | | +| `name` | string | false | | | +| `require_active_version` | boolean | false | | Require active version mandates workspaces built using this template use the active version of the template. This option has no effect on template admins. | +| `time_til_dormant_autodelete_ms` | integer | false | | | +| `time_til_dormant_ms` | integer | false | | | +| `update_workspace_dormant_at` | boolean | false | | Update workspace dormant at updates the dormant_at field of workspaces spawned from the template. This is useful for preventing dormant workspaces being immediately deleted when updating the dormant_ttl field to a new, shorter value. | +| `update_workspace_last_used_at` | boolean | false | | Update workspace last used at updates the last_used_at field of workspaces spawned from the template. This is useful for preventing workspaces being immediately locked when updating the inactivity_ttl field to a new, shorter value. | +| `use_classic_parameter_flow` | boolean | false | | Use classic parameter flow is a flag that switches the default behavior to use the classic parameter flow when creating a workspace. This only affects deployments with the experiment "dynamic-parameters" enabled. This setting will live for a period after the experiment is made the default. An "opt-out" is present in case the new feature breaks some existing templates. | + ## codersdk.UpdateUserAppearanceSettingsRequest ```json @@ -8122,6 +8234,30 @@ Restarts will only happen on weekdays in this list on weeks which line up with W The schedule must be daily with a single time, and should have a timezone specified via a CRON_TZ prefix (otherwise UTC will be used). If the schedule is empty, the user will be updated to use the default schedule.| +## codersdk.UpdateWorkspaceACL + +```json +{ + "group_roles": { + "property1": "admin", + "property2": "admin" + }, + "user_roles": { + "property1": "admin", + "property2": "admin" + } +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|--------------------|--------------------------------------------------|----------|--------------|-------------------------------------------------------------------------------------------------------------------------------------------------------| +| `group_roles` | object | false | | | +| » `[any property]` | [codersdk.WorkspaceRole](#codersdkworkspacerole) | false | | | +| `user_roles` | object | false | | Keys must be valid UUIDs. To remove a user/group from the ACL use "" as the role name (available as a constant named `codersdk.WorkspaceRoleDeleted`) | +| » `[any property]` | [codersdk.WorkspaceRole](#codersdkworkspacerole) | false | | | + ## codersdk.UpdateWorkspaceAutomaticUpdatesRequest ```json @@ -8760,6 +8896,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| "daily_cost": 0, "deadline": "2019-08-24T14:15:22Z", "has_ai_task": true, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "initiator_name": "string", @@ -8779,6 +8916,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -9869,6 +10007,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| "daily_cost": 0, "deadline": "2019-08-24T14:15:22Z", "has_ai_task": true, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "initiator_name": "string", @@ -9888,6 +10027,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -10077,6 +10217,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| | `daily_cost` | integer | false | | | | `deadline` | string | false | | | | `has_ai_task` | boolean | false | | | +| `has_external_agent` | boolean | false | | | | `id` | string | false | | | | `initiator_id` | string | false | | | | `initiator_name` | string | false | | | @@ -10519,6 +10660,22 @@ If the schedule is empty, the user will be updated to use the default schedule.| | `sensitive` | boolean | false | | | | `value` | string | false | | | +## codersdk.WorkspaceRole + +```json +"admin" +``` + +### Properties + +#### Enumerated Values + +| Value | +|---------| +| `admin` | +| `use` | +| `` | + ## codersdk.WorkspaceStatus ```json @@ -10600,6 +10757,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| "daily_cost": 0, "deadline": "2019-08-24T14:15:22Z", "has_ai_task": true, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "initiator_name": "string", @@ -10619,6 +10777,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", diff --git a/docs/reference/api/templates.md b/docs/reference/api/templates.md index 8d84623a15c8e..db5213bdf8ef5 100644 --- a/docs/reference/api/templates.md +++ b/docs/reference/api/templates.md @@ -57,6 +57,7 @@ To include deprecated templates, specify `deprecated:true` in the search query. "p95": 146 } }, + "cors_behavior": "simple", "created_at": "2019-08-24T14:15:22Z", "created_by_id": "9377d689-01fb-4abf-8450-3368d2c1924f", "created_by_name": "string", @@ -113,6 +114,7 @@ Restarts will only happen on weekdays in this list on weeks which line up with W |`»» [any property]`|[codersdk.TransitionStats](schemas.md#codersdktransitionstats)|false||| |`»»» p50`|integer|false||| |`»»» p95`|integer|false||| +|`» cors_behavior`|[codersdk.CORSBehavior](schemas.md#codersdkcorsbehavior)|false||| |`» created_at`|string(date-time)|false||| |`» created_by_id`|string(uuid)|false||| |`» created_by_name`|string|false||| @@ -141,6 +143,8 @@ Restarts will only happen on weekdays in this list on weeks which line up with W | Property | Value | |------------------------|-----------------| +| `cors_behavior` | `simple` | +| `cors_behavior` | `passthru` | | `max_port_share_level` | `owner` | | `max_port_share_level` | `authenticated` | | `max_port_share_level` | `organization` | @@ -182,6 +186,7 @@ curl -X POST http://coder-server:8080/api/v2/organizations/{organization}/templa ], "weeks": 0 }, + "cors_behavior": "simple", "default_ttl_ms": 0, "delete_ttl_ms": 0, "description": "string", @@ -238,6 +243,7 @@ curl -X POST http://coder-server:8080/api/v2/organizations/{organization}/templa "p95": 146 } }, + "cors_behavior": "simple", "created_at": "2019-08-24T14:15:22Z", "created_by_id": "9377d689-01fb-4abf-8450-3368d2c1924f", "created_by_name": "string", @@ -387,6 +393,7 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/templat "p95": 146 } }, + "cors_behavior": "simple", "created_at": "2019-08-24T14:15:22Z", "created_by_id": "9377d689-01fb-4abf-8450-3368d2c1924f", "created_by_name": "string", @@ -455,6 +462,7 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/templat "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "username": "string" }, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "job": { "available_workers": [ @@ -472,6 +480,7 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/templat "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -553,6 +562,7 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/templat "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "username": "string" }, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "job": { "available_workers": [ @@ -570,6 +580,7 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/templat "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -675,6 +686,7 @@ curl -X POST http://coder-server:8080/api/v2/organizations/{organization}/templa "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "username": "string" }, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "job": { "available_workers": [ @@ -692,6 +704,7 @@ curl -X POST http://coder-server:8080/api/v2/organizations/{organization}/templa "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -790,6 +803,7 @@ To include deprecated templates, specify `deprecated:true` in the search query. "p95": 146 } }, + "cors_behavior": "simple", "created_at": "2019-08-24T14:15:22Z", "created_by_id": "9377d689-01fb-4abf-8450-3368d2c1924f", "created_by_name": "string", @@ -846,6 +860,7 @@ Restarts will only happen on weekdays in this list on weeks which line up with W |`»» [any property]`|[codersdk.TransitionStats](schemas.md#codersdktransitionstats)|false||| |`»»» p50`|integer|false||| |`»»» p95`|integer|false||| +|`» cors_behavior`|[codersdk.CORSBehavior](schemas.md#codersdkcorsbehavior)|false||| |`» created_at`|string(date-time)|false||| |`» created_by_id`|string(uuid)|false||| |`» created_by_name`|string|false||| @@ -874,6 +889,8 @@ Restarts will only happen on weekdays in this list on weeks which line up with W | Property | Value | |------------------------|-----------------| +| `cors_behavior` | `simple` | +| `cors_behavior` | `passthru` | | `max_port_share_level` | `owner` | | `max_port_share_level` | `authenticated` | | `max_port_share_level` | `organization` | @@ -938,7 +955,7 @@ Status Code **200** To perform this operation, you must be authenticated. [Learn more](authentication.md). -## Get template metadata by ID +## Get template settings by ID ### Code samples @@ -990,6 +1007,7 @@ curl -X GET http://coder-server:8080/api/v2/templates/{template} \ "p95": 146 } }, + "cors_behavior": "simple", "created_at": "2019-08-24T14:15:22Z", "created_by_id": "9377d689-01fb-4abf-8450-3368d2c1924f", "created_by_name": "string", @@ -1068,24 +1086,64 @@ curl -X DELETE http://coder-server:8080/api/v2/templates/{template} \ To perform this operation, you must be authenticated. [Learn more](authentication.md). -## Update template metadata by ID +## Update template settings by ID ### Code samples ```shell # Example request using curl curl -X PATCH http://coder-server:8080/api/v2/templates/{template} \ + -H 'Content-Type: application/json' \ -H 'Accept: application/json' \ -H 'Coder-Session-Token: API_KEY' ``` `PATCH /templates/{template}` +> Body parameter + +```json +{ + "activity_bump_ms": 0, + "allow_user_autostart": true, + "allow_user_autostop": true, + "allow_user_cancel_workspace_jobs": true, + "autostart_requirement": { + "days_of_week": [ + "monday" + ] + }, + "autostop_requirement": { + "days_of_week": [ + "monday" + ], + "weeks": 0 + }, + "cors_behavior": "simple", + "default_ttl_ms": 0, + "deprecation_message": "string", + "description": "string", + "disable_everyone_group_access": true, + "display_name": "string", + "failure_ttl_ms": 0, + "icon": "string", + "max_port_share_level": "owner", + "name": "string", + "require_active_version": true, + "time_til_dormant_autodelete_ms": 0, + "time_til_dormant_ms": 0, + "update_workspace_dormant_at": true, + "update_workspace_last_used_at": true, + "use_classic_parameter_flow": true +} +``` + ### Parameters -| Name | In | Type | Required | Description | -|------------|------|--------------|----------|-------------| -| `template` | path | string(uuid) | true | Template ID | +| Name | In | Type | Required | Description | +|------------|------|----------------------------------------------------------------------|----------|---------------------------------| +| `template` | path | string(uuid) | true | Template ID | +| `body` | body | [codersdk.UpdateTemplateMeta](schemas.md#codersdkupdatetemplatemeta) | true | Patch template settings request | ### Example responses @@ -1120,6 +1178,7 @@ curl -X PATCH http://coder-server:8080/api/v2/templates/{template} \ "p95": 146 } }, + "cors_behavior": "simple", "created_at": "2019-08-24T14:15:22Z", "created_by_id": "9377d689-01fb-4abf-8450-3368d2c1924f", "created_by_name": "string", @@ -1234,6 +1293,7 @@ curl -X GET http://coder-server:8080/api/v2/templates/{template}/versions \ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "username": "string" }, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "job": { "available_workers": [ @@ -1251,6 +1311,7 @@ curl -X GET http://coder-server:8080/api/v2/templates/{template}/versions \ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -1310,6 +1371,7 @@ Status Code **200** | `»» avatar_url` | string(uri) | false | | | | `»» id` | string(uuid) | true | | | | `»» username` | string | true | | | +| `» has_external_agent` | boolean | false | | | | `» id` | string(uuid) | false | | | | `» job` | [codersdk.ProvisionerJob](schemas.md#codersdkprovisionerjob) | false | | | | `»» available_workers` | array | false | | | @@ -1324,6 +1386,7 @@ Status Code **200** | `»»» error` | string | false | | | | `»»» template_version_id` | string(uuid) | false | | | | `»»» workspace_build_id` | string(uuid) | false | | | +| `»» logs_overflowed` | boolean | false | | | | `»» metadata` | [codersdk.ProvisionerJobMetadata](schemas.md#codersdkprovisionerjobmetadata) | false | | | | `»»» template_display_name` | string | false | | | | `»»» template_icon` | string | false | | | @@ -1513,6 +1576,7 @@ curl -X GET http://coder-server:8080/api/v2/templates/{template}/versions/{templ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "username": "string" }, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "job": { "available_workers": [ @@ -1530,6 +1594,7 @@ curl -X GET http://coder-server:8080/api/v2/templates/{template}/versions/{templ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -1589,6 +1654,7 @@ Status Code **200** | `»» avatar_url` | string(uri) | false | | | | `»» id` | string(uuid) | true | | | | `»» username` | string | true | | | +| `» has_external_agent` | boolean | false | | | | `» id` | string(uuid) | false | | | | `» job` | [codersdk.ProvisionerJob](schemas.md#codersdkprovisionerjob) | false | | | | `»» available_workers` | array | false | | | @@ -1603,6 +1669,7 @@ Status Code **200** | `»»» error` | string | false | | | | `»»» template_version_id` | string(uuid) | false | | | | `»»» workspace_build_id` | string(uuid) | false | | | +| `»» logs_overflowed` | boolean | false | | | | `»» metadata` | [codersdk.ProvisionerJobMetadata](schemas.md#codersdkprovisionerjobmetadata) | false | | | | `»»» template_display_name` | string | false | | | | `»»» template_icon` | string | false | | | @@ -1682,6 +1749,7 @@ curl -X GET http://coder-server:8080/api/v2/templateversions/{templateversion} \ "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "username": "string" }, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "job": { "available_workers": [ @@ -1699,6 +1767,7 @@ curl -X GET http://coder-server:8080/api/v2/templateversions/{templateversion} \ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -1789,6 +1858,7 @@ curl -X PATCH http://coder-server:8080/api/v2/templateversions/{templateversion} "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "username": "string" }, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "job": { "available_workers": [ @@ -1806,6 +1876,7 @@ curl -X PATCH http://coder-server:8080/api/v2/templateversions/{templateversion} "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -2003,6 +2074,7 @@ curl -X POST http://coder-server:8080/api/v2/templateversions/{templateversion}/ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -2076,6 +2148,7 @@ curl -X GET http://coder-server:8080/api/v2/templateversions/{templateversion}/d "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -2914,7 +2987,9 @@ curl -X GET http://coder-server:8080/api/v2/templateversions/{templateversion}/p [ { "default": true, + "description": "string", "desiredPrebuildInstances": 0, + "icon": "string", "id": "string", "name": "string", "parameters": [ @@ -2941,7 +3016,9 @@ Status Code **200** |------------------------------|---------|----------|--------------|-------------| | `[array item]` | array | false | | | | `» default` | boolean | false | | | +| `» description` | string | false | | | | `» desiredPrebuildInstances` | integer | false | | | +| `» icon` | string | false | | | | `» id` | string | false | | | | `» name` | string | false | | | | `» parameters` | array | false | | | diff --git a/docs/reference/api/users.md b/docs/reference/api/users.md index 43842fde6539b..bef79ddaad4e3 100644 --- a/docs/reference/api/users.md +++ b/docs/reference/api/users.md @@ -919,10 +919,10 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/keys/{keyid} \ ### Parameters -| Name | In | Type | Required | Description | -|---------|------|--------------|----------|----------------------| -| `user` | path | string | true | User ID, name, or me | -| `keyid` | path | string(uuid) | true | Key ID | +| Name | In | Type | Required | Description | +|---------|------|----------------|----------|----------------------| +| `user` | path | string | true | User ID, name, or me | +| `keyid` | path | string(string) | true | Key ID | ### Example responses @@ -965,10 +965,10 @@ curl -X DELETE http://coder-server:8080/api/v2/users/{user}/keys/{keyid} \ ### Parameters -| Name | In | Type | Required | Description | -|---------|------|--------------|----------|----------------------| -| `user` | path | string | true | User ID, name, or me | -| `keyid` | path | string(uuid) | true | Key ID | +| Name | In | Type | Required | Description | +|---------|------|----------------|----------|----------------------| +| `user` | path | string | true | User ID, name, or me | +| `keyid` | path | string(string) | true | Key ID | ### Responses diff --git a/docs/reference/api/workspaces.md b/docs/reference/api/workspaces.md index debcb421e02e3..ffa18b46c8df9 100644 --- a/docs/reference/api/workspaces.md +++ b/docs/reference/api/workspaces.md @@ -88,6 +88,7 @@ of the template will be used. "daily_cost": 0, "deadline": "2019-08-24T14:15:22Z", "has_ai_task": true, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "initiator_name": "string", @@ -107,6 +108,7 @@ of the template will be used. "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -375,6 +377,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam "daily_cost": 0, "deadline": "2019-08-24T14:15:22Z", "has_ai_task": true, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "initiator_name": "string", @@ -394,6 +397,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -687,6 +691,7 @@ of the template will be used. "daily_cost": 0, "deadline": "2019-08-24T14:15:22Z", "has_ai_task": true, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "initiator_name": "string", @@ -706,6 +711,7 @@ of the template will be used. "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -927,11 +933,11 @@ curl -X GET http://coder-server:8080/api/v2/workspaces \ ### Parameters -| Name | In | Type | Required | Description | -|----------|-------|---------|----------|----------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `q` | query | string | false | Search query in the format `key:value`. Available keys are: owner, template, name, status, has-agent, dormant, last_used_after, last_used_before, has-ai-task. | -| `limit` | query | integer | false | Page limit | -| `offset` | query | integer | false | Page offset | +| Name | In | Type | Required | Description | +|----------|-------|---------|----------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `q` | query | string | false | Search query in the format `key:value`. Available keys are: owner, template, name, status, has-agent, dormant, last_used_after, last_used_before, has-ai-task, has_external_agent. | +| `limit` | query | integer | false | Page limit | +| `offset` | query | integer | false | Page offset | ### Example responses @@ -977,6 +983,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces \ "daily_cost": 0, "deadline": "2019-08-24T14:15:22Z", "has_ai_task": true, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "initiator_name": "string", @@ -996,6 +1003,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces \ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -1248,6 +1256,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace} \ "daily_cost": 0, "deadline": "2019-08-24T14:15:22Z", "has_ai_task": true, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "initiator_name": "string", @@ -1267,6 +1276,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace} \ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", @@ -1509,6 +1519,49 @@ curl -X PATCH http://coder-server:8080/api/v2/workspaces/{workspace} \ To perform this operation, you must be authenticated. [Learn more](authentication.md). +## Update workspace ACL + +### Code samples + +```shell +# Example request using curl +curl -X PATCH http://coder-server:8080/api/v2/workspaces/{workspace}/acl \ + -H 'Content-Type: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`PATCH /workspaces/{workspace}/acl` + +> Body parameter + +```json +{ + "group_roles": { + "property1": "admin", + "property2": "admin" + }, + "user_roles": { + "property1": "admin", + "property2": "admin" + } +} +``` + +### Parameters + +| Name | In | Type | Required | Description | +|-------------|------|----------------------------------------------------------------------|----------|------------------------------| +| `workspace` | path | string(uuid) | true | Workspace ID | +| `body` | body | [codersdk.UpdateWorkspaceACL](schemas.md#codersdkupdateworkspaceacl) | true | Update workspace ACL request | + +### Responses + +| Status | Meaning | Description | Schema | +|--------|-----------------------------------------------------------------|-------------|--------| +| 204 | [No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5) | No Content | | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). + ## Update workspace autostart schedule by ID ### Code samples @@ -1651,6 +1704,7 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/dormant \ "daily_cost": 0, "deadline": "2019-08-24T14:15:22Z", "has_ai_task": true, + "has_external_agent": true, "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "initiator_id": "06588898-9a84-4b35-ba8f-f9cbd64946f3", "initiator_name": "string", @@ -1670,6 +1724,7 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/dormant \ "template_version_id": "0ba39c92-1f1b-4c32-aa3e-9925d7713eb1", "workspace_build_id": "badaf2eb-96c5-4050-9f1d-db2d39ca5478" }, + "logs_overflowed": true, "metadata": { "template_display_name": "string", "template_icon": "string", diff --git a/docs/reference/cli/create.md b/docs/reference/cli/create.md index 58c0fad4a14e8..d18b4ea5c8e05 100644 --- a/docs/reference/cli/create.md +++ b/docs/reference/cli/create.md @@ -37,6 +37,15 @@ Specify a template name. Specify a template version name. +### --preset + +| | | +|-------------|---------------------------------| +| Type | string | +| Environment | $CODER_PRESET_NAME | + +Specify the name of a template version preset. Use 'none' to explicitly indicate that no preset should be used. + ### --start-at | | | diff --git a/docs/reference/cli/external-workspaces.md b/docs/reference/cli/external-workspaces.md new file mode 100644 index 0000000000000..5e1f27a7794ad --- /dev/null +++ b/docs/reference/cli/external-workspaces.md @@ -0,0 +1,29 @@ + +# external-workspaces + +Create or manage external workspaces + +## Usage + +```console +coder external-workspaces [flags] [subcommand] +``` + +## Subcommands + +| Name | Purpose | +|--------------------------------------------------------------------------------|--------------------------------------------| +| [create](./external-workspaces_create.md) | Create a new external workspace | +| [agent-instructions](./external-workspaces_agent-instructions.md) | Get the instructions for an external agent | +| [list](./external-workspaces_list.md) | List external workspaces | + +## Options + +### -O, --org + +| | | +|-------------|----------------------------------| +| Type | string | +| Environment | $CODER_ORGANIZATION | + +Select which organization (uuid or name) to use. diff --git a/docs/reference/cli/external-workspaces_agent-instructions.md b/docs/reference/cli/external-workspaces_agent-instructions.md new file mode 100644 index 0000000000000..d284a48de7173 --- /dev/null +++ b/docs/reference/cli/external-workspaces_agent-instructions.md @@ -0,0 +1,21 @@ + +# external-workspaces agent-instructions + +Get the instructions for an external agent + +## Usage + +```console +coder external-workspaces agent-instructions [flags] [user/]workspace[.agent] +``` + +## Options + +### -o, --output + +| | | +|---------|-------------------------| +| Type | text\|json | +| Default | text | + +Output format. diff --git a/docs/reference/cli/external-workspaces_create.md b/docs/reference/cli/external-workspaces_create.md new file mode 100644 index 0000000000000..b0744387a1d70 --- /dev/null +++ b/docs/reference/cli/external-workspaces_create.md @@ -0,0 +1,128 @@ + +# external-workspaces create + +Create a new external workspace + +## Usage + +```console +coder external-workspaces create [flags] [workspace] +``` + +## Description + +```console + - Create a workspace for another user (if you have permission): + + $ coder create / +``` + +## Options + +### -t, --template + +| | | +|-------------|-----------------------------------| +| Type | string | +| Environment | $CODER_TEMPLATE_NAME | + +Specify a template name. + +### --template-version + +| | | +|-------------|--------------------------------------| +| Type | string | +| Environment | $CODER_TEMPLATE_VERSION | + +Specify a template version name. + +### --preset + +| | | +|-------------|---------------------------------| +| Type | string | +| Environment | $CODER_PRESET_NAME | + +Specify the name of a template version preset. Use 'none' to explicitly indicate that no preset should be used. + +### --start-at + +| | | +|-------------|----------------------------------------| +| Type | string | +| Environment | $CODER_WORKSPACE_START_AT | + +Specify the workspace autostart schedule. Check coder schedule start --help for the syntax. + +### --stop-after + +| | | +|-------------|------------------------------------------| +| Type | duration | +| Environment | $CODER_WORKSPACE_STOP_AFTER | + +Specify a duration after which the workspace should shut down (e.g. 8h). + +### --automatic-updates + +| | | +|-------------|-------------------------------------------------| +| Type | string | +| Environment | $CODER_WORKSPACE_AUTOMATIC_UPDATES | +| Default | never | + +Specify automatic updates setting for the workspace (accepts 'always' or 'never'). + +### --copy-parameters-from + +| | | +|-------------|----------------------------------------------------| +| Type | string | +| Environment | $CODER_WORKSPACE_COPY_PARAMETERS_FROM | + +Specify the source workspace name to copy parameters from. + +### -y, --yes + +| | | +|------|-------------------| +| Type | bool | + +Bypass prompts. + +### --parameter + +| | | +|-------------|------------------------------------| +| Type | string-array | +| Environment | $CODER_RICH_PARAMETER | + +Rich parameter value in the format "name=value". + +### --rich-parameter-file + +| | | +|-------------|-----------------------------------------| +| Type | string | +| Environment | $CODER_RICH_PARAMETER_FILE | + +Specify a file path with values for rich parameters defined in the template. The file should be in YAML format, containing key-value pairs for the parameters. + +### --parameter-default + +| | | +|-------------|--------------------------------------------| +| Type | string-array | +| Environment | $CODER_RICH_PARAMETER_DEFAULT | + +Rich parameter default values in the format "name=value". + +### -O, --org + +| | | +|-------------|----------------------------------| +| Type | string | +| Environment | $CODER_ORGANIZATION | + +Select which organization (uuid or name) to use. diff --git a/docs/reference/cli/external-workspaces_list.md b/docs/reference/cli/external-workspaces_list.md new file mode 100644 index 0000000000000..061aaa29d7a0b --- /dev/null +++ b/docs/reference/cli/external-workspaces_list.md @@ -0,0 +1,51 @@ + +# external-workspaces list + +List external workspaces + +Aliases: + +* ls + +## Usage + +```console +coder external-workspaces list [flags] +``` + +## Options + +### -a, --all + +| | | +|------|-------------------| +| Type | bool | + +Specifies whether all workspaces will be listed or not. + +### --search + +| | | +|---------|-----------------------| +| Type | string | +| Default | owner:me | + +Search for a workspace with a query. + +### -c, --column + +| | | +|---------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Type | [favorite\|workspace\|organization id\|organization name\|template\|status\|healthy\|last built\|current version\|outdated\|starts at\|starts next\|stops after\|stops next\|daily cost] | +| Default | workspace,template,status,healthy,last built,current version,outdated | + +Columns to display in table output. + +### -o, --output + +| | | +|---------|--------------------------| +| Type | table\|json | +| Default | table | + +Output format. diff --git a/docs/reference/cli/index.md b/docs/reference/cli/index.md index 1992e5d6e9ac3..101186eeea91e 100644 --- a/docs/reference/cli/index.md +++ b/docs/reference/cli/index.md @@ -22,51 +22,52 @@ Coder — A tool for provisioning self-hosted development environments with Terr ## Subcommands -| Name | Purpose | -|----------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------| -| [completion](./completion.md) | Install or update shell completion scripts for the detected or chosen shell. | -| [dotfiles](./dotfiles.md) | Personalize your workspace by applying a canonical dotfiles repository | -| [external-auth](./external-auth.md) | Manage external authentication | -| [login](./login.md) | Authenticate with Coder deployment | -| [logout](./logout.md) | Unauthenticate your local session | -| [netcheck](./netcheck.md) | Print network debug information for DERP and STUN | -| [notifications](./notifications.md) | Manage Coder notifications | -| [organizations](./organizations.md) | Organization related commands | -| [port-forward](./port-forward.md) | Forward ports from a workspace to the local machine. For reverse port forwarding, use "coder ssh -R". | -| [publickey](./publickey.md) | Output your Coder public key used for Git operations | -| [reset-password](./reset-password.md) | Directly connect to the database to reset a user's password | -| [state](./state.md) | Manually manage Terraform state to fix broken workspaces | -| [templates](./templates.md) | Manage templates | -| [tokens](./tokens.md) | Manage personal access tokens | -| [users](./users.md) | Manage users | -| [version](./version.md) | Show coder version | -| [autoupdate](./autoupdate.md) | Toggle auto-update policy for a workspace | -| [config-ssh](./config-ssh.md) | Add an SSH Host entry for your workspaces "ssh workspace.coder" | -| [create](./create.md) | Create a workspace | -| [delete](./delete.md) | Delete a workspace | -| [favorite](./favorite.md) | Add a workspace to your favorites | -| [list](./list.md) | List workspaces | -| [open](./open.md) | Open a workspace | -| [ping](./ping.md) | Ping a workspace | -| [rename](./rename.md) | Rename a workspace | -| [restart](./restart.md) | Restart a workspace | -| [schedule](./schedule.md) | Schedule automated start and stop times for workspaces | -| [show](./show.md) | Display details of a workspace's resources and agents | -| [speedtest](./speedtest.md) | Run upload and download tests from your machine to a workspace | -| [ssh](./ssh.md) | Start a shell into a workspace or run a command | -| [start](./start.md) | Start a workspace | -| [stat](./stat.md) | Show resource usage for the current workspace. | -| [stop](./stop.md) | Stop a workspace | -| [unfavorite](./unfavorite.md) | Remove a workspace from your favorites | -| [update](./update.md) | Will update and start a given workspace if it is out of date. If the workspace is already running, it will be stopped first. | -| [whoami](./whoami.md) | Fetch authenticated user info for Coder deployment | -| [support](./support.md) | Commands for troubleshooting issues with a Coder deployment. | -| [server](./server.md) | Start a Coder server | -| [features](./features.md) | List Enterprise features | -| [licenses](./licenses.md) | Add, delete, and list licenses | -| [groups](./groups.md) | Manage groups | -| [prebuilds](./prebuilds.md) | Manage Coder prebuilds | -| [provisioner](./provisioner.md) | View and manage provisioner daemons and jobs | +| Name | Purpose | +|--------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------| +| [completion](./completion.md) | Install or update shell completion scripts for the detected or chosen shell. | +| [dotfiles](./dotfiles.md) | Personalize your workspace by applying a canonical dotfiles repository | +| [external-auth](./external-auth.md) | Manage external authentication | +| [login](./login.md) | Authenticate with Coder deployment | +| [logout](./logout.md) | Unauthenticate your local session | +| [netcheck](./netcheck.md) | Print network debug information for DERP and STUN | +| [notifications](./notifications.md) | Manage Coder notifications | +| [organizations](./organizations.md) | Organization related commands | +| [port-forward](./port-forward.md) | Forward ports from a workspace to the local machine. For reverse port forwarding, use "coder ssh -R". | +| [publickey](./publickey.md) | Output your Coder public key used for Git operations | +| [reset-password](./reset-password.md) | Directly connect to the database to reset a user's password | +| [state](./state.md) | Manually manage Terraform state to fix broken workspaces | +| [templates](./templates.md) | Manage templates | +| [tokens](./tokens.md) | Manage personal access tokens | +| [users](./users.md) | Manage users | +| [version](./version.md) | Show coder version | +| [autoupdate](./autoupdate.md) | Toggle auto-update policy for a workspace | +| [config-ssh](./config-ssh.md) | Add an SSH Host entry for your workspaces "ssh workspace.coder" | +| [create](./create.md) | Create a workspace | +| [delete](./delete.md) | Delete a workspace | +| [favorite](./favorite.md) | Add a workspace to your favorites | +| [list](./list.md) | List workspaces | +| [open](./open.md) | Open a workspace | +| [ping](./ping.md) | Ping a workspace | +| [rename](./rename.md) | Rename a workspace | +| [restart](./restart.md) | Restart a workspace | +| [schedule](./schedule.md) | Schedule automated start and stop times for workspaces | +| [show](./show.md) | Display details of a workspace's resources and agents | +| [speedtest](./speedtest.md) | Run upload and download tests from your machine to a workspace | +| [ssh](./ssh.md) | Start a shell into a workspace or run a command | +| [start](./start.md) | Start a workspace | +| [stat](./stat.md) | Show resource usage for the current workspace. | +| [stop](./stop.md) | Stop a workspace | +| [unfavorite](./unfavorite.md) | Remove a workspace from your favorites | +| [update](./update.md) | Will update and start a given workspace if it is out of date. If the workspace is already running, it will be stopped first. | +| [whoami](./whoami.md) | Fetch authenticated user info for Coder deployment | +| [support](./support.md) | Commands for troubleshooting issues with a Coder deployment. | +| [server](./server.md) | Start a Coder server | +| [features](./features.md) | List Enterprise features | +| [licenses](./licenses.md) | Add, delete, and list licenses | +| [groups](./groups.md) | Manage groups | +| [prebuilds](./prebuilds.md) | Manage Coder prebuilds | +| [provisioner](./provisioner.md) | View and manage provisioner daemons and jobs | +| [external-workspaces](./external-workspaces.md) | Create or manage external workspaces | ## Options diff --git a/docs/reference/cli/provisioner_jobs_list.md b/docs/reference/cli/provisioner_jobs_list.md index 07ad02f419bde..a0bff8554d610 100644 --- a/docs/reference/cli/provisioner_jobs_list.md +++ b/docs/reference/cli/provisioner_jobs_list.md @@ -45,10 +45,10 @@ Select which organization (uuid or name) to use. ### -c, --column -| | | -|---------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| Type | [id\|created at\|started at\|completed at\|canceled at\|error\|error code\|status\|worker id\|worker name\|file id\|tags\|queue position\|queue size\|organization id\|template version id\|workspace build id\|type\|available workers\|template version name\|template id\|template name\|template display name\|template icon\|workspace id\|workspace name\|organization\|queue] | -| Default | created at,id,type,template display name,status,queue,tags | +| | | +|---------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Type | [id\|created at\|started at\|completed at\|canceled at\|error\|error code\|status\|worker id\|worker name\|file id\|tags\|queue position\|queue size\|organization id\|template version id\|workspace build id\|type\|available workers\|template version name\|template id\|template name\|template display name\|template icon\|workspace id\|workspace name\|logs overflowed\|organization\|queue] | +| Default | created at,id,type,template display name,status,queue,tags | Columns to display in table output. diff --git a/docs/reference/cli/provisioner_list.md b/docs/reference/cli/provisioner_list.md index 128d76caf4c7e..aa67dcd815f67 100644 --- a/docs/reference/cli/provisioner_list.md +++ b/docs/reference/cli/provisioner_list.md @@ -25,6 +25,33 @@ coder provisioner list [flags] Limit the number of provisioners returned. +### -f, --show-offline + +| | | +|-------------|----------------------------------------------| +| Type | bool | +| Environment | $CODER_PROVISIONER_SHOW_OFFLINE | + +Show offline provisioners. + +### -s, --status + +| | | +|-------------|---------------------------------------------| +| Type | [offline\|idle\|busy] | +| Environment | $CODER_PROVISIONER_LIST_STATUS | + +Filter by provisioner status. + +### -m, --max-age + +| | | +|-------------|----------------------------------------------| +| Type | duration | +| Environment | $CODER_PROVISIONER_LIST_MAX_AGE | + +Filter provisioners by maximum age. + ### -O, --org | | | diff --git a/docs/reference/cli/schedule_extend.md b/docs/reference/cli/schedule_extend.md index e4b696ad5c4a7..aa4540b4d7d31 100644 --- a/docs/reference/cli/schedule_extend.md +++ b/docs/reference/cli/schedule_extend.md @@ -16,7 +16,7 @@ coder schedule extend ## Description ```console - +Extends the workspace deadline. * The new stop time is calculated from *now*. * The new stop time must be at least 30 minutes in the future. * The workspace template may restrict the maximum workspace runtime. diff --git a/docs/reference/cli/templates_presets_list.md b/docs/reference/cli/templates_presets_list.md index 69dd12faadc7b..5c2d26859f018 100644 --- a/docs/reference/cli/templates_presets_list.md +++ b/docs/reference/cli/templates_presets_list.md @@ -30,10 +30,10 @@ Select which organization (uuid or name) to use. ### -c, --column -| | | -|---------|----------------------------------------------------------------------| -| Type | [name\|parameters\|default\|desired prebuild instances] | -| Default | name,parameters,default,desired prebuild instances | +| | | +|---------|-----------------------------------------------------------------------------------| +| Type | [name\|description\|parameters\|default\|desired prebuild instances] | +| Default | name,description,parameters,default,desired prebuild instances | Columns to display in table output. diff --git a/docs/support/support-bundle.md b/docs/support/support-bundle.md index 7cac0058f4812..1741dbfb663f3 100644 --- a/docs/support/support-bundle.md +++ b/docs/support/support-bundle.md @@ -73,6 +73,7 @@ A brief overview of all files contained in the bundle is provided below: prompt. The support bundle will be generated in the current directory with the filename `coder-support-$TIMESTAMP.zip`. + > [!NOTE] > While support bundles can be generated without a running workspace, it is > recommended to specify one to maximize troubleshooting information. diff --git a/docs/tutorials/testing-templates.md b/docs/tutorials/testing-templates.md index bcfa33a74e16f..025c0d6ace26f 100644 --- a/docs/tutorials/testing-templates.md +++ b/docs/tutorials/testing-templates.md @@ -86,7 +86,7 @@ jobs: - name: Get short commit SHA to use as template version name id: name - run: echo "version_name=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT + run: echo "version_name=$(git rev-parse --short HEAD)" >> "$GITHUB_OUTPUT" - name: Get latest commit title to use as template version description id: message diff --git a/docs/user-guides/desktop/index.md b/docs/user-guides/desktop/index.md index 116f7d4d6de69..d5f5e5aabb3c2 100644 --- a/docs/user-guides/desktop/index.md +++ b/docs/user-guides/desktop/index.md @@ -8,12 +8,6 @@ Coder Desktop requires a Coder deployment running [v2.20.0](https://github.com/c ## Install Coder Desktop -> [!IMPORTANT] -> Coder Desktop can't connect through a corporate VPN. -> -> Due to a [known issue](#coder-desktop-cant-connect-through-another-vpn), -> if your Coder deployment requires that you connect through a corporate VPN, Desktop will timeout when it tries to connect. -
You can install Coder Desktop on macOS or Windows. @@ -144,9 +138,7 @@ To avoid system VPN configuration conflicts, only one copy of `Coder Desktop.app If the logged in Coder deployment requires a corporate VPN to connect, Coder Connect can't establish communication through the VPN, and will time out. -This is due to known issues with [macOS](https://github.com/coder/coder-desktop-macos/issues/201) and -[Windows](https://github.com/coder/coder-desktop-windows/issues/147) networking. -A resolution is in progress. +This issue has been fixed in Coder v2.24.3 and later. For macOS clients, Coder Desktop v0.8.0 or later is also required. ## Next Steps diff --git a/docs/user-guides/workspace-access/code-server.md b/docs/user-guides/workspace-access/code-server.md new file mode 100644 index 0000000000000..baa36b010c0c0 --- /dev/null +++ b/docs/user-guides/workspace-access/code-server.md @@ -0,0 +1,29 @@ +# code-server + +[code-server](https://github.com/coder/code-server) is our supported method of running VS Code in the web browser. + +![code-server in a workspace](../../images/code-server-ide.png) + +## Differences between code-server and VS Code Web + +Some of the key differences between code-server and VS Code Web are: + +| Feature | code-server | VS Code Web | +|--------------------------|-----------------------------------------------------------------------------|-------------------------------------------------------------------| +| Authentication | Optional login form | No built-in auth | +| Built-in proxy | Includes development proxy (not needed with Coder) | No built-in development proxy | +| Clipboard integration | Supports piping text from terminal (similar to `xclip`) | More limited | +| Display languages | Supports language pack extensions | Limited language support | +| File operations | Options to disable downloads and uploads | No built-in restrictions | +| Health endpoint | Provides `/healthz` endpoint | Limited health monitoring | +| Marketplace | Open VSX by default, configurable via flags/env vars | Uses Microsoft marketplace; modify `product.json` to use your own | +| Path-based routing | Has fixes for state collisions when used path-based | May have issues with path-based routing in certain configurations | +| Proposed API | Always enabled for all extensions | Only Microsoft extensions without configuration | +| Proxy integration | Integrates with Coder's proxy for ports panel | Integration is more limited | +| Sourcemaps | Loads locally | Uses CDN | +| Telemetry | Configurable endpoint | Does not allow a configurable endpoint | +| Terminal access to files | You can use a terminal outside of the integrated one to interact with files | Limited to integrated terminal access | +| User settings | Stored on remote disk | Stored in browser | +| Web views | Self-contained | Uses Microsoft CDN | + +For more information about code-server, visit the [code-server FAQ](https://coder.com/docs/code-server/FAQ). diff --git a/docs/user-guides/workspace-access/index.md b/docs/user-guides/workspace-access/index.md index 1bf4d9d8c9927..266e76e94757f 100644 --- a/docs/user-guides/workspace-access/index.md +++ b/docs/user-guides/workspace-access/index.md @@ -78,12 +78,12 @@ Your workspace is now accessible via `ssh coder.` ## Visual Studio Code You can develop in your Coder workspace remotely with -[VSCode](https://code.visualstudio.com/download). We support connecting with the -desktop client and VSCode in the browser with [code-server](#code-server). +[VS Code](https://code.visualstudio.com/download). +We support connecting with the desktop client and VS Code in the browser with [code-server](#code-server). ![Demo](https://github.com/coder/vscode-coder/raw/main/demo.gif?raw=true) -Read more details on [using VSCode in your workspace](./vscode.md). +Read more details on [using VS Code in your workspace](./vscode.md). ## Cursor @@ -118,7 +118,8 @@ on connecting your JetBrains IDEs. ## code-server [code-server](https://github.com/coder/code-server) is our supported method of -running VS Code in the web browser. You can read more in our +running VS Code in the web browser. +Learn more about [what makes code-server different from VS Code web](./code-server.md) or visit the [documentation for code-server](https://coder.com/docs/code-server/latest). ![code-server in a workspace](../../images/code-server-ide.png) diff --git a/docs/user-guides/workspace-access/jetbrains/gateway.md b/docs/user-guides/workspace-access/jetbrains/gateway.md index 09c54a10e854f..b7065b56a0729 100644 --- a/docs/user-guides/workspace-access/jetbrains/gateway.md +++ b/docs/user-guides/workspace-access/jetbrains/gateway.md @@ -10,6 +10,7 @@ manually configured SSH connection. ### How to use the plugin +> [!NOTE] > If you experience problems, please > [create a GitHub issue](https://github.com/coder/coder/issues) or share in > [our Discord channel](https://discord.gg/coder). diff --git a/docs/user-guides/workspace-access/vscode.md b/docs/user-guides/workspace-access/vscode.md index cd67c2a775bbd..3f89ac8e258bb 100644 --- a/docs/user-guides/workspace-access/vscode.md +++ b/docs/user-guides/workspace-access/vscode.md @@ -1,13 +1,15 @@ # Visual Studio Code You can develop in your Coder workspace remotely with -[VSCode](https://code.visualstudio.com/download). We support connecting with the -desktop client and VSCode in the browser with +[VS Code](https://code.visualstudio.com/download). +We support connecting with the desktop client and VS Code in the browser with [code-server](https://github.com/coder/code-server). +Learn more about how VS Code Web and code-server compare in the +[code-server doc](./code-server.md). -## VSCode Desktop +## VS Code Desktop -VSCode desktop is a default app for workspaces. +VS Code desktop is a default app for workspaces. Click `VS Code Desktop` in the dashboard to one-click enter a workspace. This automatically installs the [Coder Remote](https://github.com/coder/vscode-coder) @@ -21,7 +23,7 @@ extension, authenticates with Coder, and connects to the workspace. ### Manual Installation -You can install our extension manually in VSCode using the command palette. +You can install our extension manually in VS Code using the command palette. Launch VS Code Quick Open (Ctrl+P), paste the following command, and press enter. diff --git a/dogfood/coder-envbuilder/main.tf b/dogfood/coder-envbuilder/main.tf index fb57bebffa9a1..73cef7dec5b9d 100644 --- a/dogfood/coder-envbuilder/main.tf +++ b/dogfood/coder-envbuilder/main.tf @@ -110,20 +110,20 @@ data "coder_workspace_owner" "me" {} module "slackme" { source = "dev.registry.coder.com/coder/slackme/coder" - version = "1.0.30" + version = "1.0.31" agent_id = coder_agent.dev.id auth_provider_id = "slack" } module "dotfiles" { source = "dev.registry.coder.com/coder/dotfiles/coder" - version = "1.2.0" + version = "1.2.1" agent_id = coder_agent.dev.id } module "personalize" { source = "dev.registry.coder.com/coder/personalize/coder" - version = "1.0.30" + version = "1.0.31" agent_id = coder_agent.dev.id } @@ -148,13 +148,13 @@ module "jetbrains_gateway" { module "filebrowser" { source = "dev.registry.coder.com/coder/filebrowser/coder" - version = "1.1.1" + version = "1.1.2" agent_id = coder_agent.dev.id } module "coder-login" { source = "dev.registry.coder.com/coder/coder-login/coder" - version = "1.0.30" + version = "1.0.31" agent_id = coder_agent.dev.id } diff --git a/dogfood/coder/Dockerfile b/dogfood/coder/Dockerfile index dbafcd7add427..0b5a36244ccdc 100644 --- a/dogfood/coder/Dockerfile +++ b/dogfood/coder/Dockerfile @@ -4,14 +4,15 @@ FROM rust:slim@sha256:3f391b0678a6e0c88fd26f13e399c9c515ac47354e3cadfee7daee3b21 ENV CARGO_INSTALL_ROOT=/tmp/ # Use more reliable mirrors for Debian packages RUN sed -i 's|http://deb.debian.org/debian|http://mirrors.edge.kernel.org/debian|g' /etc/apt/sources.list && \ - apt-get update || true + apt-get update || true RUN apt-get update && apt-get install -y libssl-dev openssl pkg-config build-essential RUN cargo install jj-cli typos-cli watchexec-cli FROM ubuntu:jammy@sha256:0e5e4a57c2499249aafc3b40fcd541e9a456aab7296681a3994d631587203f97 AS go # Install Go manually, so that we can control the version -ARG GO_VERSION=1.24.4 +ARG GO_VERSION=1.24.6 +ARG GO_CHECKSUM="bbca37cc395c974ffa4893ee35819ad23ebb27426df87af92e93a9ec66ef8712" # Boring Go is needed to build FIPS-compliant binaries. RUN apt-get update && \ @@ -19,6 +20,7 @@ RUN apt-get update && \ curl --silent --show-error --location \ "https://go.dev/dl/go${GO_VERSION}.linux-amd64.tar.gz" \ -o /usr/local/go.tar.gz && \ + echo "$GO_CHECKSUM /usr/local/go.tar.gz" | sha256sum -c && \ rm -rf /var/lib/apt/lists/* ENV PATH=$PATH:/usr/local/go/bin @@ -29,6 +31,7 @@ RUN apt-get update && \ mkdir --parents /usr/local/go && \ tar --extract --gzip --directory=/usr/local/go --file=/usr/local/go.tar.gz --strip-components=1 && \ mkdir --parents "$GOPATH" && \ + go env -w GOSUMDB=sum.golang.org && \ # moq for Go tests. go install github.com/matryer/moq@v0.2.3 && \ # swag for Swagger doc generation @@ -123,8 +126,8 @@ RUN mkdir -p /etc/sudoers.d && \ # Use more reliable mirrors for Ubuntu packages RUN sed -i 's|http://archive.ubuntu.com/ubuntu/|http://mirrors.edge.kernel.org/ubuntu/|g' /etc/apt/sources.list && \ - sed -i 's|http://security.ubuntu.com/ubuntu/|http://mirrors.edge.kernel.org/ubuntu/|g' /etc/apt/sources.list && \ - apt-get update --quiet && apt-get install --yes \ + sed -i 's|http://security.ubuntu.com/ubuntu/|http://mirrors.edge.kernel.org/ubuntu/|g' /etc/apt/sources.list && \ + apt-get update --quiet && apt-get install --yes \ ansible \ apt-transport-https \ apt-utils \ @@ -242,7 +245,7 @@ RUN DOCTL_VERSION=$(curl -s "https://api.github.com/repos/digitalocean/doctl/rel ARG NVM_INSTALL_SHA=bdea8c52186c4dd12657e77e7515509cda5bf9fa5a2f0046bce749e62645076d # Install frontend utilities ENV NVM_DIR=/usr/local/nvm -ENV NODE_VERSION=20.16.0 +ENV NODE_VERSION=20.19.4 RUN mkdir -p $NVM_DIR RUN curl -o nvm_install.sh https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.0/install.sh && \ echo "${NVM_INSTALL_SHA} nvm_install.sh" | sha256sum -c && \ @@ -252,9 +255,9 @@ RUN source $NVM_DIR/nvm.sh && \ nvm install $NODE_VERSION && \ nvm use $NODE_VERSION ENV PATH=$NVM_DIR/versions/node/v$NODE_VERSION/bin:$PATH -# Allow patch updates for npm and pnpm -RUN npm install -g npm@10.8.1 --integrity=sha512-Dp1C6SvSMYQI7YHq/y2l94uvI+59Eqbu1EpuKQHQ8p16txXRuRit5gH3Lnaagk2aXDIjg/Iru9pd05bnneKgdw== -RUN npm install -g pnpm@9.15.1 --integrity=sha512-GstWXmGT7769p3JwKVBGkVDPErzHZCYudYfnHRncmKQj3/lTblfqRMSb33kP9pToPCe+X6oj1n4MAztYO+S/zw== +RUN corepack enable && \ + corepack prepare npm@10.8.1 --activate && \ + corepack prepare pnpm@10.14.0 --activate RUN pnpx playwright@1.47.0 install --with-deps chromium diff --git a/dogfood/coder/main.tf b/dogfood/coder/main.tf index b5c974efab1ad..a464972cb05b6 100644 --- a/dogfood/coder/main.tf +++ b/dogfood/coder/main.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "~> 2.5" + version = "~> 2.9" } docker = { source = "kreuzwerker/docker" @@ -38,10 +38,13 @@ locals { repo_base_dir = data.coder_parameter.repo_base_dir.value == "~" ? "/home/coder" : replace(data.coder_parameter.repo_base_dir.value, "/^~\\//", "/home/coder/") repo_dir = replace(try(module.git-clone[0].repo_dir, ""), "/^~\\//", "/home/coder/") container_name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}" + has_ai_prompt = data.coder_parameter.ai_prompt.value != "" } data "coder_workspace_preset" "cpt" { - name = "Cape Town" + name = "Cape Town" + description = "Development workspace hosted in South Africa with 1 prebuild instance" + icon = "/emojis/1f1ff-1f1e6.png" parameters = { (data.coder_parameter.region.name) = "za-cpt" (data.coder_parameter.image_type.name) = "codercom/oss-dogfood:latest" @@ -56,7 +59,9 @@ data "coder_workspace_preset" "cpt" { } data "coder_workspace_preset" "pittsburgh" { - name = "Pittsburgh" + name = "Pittsburgh" + description = "Development workspace hosted in United States with 2 prebuild instances" + icon = "/emojis/1f1fa-1f1f8.png" parameters = { (data.coder_parameter.region.name) = "us-pittsburgh" (data.coder_parameter.image_type.name) = "codercom/oss-dogfood:latest" @@ -71,7 +76,9 @@ data "coder_workspace_preset" "pittsburgh" { } data "coder_workspace_preset" "falkenstein" { - name = "Falkenstein" + name = "Falkenstein" + description = "Development workspace hosted in Europe with 1 prebuild instance" + icon = "/emojis/1f1ea-1f1fa.png" parameters = { (data.coder_parameter.region.name) = "eu-helsinki" (data.coder_parameter.image_type.name) = "codercom/oss-dogfood:latest" @@ -86,7 +93,9 @@ data "coder_workspace_preset" "falkenstein" { } data "coder_workspace_preset" "sydney" { - name = "Sydney" + name = "Sydney" + description = "Development workspace hosted in Australia with 1 prebuild instance" + icon = "/emojis/1f1e6-1f1fa.png" parameters = { (data.coder_parameter.region.name) = "ap-sydney" (data.coder_parameter.image_type.name) = "codercom/oss-dogfood:latest" @@ -101,7 +110,9 @@ data "coder_workspace_preset" "sydney" { } data "coder_workspace_preset" "saopaulo" { - name = "São Paulo" + name = "São Paulo" + description = "Development workspace hosted in Brazil with 1 prebuild instance" + icon = "/emojis/1f1e7-1f1f7.png" parameters = { (data.coder_parameter.region.name) = "sa-saopaulo" (data.coder_parameter.image_type.name) = "codercom/oss-dogfood:latest" @@ -140,6 +151,13 @@ data "coder_parameter" "image_type" { } } +variable "anthropic_api_key" { + type = string + description = "The API key used to authenticate with the Anthropic API." + default = "" + sensitive = true +} + locals { default_regions = { // keys should match group names @@ -157,7 +175,6 @@ locals { ], ["us-pittsburgh"])[0] } - data "coder_parameter" "region" { type = "string" name = "Region" @@ -232,6 +249,14 @@ data "coder_parameter" "devcontainer_autostart" { mutable = true } +data "coder_parameter" "ai_prompt" { + type = "string" + name = "AI Prompt" + default = "" + description = "Prompt for Claude Code" + mutable = true // Workaround for issue with claiming a prebuild from a preset that does not include this parameter. +} + provider "docker" { host = lookup(local.docker_host, data.coder_parameter.region.value) } @@ -251,10 +276,78 @@ data "coder_workspace_tags" "tags" { } } +data "coder_parameter" "ide_choices" { + type = "list(string)" + name = "Select IDEs" + form_type = "multi-select" + mutable = true + description = "Choose one or more IDEs to enable in your workspace" + default = jsonencode(["vscode", "code-server", "cursor"]) + option { + name = "VS Code Desktop" + value = "vscode" + icon = "/icon/code.svg" + } + option { + name = "code-server" + value = "code-server" + icon = "/icon/code.svg" + } + option { + name = "VS Code Web" + value = "vscode-web" + icon = "/icon/code.svg" + } + option { + name = "JetBrains IDEs" + value = "jetbrains" + icon = "/icon/jetbrains.svg" + } + option { + name = "JetBrains Fleet" + value = "fleet" + icon = "/icon/fleet.svg" + } + option { + name = "Cursor" + value = "cursor" + icon = "/icon/cursor.svg" + } + option { + name = "Windsurf" + value = "windsurf" + icon = "/icon/windsurf.svg" + } + option { + name = "Zed" + value = "zed" + icon = "/icon/zed.svg" + } +} + +data "coder_parameter" "vscode_channel" { + count = contains(jsondecode(data.coder_parameter.ide_choices.value), "vscode") ? 1 : 0 + type = "string" + name = "VS Code Desktop channel" + description = "Choose the VS Code Desktop channel" + mutable = true + default = "stable" + option { + value = "stable" + name = "Stable" + icon = "/icon/code.svg" + } + option { + value = "insiders" + name = "Insiders" + icon = "/icon/code-insiders.svg" + } +} + module "slackme" { count = data.coder_workspace.me.start_count source = "dev.registry.coder.com/coder/slackme/coder" - version = "1.0.30" + version = "1.0.31" agent_id = coder_agent.dev.id auth_provider_id = "slack" } @@ -262,14 +355,21 @@ module "slackme" { module "dotfiles" { count = data.coder_workspace.me.start_count source = "dev.registry.coder.com/coder/dotfiles/coder" - version = "1.2.0" + version = "1.2.1" + agent_id = coder_agent.dev.id +} + +module "git-config" { + count = data.coder_workspace.me.start_count + source = "dev.registry.coder.com/coder/git-config/coder" + version = "1.0.31" agent_id = coder_agent.dev.id } module "git-clone" { count = data.coder_workspace.me.start_count source = "dev.registry.coder.com/coder/git-clone/coder" - version = "1.1.0" + version = "1.1.1" agent_id = coder_agent.dev.id url = "https://github.com/coder/coder" base_dir = local.repo_base_dir @@ -278,12 +378,12 @@ module "git-clone" { module "personalize" { count = data.coder_workspace.me.start_count source = "dev.registry.coder.com/coder/personalize/coder" - version = "1.0.30" + version = "1.0.31" agent_id = coder_agent.dev.id } module "code-server" { - count = data.coder_workspace.me.start_count + count = contains(jsondecode(data.coder_parameter.ide_choices.value), "code-server") ? data.coder_workspace.me.start_count : 0 source = "dev.registry.coder.com/coder/code-server/coder" version = "1.3.1" agent_id = coder_agent.dev.id @@ -293,7 +393,7 @@ module "code-server" { } module "vscode-web" { - count = data.coder_workspace.me.start_count + count = contains(jsondecode(data.coder_parameter.ide_choices.value), "vscode-web") ? data.coder_workspace.me.start_count : 0 source = "dev.registry.coder.com/coder/vscode-web/coder" version = "1.3.1" agent_id = coder_agent.dev.id @@ -305,9 +405,9 @@ module "vscode-web" { } module "jetbrains" { - count = data.coder_workspace.me.start_count + count = contains(jsondecode(data.coder_parameter.ide_choices.value), "jetbrains") ? data.coder_workspace.me.start_count : 0 source = "dev.registry.coder.com/coder/jetbrains/coder" - version = "1.0.0" + version = "1.0.3" agent_id = coder_agent.dev.id agent_name = "dev" folder = local.repo_dir @@ -317,7 +417,7 @@ module "jetbrains" { module "filebrowser" { count = data.coder_workspace.me.start_count source = "dev.registry.coder.com/coder/filebrowser/coder" - version = "1.1.1" + version = "1.1.2" agent_id = coder_agent.dev.id agent_name = "dev" } @@ -325,20 +425,20 @@ module "filebrowser" { module "coder-login" { count = data.coder_workspace.me.start_count source = "dev.registry.coder.com/coder/coder-login/coder" - version = "1.0.30" + version = "1.0.31" agent_id = coder_agent.dev.id } module "cursor" { - count = data.coder_workspace.me.start_count + count = contains(jsondecode(data.coder_parameter.ide_choices.value), "cursor") ? data.coder_workspace.me.start_count : 0 source = "dev.registry.coder.com/coder/cursor/coder" - version = "1.2.1" + version = "1.3.0" agent_id = coder_agent.dev.id folder = local.repo_dir } module "windsurf" { - count = data.coder_workspace.me.start_count + count = contains(jsondecode(data.coder_parameter.ide_choices.value), "windsurf") ? data.coder_workspace.me.start_count : 0 source = "dev.registry.coder.com/coder/windsurf/coder" version = "1.1.1" agent_id = coder_agent.dev.id @@ -346,16 +446,16 @@ module "windsurf" { } module "zed" { - count = data.coder_workspace.me.start_count + count = contains(jsondecode(data.coder_parameter.ide_choices.value), "zed") ? data.coder_workspace.me.start_count : 0 source = "dev.registry.coder.com/coder/zed/coder" - version = "1.0.0" + version = "1.1.0" agent_id = coder_agent.dev.id agent_name = "dev" folder = local.repo_dir } module "jetbrains-fleet" { - count = data.coder_workspace.me.start_count + count = contains(jsondecode(data.coder_parameter.ide_choices.value), "fleet") ? data.coder_workspace.me.start_count : 0 source = "registry.coder.com/coder/jetbrains-fleet/coder" version = "1.0.1" agent_id = coder_agent.dev.id @@ -370,6 +470,24 @@ module "devcontainers-cli" { agent_id = coder_agent.dev.id } +module "claude-code" { + count = local.has_ai_prompt ? data.coder_workspace.me.start_count : 0 + source = "dev.registry.coder.com/coder/claude-code/coder" + version = "2.0.7" + agent_id = coder_agent.dev.id + folder = local.repo_dir + install_claude_code = true + claude_code_version = "latest" + order = 999 + + experiment_report_tasks = true + experiment_post_install_script = <<-EOT + claude mcp add playwright npx -- @playwright/mcp@latest --headless --isolated --no-sandbox + claude mcp add desktop-commander npx -- @wonderwhy-er/desktop-commander@latest + EOT +} + + resource "coder_agent" "dev" { arch = "amd64" os = "linux" @@ -379,6 +497,11 @@ resource "coder_agent" "dev" { } startup_script_behavior = "blocking" + display_apps { + vscode = contains(jsondecode(data.coder_parameter.ide_choices.value), "vscode") && try(data.coder_parameter.vscode_channel[0].value, "stable") == "stable" + vscode_insiders = contains(jsondecode(data.coder_parameter.ide_choices.value), "vscode") && try(data.coder_parameter.vscode_channel[0].value, "stable") == "insiders" + } + # The following metadata blocks are optional. They are used to display # information about your workspace in the dashboard. You can remove them # if you don't want to display any information. @@ -700,4 +823,130 @@ resource "coder_metadata" "container_info" { key = "region" value = data.coder_parameter.region.option[index(data.coder_parameter.region.option.*.value, data.coder_parameter.region.value)].name } + item { + key = "ai_task" + value = local.has_ai_prompt ? "yes" : "no" + } +} + +resource "coder_env" "claude_system_prompt" { + count = local.has_ai_prompt ? data.coder_workspace.me.start_count : 0 + agent_id = coder_agent.dev.id + name = "CODER_MCP_CLAUDE_SYSTEM_PROMPT" + value = <<-EOT + + -- Framing -- + You are a helpful Coding assistant. Aim to autonomously investigate + and solve issues the user gives you and test your work, whenever possible. + + Avoid shortcuts like mocking tests. When you get stuck, you can ask the user + but opt for autonomy. + + -- Tool Selection -- + - coder_report_task: providing status updates or requesting user input. + - playwright: previewing your changes after you made them + to confirm it worked as expected + - desktop-commander - use only for commands that keep running + (servers, dev watchers, GUI apps). + - Built-in tools - use for everything else: + (file operations, git commands, builds & installs, one-off shell commands) + + Remember this decision rule: + - Stays running? → desktop-commander + - Finishes immediately? → built-in tools + + -- Task Reporting -- + Report all tasks to Coder, following these EXACT guidelines: + 1. Be granular. If you are investigating with multiple steps, report each step + to coder. + 2. IMMEDIATELY report status after receiving ANY user message + 3. Use "state": "working" when actively processing WITHOUT needing + additional user input + 4. Use "state": "complete" only when finished with a task + 5. Use "state": "failure" when you need ANY user input, lack sufficient + details, or encounter blockers + + In your summary: + - Be specific about what you're doing + - Clearly indicate what information you need from the user when in + "failure" state + - Keep it under 160 characters + - Make it actionable + + -- Context -- + There is an existing application in the current directory. + Be sure to read CLAUDE.md before making any changes. + + This is a real-world production application. As such, make sure to think carefully, use TODO lists, and plan carefully before making changes. + + EOT +} + +resource "coder_env" "claude_task_prompt" { + count = local.has_ai_prompt ? data.coder_workspace.me.start_count : 0 + agent_id = coder_agent.dev.id + name = "CODER_MCP_CLAUDE_TASK_PROMPT" + value = data.coder_parameter.ai_prompt.value +} + +# coder exp mcp configure claude-code reads from CLAUDE_API_KEY +resource "coder_env" "claude_api_key" { + count = local.has_ai_prompt ? data.coder_workspace.me.start_count : 0 + agent_id = coder_agent.dev.id + name = "CLAUDE_API_KEY" + value = var.anthropic_api_key +} + +resource "coder_app" "develop_sh" { + count = local.has_ai_prompt ? data.coder_workspace.me.start_count : 0 + agent_id = coder_agent.dev.id + slug = "develop-sh" + display_name = "develop.sh" + icon = "${data.coder_workspace.me.access_url}/emojis/1f4bb.png" // 💻 + command = "screen -x develop_sh" + share = "authenticated" + subdomain = true + open_in = "tab" + order = 0 +} + +resource "coder_script" "develop_sh" { + count = local.has_ai_prompt ? data.coder_workspace.me.start_count : 0 + display_name = "develop.sh" + agent_id = coder_agent.dev.id + run_on_start = true + start_blocks_login = false + icon = "${data.coder_workspace.me.access_url}/emojis/1f4bb.png" // 💻 + script = <<-EOT + #!/usr/bin/env bash + set -eux -o pipefail + + # Wait for the agent startup script to finish. + for attempt in {1..60}; do + if [[ -f /tmp/.coder-startup-script.done ]]; then + break + fi + echo "Waiting for agent startup script to finish... ($attempt/60)" + sleep 10 + done + cd "${local.repo_dir}" && screen -dmS develop_sh /bin/sh -c 'while true; do ./scripts/develop.sh --; echo "develop.sh exited with code $? restarting in 30s"; sleep 30; done' + EOT +} + +resource "coder_app" "preview" { + count = local.has_ai_prompt ? data.coder_workspace.me.start_count : 0 + agent_id = coder_agent.dev.id + slug = "preview" + display_name = "Preview" + icon = "${data.coder_workspace.me.access_url}/emojis/1f50e.png" // 🔎 + url = "http://localhost:8080" + share = "authenticated" + subdomain = true + open_in = "tab" + order = 1 + healthcheck { + url = "http://localhost:8080/healthz" + interval = 5 + threshold = 15 + } } diff --git a/dogfood/main.tf b/dogfood/main.tf index 72cd868f61645..c79e950efadf4 100644 --- a/dogfood/main.tf +++ b/dogfood/main.tf @@ -33,6 +33,13 @@ variable "CODER_TEMPLATE_MESSAGE" { type = string } +variable "CODER_DOGFOOD_ANTHROPIC_API_KEY" { + type = string + description = "The API key that workspaces will use to authenticate with the Anthropic API." + default = "" + sensitive = true +} + resource "coderd_template" "dogfood" { name = var.CODER_TEMPLATE_NAME display_name = "Write Coder on Coder" @@ -45,6 +52,12 @@ resource "coderd_template" "dogfood" { message = var.CODER_TEMPLATE_MESSAGE directory = var.CODER_TEMPLATE_DIR active = true + tf_vars = [ + { + name = "anthropic_api_key" + value = var.CODER_DOGFOOD_ANTHROPIC_API_KEY + } + ] } ] acl = { diff --git a/enterprise/audit/table.go b/enterprise/audit/table.go index 6c1f907abfa00..0519efd72f31b 100644 --- a/enterprise/audit/table.go +++ b/enterprise/audit/table.go @@ -115,6 +115,7 @@ var auditableResourcesTypes = map[any]map[string]Action{ "max_port_sharing_level": ActionTrack, "activity_bump": ActionTrack, "use_classic_parameter_flow": ActionTrack, + "cors_behavior": ActionTrack, }, &database.TemplateVersion{}: { "id": ActionTrack, @@ -134,6 +135,7 @@ var auditableResourcesTypes = map[any]map[string]Action{ "archived": ActionTrack, "source_example_id": ActionIgnore, // Never changes. "has_ai_task": ActionIgnore, // Never changes. + "has_external_agent": ActionIgnore, // Never changes. }, &database.User{}: { "id": ActionTrack, @@ -172,6 +174,8 @@ var auditableResourcesTypes = map[any]map[string]Action{ "automatic_updates": ActionTrack, "favorite": ActionTrack, "next_start_at": ActionTrack, + "group_acl": ActionTrack, + "user_acl": ActionTrack, }, &database.WorkspaceBuild{}: { "id": ActionIgnore, @@ -194,6 +198,7 @@ var auditableResourcesTypes = map[any]map[string]Action{ "template_version_preset_id": ActionIgnore, // Never changes. "has_ai_task": ActionIgnore, // Never changes. "ai_task_sidebar_app_id": ActionIgnore, // Never changes. + "has_external_agent": ActionIgnore, // Never changes. }, &database.AuditableGroup{}: { "id": ActionTrack, diff --git a/enterprise/cli/create_test.go b/enterprise/cli/create_test.go index 040768473c55d..44218abb5a58d 100644 --- a/enterprise/cli/create_test.go +++ b/enterprise/cli/create_test.go @@ -2,14 +2,33 @@ package cli_test import ( "context" + "database/sql" "fmt" "sync" + "sync/atomic" "testing" + "time" + + "github.com/coder/coder/v2/cli" + + "github.com/coder/coder/v2/coderd/wsbuilder" "github.com/google/uuid" + "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbtestutil" + "github.com/coder/coder/v2/coderd/files" + "github.com/coder/coder/v2/coderd/notifications" + agplprebuilds "github.com/coder/coder/v2/coderd/prebuilds" + "github.com/coder/coder/v2/enterprise/coderd/prebuilds" + "github.com/coder/coder/v2/provisioner/echo" + "github.com/coder/coder/v2/provisionersdk/proto" + "github.com/coder/coder/v2/testutil" + "github.com/coder/quartz" + "github.com/coder/coder/v2/cli/clitest" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/rbac" @@ -202,3 +221,375 @@ func TestEnterpriseCreate(t *testing.T) { require.ErrorContains(t, err, fmt.Sprintf("--org=%q", "coder")) }) } + +func TestEnterpriseCreateWithPreset(t *testing.T) { + t.Parallel() + + const ( + firstParameterName = "first_parameter" + firstParameterDisplayName = "First Parameter" + firstParameterDescription = "This is the first parameter" + firstParameterValue = "1" + + firstOptionalParameterName = "first_optional_parameter" + firstOptionParameterDescription = "This is the first optional parameter" + firstOptionalParameterValue = "1" + secondOptionalParameterName = "second_optional_parameter" + secondOptionalParameterDescription = "This is the second optional parameter" + secondOptionalParameterValue = "2" + + thirdParameterName = "third_parameter" + thirdParameterDescription = "This is the third parameter" + thirdParameterValue = "3" + ) + + echoResponses := func(presets ...*proto.Preset) *echo.Responses { + return prepareEchoResponses([]*proto.RichParameter{ + { + Name: firstParameterName, + DisplayName: firstParameterDisplayName, + Description: firstParameterDescription, + Mutable: true, + DefaultValue: firstParameterValue, + Options: []*proto.RichParameterOption{ + { + Name: firstOptionalParameterName, + Description: firstOptionParameterDescription, + Value: firstOptionalParameterValue, + }, + { + Name: secondOptionalParameterName, + Description: secondOptionalParameterDescription, + Value: secondOptionalParameterValue, + }, + }, + }, + { + Name: thirdParameterName, + Description: thirdParameterDescription, + DefaultValue: thirdParameterValue, + Mutable: true, + }, + }, presets...) + } + + runReconciliationLoop := func( + t *testing.T, + ctx context.Context, + db database.Store, + reconciler *prebuilds.StoreReconciler, + presets []codersdk.Preset, + ) { + t.Helper() + + state, err := reconciler.SnapshotState(ctx, db) + require.NoError(t, err) + require.Len(t, presets, 1) + ps, err := state.FilterByPreset(presets[0].ID) + require.NoError(t, err) + require.NotNil(t, ps) + actions, err := reconciler.CalculateActions(ctx, *ps) + require.NoError(t, err) + require.NotNil(t, actions) + require.NoError(t, reconciler.ReconcilePreset(ctx, *ps)) + } + + getRunningPrebuilds := func( + t *testing.T, + ctx context.Context, + db database.Store, + prebuildInstances int, + ) []database.GetRunningPrebuiltWorkspacesRow { + t.Helper() + + var runningPrebuilds []database.GetRunningPrebuiltWorkspacesRow + testutil.Eventually(ctx, t, func(context.Context) bool { + runningPrebuilds = nil + rows, err := db.GetRunningPrebuiltWorkspaces(ctx) + if err != nil { + return false + } + + for _, row := range rows { + runningPrebuilds = append(runningPrebuilds, row) + + agents, err := db.GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx, row.ID) + if err != nil || len(agents) == 0 { + return false + } + + for _, agent := range agents { + err = db.UpdateWorkspaceAgentLifecycleStateByID(ctx, database.UpdateWorkspaceAgentLifecycleStateByIDParams{ + ID: agent.ID, + LifecycleState: database.WorkspaceAgentLifecycleStateReady, + StartedAt: sql.NullTime{Time: time.Now().Add(time.Hour), Valid: true}, + ReadyAt: sql.NullTime{Time: time.Now().Add(-1 * time.Hour), Valid: true}, + }) + if err != nil { + return false + } + } + } + + t.Logf("found %d running prebuilds so far, want %d", len(runningPrebuilds), prebuildInstances) + return len(runningPrebuilds) == prebuildInstances + }, testutil.IntervalSlow, "prebuilds not running") + + return runningPrebuilds + } + + // This test verifies that when the selected preset has running prebuilds, + // one of those prebuilds is claimed for the user upon workspace creation. + t.Run("PresetFlagClaimsPrebuiltWorkspace", func(t *testing.T) { + t.Parallel() + + // Setup + ctx := testutil.Context(t, testutil.WaitSuperLong) + db, pb := dbtestutil.NewDB(t, dbtestutil.WithDumpOnFailure()) + client, _, api, owner := coderdenttest.NewWithAPI(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + Database: db, + Pubsub: pb, + IncludeProvisionerDaemon: true, + }, + }) + + // Setup Prebuild reconciler + cache := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) + newNoopUsageCheckerPtr := func() *atomic.Pointer[wsbuilder.UsageChecker] { + var noopUsageChecker wsbuilder.UsageChecker = wsbuilder.NoopUsageChecker{} + buildUsageChecker := atomic.Pointer[wsbuilder.UsageChecker]{} + buildUsageChecker.Store(&noopUsageChecker) + return &buildUsageChecker + } + reconciler := prebuilds.NewStoreReconciler( + db, pb, cache, + codersdk.PrebuildsConfig{}, + testutil.Logger(t), + quartz.NewMock(t), + prometheus.NewRegistry(), + notifications.NewNoopEnqueuer(), + newNoopUsageCheckerPtr(), + ) + var claimer agplprebuilds.Claimer = prebuilds.NewEnterpriseClaimer(db) + api.AGPL.PrebuildsClaimer.Store(&claimer) + + // Given: a template and a template version where the preset defines values for all required parameters, + // and is configured to have 1 prebuild instance + prebuildInstances := int32(1) + preset := proto.Preset{ + Name: "preset-test", + Parameters: []*proto.PresetParameter{ + {Name: firstParameterName, Value: secondOptionalParameterValue}, + {Name: thirdParameterName, Value: thirdParameterValue}, + }, + Prebuild: &proto.Prebuild{ + Instances: prebuildInstances, + }, + } + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, echoResponses(&preset)) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + presets, err := client.TemplateVersionPresets(ctx, version.ID) + require.NoError(t, err) + require.Len(t, presets, 1) + require.Equal(t, preset.Name, presets[0].Name) + + // Given: Reconciliation loop runs and starts prebuilt workspaces + runReconciliationLoop(t, ctx, db, reconciler, presets) + runningPrebuilds := getRunningPrebuilds(t, ctx, db, int(prebuildInstances)) + require.Len(t, runningPrebuilds, int(prebuildInstances)) + require.Equal(t, presets[0].ID, runningPrebuilds[0].CurrentPresetID.UUID) + + // Given: a running prebuilt workspace, ready to be claimed + prebuild := coderdtest.MustWorkspace(t, client, runningPrebuilds[0].ID) + require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition) + require.Equal(t, template.ID, prebuild.TemplateID) + require.Equal(t, version.ID, prebuild.TemplateActiveVersionID) + require.Equal(t, presets[0].ID, *prebuild.LatestBuild.TemplateVersionPresetID) + + // When: running the create command with the specified preset + workspaceName := "my-workspace" + inv, root := clitest.New(t, "create", workspaceName, "--template", template.Name, "-y", "--preset", preset.Name) + clitest.SetupConfig(t, member, root) + pty := ptytest.New(t).Attach(inv) + inv.Stdout = pty.Output() + inv.Stderr = pty.Output() + err = inv.Run() + require.NoError(t, err) + + // Should: display the selected preset as well as its parameters + presetName := fmt.Sprintf("Preset '%s' applied:", preset.Name) + pty.ExpectMatch(presetName) + pty.ExpectMatch(fmt.Sprintf("%s: '%s'", firstParameterName, secondOptionalParameterValue)) + pty.ExpectMatch(fmt.Sprintf("%s: '%s'", thirdParameterName, thirdParameterValue)) + + // Verify if the new workspace uses expected parameters. + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + + // Should: create the user's workspace by claiming the existing prebuilt workspace + workspaces, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{ + Name: workspaceName, + }) + require.NoError(t, err) + require.Len(t, workspaces.Workspaces, 1) + require.Equal(t, prebuild.ID, workspaces.Workspaces[0].ID) + + // Should: create a workspace using the expected template version and the preset-defined parameters + workspaceLatestBuild := workspaces.Workspaces[0].LatestBuild + require.Equal(t, version.ID, workspaceLatestBuild.TemplateVersionID) + require.Equal(t, presets[0].ID, *workspaceLatestBuild.TemplateVersionPresetID) + buildParameters, err := client.WorkspaceBuildParameters(ctx, workspaceLatestBuild.ID) + require.NoError(t, err) + require.Len(t, buildParameters, 2) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: firstParameterName, Value: secondOptionalParameterValue}) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: thirdParameterName, Value: thirdParameterValue}) + }) + + // This test verifies that when the user provides `--preset None`, + // no preset is applied, no prebuilt workspace is claimed, and + // a new regular workspace is created instead. + t.Run("PresetNoneDoesNotClaimPrebuiltWorkspace", func(t *testing.T) { + t.Parallel() + + // Setup + ctx := testutil.Context(t, testutil.WaitSuperLong) + db, pb := dbtestutil.NewDB(t, dbtestutil.WithDumpOnFailure()) + client, _, api, owner := coderdenttest.NewWithAPI(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + Database: db, + Pubsub: pb, + IncludeProvisionerDaemon: true, + }, + }) + + // Setup Prebuild reconciler + cache := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) + newNoopUsageCheckerPtr := func() *atomic.Pointer[wsbuilder.UsageChecker] { + var noopUsageChecker wsbuilder.UsageChecker = wsbuilder.NoopUsageChecker{} + buildUsageChecker := atomic.Pointer[wsbuilder.UsageChecker]{} + buildUsageChecker.Store(&noopUsageChecker) + return &buildUsageChecker + } + reconciler := prebuilds.NewStoreReconciler( + db, pb, cache, + codersdk.PrebuildsConfig{}, + testutil.Logger(t), + quartz.NewMock(t), + prometheus.NewRegistry(), + notifications.NewNoopEnqueuer(), + newNoopUsageCheckerPtr(), + ) + var claimer agplprebuilds.Claimer = prebuilds.NewEnterpriseClaimer(db) + api.AGPL.PrebuildsClaimer.Store(&claimer) + + // Given: a template and a template version where the preset defines values for all required parameters, + // and is configured to have 1 prebuild instance + prebuildInstances := int32(1) + presetWithPrebuild := proto.Preset{ + Name: "preset-test", + Parameters: []*proto.PresetParameter{ + {Name: firstParameterName, Value: secondOptionalParameterValue}, + {Name: thirdParameterName, Value: thirdParameterValue}, + }, + Prebuild: &proto.Prebuild{ + Instances: prebuildInstances, + }, + } + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, echoResponses(&presetWithPrebuild)) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + presets, err := client.TemplateVersionPresets(ctx, version.ID) + require.NoError(t, err) + require.Len(t, presets, 1) + + // Given: Reconciliation loop runs and starts prebuilt workspaces + runReconciliationLoop(t, ctx, db, reconciler, presets) + runningPrebuilds := getRunningPrebuilds(t, ctx, db, int(prebuildInstances)) + require.Len(t, runningPrebuilds, int(prebuildInstances)) + require.Equal(t, presets[0].ID, runningPrebuilds[0].CurrentPresetID.UUID) + + // Given: a running prebuilt workspace, ready to be claimed + prebuild := coderdtest.MustWorkspace(t, client, runningPrebuilds[0].ID) + require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition) + require.Equal(t, template.ID, prebuild.TemplateID) + require.Equal(t, version.ID, prebuild.TemplateActiveVersionID) + require.Equal(t, presets[0].ID, *prebuild.LatestBuild.TemplateVersionPresetID) + + // When: running the create command without a preset flag + workspaceName := "my-workspace" + inv, root := clitest.New(t, "create", workspaceName, "--template", template.Name, "-y", + "--preset", cli.PresetNone, + "--parameter", fmt.Sprintf("%s=%s", firstParameterName, firstParameterValue), + "--parameter", fmt.Sprintf("%s=%s", thirdParameterName, thirdParameterValue)) + clitest.SetupConfig(t, member, root) + pty := ptytest.New(t).Attach(inv) + inv.Stdout = pty.Output() + inv.Stderr = pty.Output() + err = inv.Run() + require.NoError(t, err) + pty.ExpectMatch("No preset applied.") + + // Verify if the new workspace uses expected parameters. + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + + // Should: create a new user's workspace without claiming the existing prebuilt workspace + workspaces, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{ + Name: workspaceName, + }) + require.NoError(t, err) + require.Len(t, workspaces.Workspaces, 1) + require.NotEqual(t, prebuild.ID, workspaces.Workspaces[0].ID) + + // Should: create a workspace using the expected template version and the specified parameters + workspaceLatestBuild := workspaces.Workspaces[0].LatestBuild + require.Equal(t, version.ID, workspaceLatestBuild.TemplateVersionID) + require.Nil(t, workspaceLatestBuild.TemplateVersionPresetID) + buildParameters, err := client.WorkspaceBuildParameters(ctx, workspaceLatestBuild.ID) + require.NoError(t, err) + require.Len(t, buildParameters, 2) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: firstParameterName, Value: firstParameterValue}) + require.Contains(t, buildParameters, codersdk.WorkspaceBuildParameter{Name: thirdParameterName, Value: thirdParameterValue}) + }) +} + +func prepareEchoResponses(parameters []*proto.RichParameter, presets ...*proto.Preset) *echo.Responses { + return &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionPlan: []*proto.Response{ + { + Type: &proto.Response_Plan{ + Plan: &proto.PlanComplete{ + Parameters: parameters, + Presets: presets, + }, + }, + }, + }, + ProvisionApply: []*proto.Response{ + { + Type: &proto.Response_Apply{ + Apply: &proto.ApplyComplete{ + Resources: []*proto.Resource{ + { + Type: "compute", + Name: "main", + Agents: []*proto.Agent{ + { + Name: "smith", + OperatingSystem: "linux", + Architecture: "i386", + }, + }, + }, + }, + }, + }, + }, + }, + } +} diff --git a/enterprise/cli/externalworkspaces.go b/enterprise/cli/externalworkspaces.go new file mode 100644 index 0000000000000..081cbb765e170 --- /dev/null +++ b/enterprise/cli/externalworkspaces.go @@ -0,0 +1,261 @@ +package cli + +import ( + "context" + "fmt" + "strings" + + "github.com/google/uuid" + "golang.org/x/xerrors" + + agpl "github.com/coder/coder/v2/cli" + "github.com/coder/coder/v2/cli/cliui" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/pretty" + "github.com/coder/serpent" +) + +type externalAgent struct { + WorkspaceName string `json:"workspace_name"` + AgentName string `json:"agent_name"` + AuthType string `json:"auth_type"` + AuthToken string `json:"auth_token"` + InitScript string `json:"init_script"` +} + +func (r *RootCmd) externalWorkspaces() *serpent.Command { + orgContext := agpl.NewOrganizationContext() + + cmd := &serpent.Command{ + Use: "external-workspaces [subcommand]", + Short: "Create or manage external workspaces", + Handler: func(inv *serpent.Invocation) error { + return inv.Command.HelpHandler(inv) + }, + Children: []*serpent.Command{ + r.externalWorkspaceCreate(), + r.externalWorkspaceAgentInstructions(), + r.externalWorkspaceList(), + }, + } + + orgContext.AttachOptions(cmd) + return cmd +} + +// externalWorkspaceCreate extends `coder create` to create an external workspace. +func (r *RootCmd) externalWorkspaceCreate() *serpent.Command { + opts := agpl.CreateOptions{ + BeforeCreate: func(ctx context.Context, client *codersdk.Client, _ codersdk.Template, templateVersionID uuid.UUID) error { + version, err := client.TemplateVersion(ctx, templateVersionID) + if err != nil { + return xerrors.Errorf("get template version: %w", err) + } + if !version.HasExternalAgent { + return xerrors.Errorf("template version %q does not have an external agent. Only templates with external agents can be used for external workspace creation", templateVersionID) + } + + return nil + }, + AfterCreate: func(ctx context.Context, inv *serpent.Invocation, client *codersdk.Client, workspace codersdk.Workspace) error { + workspace, err := client.WorkspaceByOwnerAndName(ctx, codersdk.Me, workspace.Name, codersdk.WorkspaceOptions{}) + if err != nil { + return xerrors.Errorf("get workspace by name: %w", err) + } + + externalAgents, err := fetchExternalAgents(inv, client, workspace, workspace.LatestBuild.Resources) + if err != nil { + return xerrors.Errorf("fetch external agents: %w", err) + } + + formatted := formatExternalAgent(workspace.Name, externalAgents) + _, err = fmt.Fprintln(inv.Stdout, formatted) + return err + }, + } + + cmd := r.Create(opts) + cmd.Use = "create [workspace]" + cmd.Short = "Create a new external workspace" + cmd.Middleware = serpent.Chain( + cmd.Middleware, + serpent.RequireNArgs(1), + ) + + for i := range cmd.Options { + if cmd.Options[i].Flag == "template" { + cmd.Options[i].Required = true + } + } + + return cmd +} + +// externalWorkspaceAgentInstructions prints the instructions for an external agent. +func (r *RootCmd) externalWorkspaceAgentInstructions() *serpent.Command { + client := new(codersdk.Client) + formatter := cliui.NewOutputFormatter( + cliui.ChangeFormatterData(cliui.TextFormat(), func(data any) (any, error) { + agent, ok := data.(externalAgent) + if !ok { + return "", xerrors.Errorf("expected externalAgent, got %T", data) + } + + return formatExternalAgent(agent.WorkspaceName, []externalAgent{agent}), nil + }), + cliui.JSONFormat(), + ) + + cmd := &serpent.Command{ + Use: "agent-instructions [user/]workspace[.agent]", + Short: "Get the instructions for an external agent", + Middleware: serpent.Chain(r.InitClient(client), serpent.RequireNArgs(1)), + Handler: func(inv *serpent.Invocation) error { + workspace, workspaceAgent, _, err := agpl.GetWorkspaceAndAgent(inv.Context(), inv, client, false, inv.Args[0]) + if err != nil { + return xerrors.Errorf("find workspace and agent: %w", err) + } + + credentials, err := client.WorkspaceExternalAgentCredentials(inv.Context(), workspace.ID, workspaceAgent.Name) + if err != nil { + return xerrors.Errorf("get external agent token for agent %q: %w", workspaceAgent.Name, err) + } + + agentInfo := externalAgent{ + WorkspaceName: workspace.Name, + AgentName: workspaceAgent.Name, + AuthType: "token", + AuthToken: credentials.AgentToken, + InitScript: credentials.Command, + } + + out, err := formatter.Format(inv.Context(), agentInfo) + if err != nil { + return err + } + + _, err = fmt.Fprintln(inv.Stdout, out) + return err + }, + } + + formatter.AttachOptions(&cmd.Options) + return cmd +} + +func (r *RootCmd) externalWorkspaceList() *serpent.Command { + var ( + filter cliui.WorkspaceFilter + formatter = cliui.NewOutputFormatter( + cliui.TableFormat( + []agpl.WorkspaceListRow{}, + []string{ + "workspace", + "template", + "status", + "healthy", + "last built", + "current version", + "outdated", + }, + ), + cliui.JSONFormat(), + ) + ) + client := new(codersdk.Client) + cmd := &serpent.Command{ + Annotations: map[string]string{ + "workspaces": "", + }, + Use: "list", + Short: "List external workspaces", + Aliases: []string{"ls"}, + Middleware: serpent.Chain( + serpent.RequireNArgs(0), + r.InitClient(client), + ), + Handler: func(inv *serpent.Invocation) error { + baseFilter := filter.Filter() + + if baseFilter.FilterQuery == "" { + baseFilter.FilterQuery = "has_external_agent:true" + } else { + baseFilter.FilterQuery += " has_external_agent:true" + } + + res, err := agpl.QueryConvertWorkspaces(inv.Context(), client, baseFilter, agpl.WorkspaceListRowFromWorkspace) + if err != nil { + return err + } + + if len(res) == 0 && formatter.FormatID() != cliui.JSONFormat().ID() { + pretty.Fprintf(inv.Stderr, cliui.DefaultStyles.Prompt, "No workspaces found! Create one:\n") + _, _ = fmt.Fprintln(inv.Stderr) + _, _ = fmt.Fprintln(inv.Stderr, " "+pretty.Sprint(cliui.DefaultStyles.Code, "coder external-workspaces create ")) + _, _ = fmt.Fprintln(inv.Stderr) + return nil + } + + out, err := formatter.Format(inv.Context(), res) + if err != nil { + return err + } + + _, err = fmt.Fprintln(inv.Stdout, out) + return err + }, + } + filter.AttachOptions(&cmd.Options) + formatter.AttachOptions(&cmd.Options) + return cmd +} + +// fetchExternalAgents fetches the external agents for a workspace. +func fetchExternalAgents(inv *serpent.Invocation, client *codersdk.Client, workspace codersdk.Workspace, resources []codersdk.WorkspaceResource) ([]externalAgent, error) { + if len(resources) == 0 { + return nil, xerrors.Errorf("no resources found for workspace") + } + + var externalAgents []externalAgent + + for _, resource := range resources { + if resource.Type != "coder_external_agent" || len(resource.Agents) == 0 { + continue + } + + agent := resource.Agents[0] + credentials, err := client.WorkspaceExternalAgentCredentials(inv.Context(), workspace.ID, agent.Name) + if err != nil { + return nil, xerrors.Errorf("get external agent token for agent %q: %w", agent.Name, err) + } + + externalAgents = append(externalAgents, externalAgent{ + AgentName: agent.Name, + AuthType: "token", + AuthToken: credentials.AgentToken, + InitScript: credentials.Command, + }) + } + + return externalAgents, nil +} + +// formatExternalAgent formats the instructions for an external agent. +func formatExternalAgent(workspaceName string, externalAgents []externalAgent) string { + var output strings.Builder + _, _ = output.WriteString(fmt.Sprintf("\nPlease run the following command to attach external agent to the workspace %s:\n\n", cliui.Keyword(workspaceName))) + + for i, agent := range externalAgents { + if len(externalAgents) > 1 { + _, _ = output.WriteString(fmt.Sprintf("For agent %s:\n", cliui.Keyword(agent.AgentName))) + } + + _, _ = output.WriteString(fmt.Sprintf("%s\n", pretty.Sprint(cliui.DefaultStyles.Code, agent.InitScript))) + + if i < len(externalAgents)-1 { + _, _ = output.WriteString("\n") + } + } + + return output.String() +} diff --git a/enterprise/cli/externalworkspaces_test.go b/enterprise/cli/externalworkspaces_test.go new file mode 100644 index 0000000000000..9ce39c7c28afb --- /dev/null +++ b/enterprise/cli/externalworkspaces_test.go @@ -0,0 +1,560 @@ +package cli_test + +import ( + "bytes" + "context" + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/cli/clitest" + "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/enterprise/coderd/coderdenttest" + "github.com/coder/coder/v2/enterprise/coderd/license" + "github.com/coder/coder/v2/provisioner/echo" + "github.com/coder/coder/v2/provisionersdk/proto" + "github.com/coder/coder/v2/pty/ptytest" + "github.com/coder/coder/v2/testutil" +) + +// completeWithExternalAgent creates a template version with an external agent resource +func completeWithExternalAgent() *echo.Responses { + return &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionPlan: []*proto.Response{ + { + Type: &proto.Response_Plan{ + Plan: &proto.PlanComplete{ + Resources: []*proto.Resource{ + { + Type: "coder_external_agent", + Name: "main", + Agents: []*proto.Agent{ + { + Name: "external-agent", + OperatingSystem: "linux", + Architecture: "amd64", + }, + }, + }, + }, + HasExternalAgents: true, + }, + }, + }, + }, + ProvisionApply: []*proto.Response{ + { + Type: &proto.Response_Apply{ + Apply: &proto.ApplyComplete{ + Resources: []*proto.Resource{ + { + Type: "coder_external_agent", + Name: "main", + Agents: []*proto.Agent{ + { + Name: "external-agent", + OperatingSystem: "linux", + Architecture: "amd64", + }, + }, + }, + }, + }, + }, + }, + }, + } +} + +// completeWithRegularAgent creates a template version with a regular agent (no external agent) +func completeWithRegularAgent() *echo.Responses { + return &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionPlan: []*proto.Response{ + { + Type: &proto.Response_Plan{ + Plan: &proto.PlanComplete{ + Resources: []*proto.Resource{ + { + Type: "compute", + Name: "main", + Agents: []*proto.Agent{ + { + Name: "regular-agent", + OperatingSystem: "linux", + Architecture: "amd64", + }, + }, + }, + }, + }, + }, + }, + }, + ProvisionApply: []*proto.Response{ + { + Type: &proto.Response_Apply{ + Apply: &proto.ApplyComplete{ + Resources: []*proto.Resource{ + { + Type: "compute", + Name: "main", + Agents: []*proto.Agent{ + { + Name: "regular-agent", + OperatingSystem: "linux", + Architecture: "amd64", + }, + }, + }, + }, + }, + }, + }, + }, + } +} + +func TestExternalWorkspaces(t *testing.T) { + t.Parallel() + + t.Run("Create", func(t *testing.T) { + t.Parallel() + client, owner := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + IncludeProvisionerDaemon: true, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspaceExternalAgent: 1, + }, + }, + }) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, completeWithExternalAgent()) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + args := []string{ + "external-workspaces", + "create", + "my-external-workspace", + "--template", template.Name, + } + inv, root := newCLI(t, args...) + clitest.SetupConfig(t, member, root) + doneChan := make(chan struct{}) + pty := ptytest.New(t).Attach(inv) + go func() { + defer close(doneChan) + err := inv.Run() + assert.NoError(t, err) + }() + + // Expect the workspace creation confirmation + pty.ExpectMatch("coder_external_agent.main") + pty.ExpectMatch("external-agent (linux, amd64)") + pty.ExpectMatch("Confirm create") + pty.WriteLine("yes") + + // Expect the external agent instructions + pty.ExpectMatch("Please run the following command to attach external agent") + pty.ExpectRegexMatch("curl -fsSL .* | CODER_AGENT_TOKEN=.* sh") + + ctx := testutil.Context(t, testutil.WaitLong) + testutil.TryReceive(ctx, t, doneChan) + + // Verify the workspace was created + ws, err := member.WorkspaceByOwnerAndName(context.Background(), codersdk.Me, "my-external-workspace", codersdk.WorkspaceOptions{}) + require.NoError(t, err) + assert.Equal(t, template.Name, ws.TemplateName) + }) + + t.Run("CreateWithoutTemplate", func(t *testing.T) { + t.Parallel() + client, owner := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + IncludeProvisionerDaemon: true, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspaceExternalAgent: 1, + }, + }, + }) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + args := []string{ + "external-workspaces", + "create", + "my-external-workspace", + } + inv, root := newCLI(t, args...) + clitest.SetupConfig(t, member, root) + + err := inv.Run() + require.Error(t, err) + assert.Contains(t, err.Error(), "Missing values for the required flags: template") + }) + + t.Run("CreateWithRegularTemplate", func(t *testing.T) { + t.Parallel() + client, owner := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + IncludeProvisionerDaemon: true, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspaceExternalAgent: 1, + }, + }, + }) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, completeWithRegularAgent()) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + args := []string{ + "external-workspaces", + "create", + "my-external-workspace", + "--template", template.Name, + } + inv, root := newCLI(t, args...) + clitest.SetupConfig(t, member, root) + + err := inv.Run() + require.Error(t, err) + assert.Contains(t, err.Error(), "does not have an external agent") + }) + + t.Run("List", func(t *testing.T) { + t.Parallel() + client, owner := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + IncludeProvisionerDaemon: true, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspaceExternalAgent: 1, + }, + }, + }) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, completeWithExternalAgent()) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + // Create an external workspace + ws := coderdtest.CreateWorkspace(t, member, template.ID) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + + args := []string{ + "external-workspaces", + "list", + } + inv, root := newCLI(t, args...) + clitest.SetupConfig(t, member, root) + pty := ptytest.New(t).Attach(inv) + + ctx, cancelFunc := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancelFunc() + done := make(chan any) + go func() { + errC := inv.WithContext(ctx).Run() + assert.NoError(t, errC) + close(done) + }() + pty.ExpectMatch(ws.Name) + pty.ExpectMatch(template.Name) + cancelFunc() + <-done + }) + + t.Run("ListJSON", func(t *testing.T) { + t.Parallel() + client, owner := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + IncludeProvisionerDaemon: true, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspaceExternalAgent: 1, + }, + }, + }) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, completeWithExternalAgent()) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + // Create an external workspace + ws := coderdtest.CreateWorkspace(t, member, template.ID) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + + args := []string{ + "external-workspaces", + "list", + "--output=json", + } + inv, root := newCLI(t, args...) + clitest.SetupConfig(t, member, root) + + ctx, cancelFunc := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancelFunc() + + out := bytes.NewBuffer(nil) + inv.Stdout = out + err := inv.WithContext(ctx).Run() + require.NoError(t, err) + + var workspaces []codersdk.Workspace + require.NoError(t, json.Unmarshal(out.Bytes(), &workspaces)) + require.Len(t, workspaces, 1) + assert.Equal(t, ws.Name, workspaces[0].Name) + }) + + t.Run("ListNoWorkspaces", func(t *testing.T) { + t.Parallel() + client, owner := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + IncludeProvisionerDaemon: true, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspaceExternalAgent: 1, + }, + }, + }) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + args := []string{ + "external-workspaces", + "list", + } + inv, root := newCLI(t, args...) + clitest.SetupConfig(t, member, root) + pty := ptytest.New(t).Attach(inv) + + ctx, cancelFunc := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancelFunc() + done := make(chan any) + go func() { + errC := inv.WithContext(ctx).Run() + assert.NoError(t, errC) + close(done) + }() + pty.ExpectMatch("No workspaces found!") + pty.ExpectMatch("coder external-workspaces create") + cancelFunc() + <-done + }) + + t.Run("AgentInstructions", func(t *testing.T) { + t.Parallel() + client, owner := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + IncludeProvisionerDaemon: true, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspaceExternalAgent: 1, + }, + }, + }) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, completeWithExternalAgent()) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + // Create an external workspace + ws := coderdtest.CreateWorkspace(t, member, template.ID) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + + args := []string{ + "external-workspaces", + "agent-instructions", + ws.Name, + } + inv, root := newCLI(t, args...) + clitest.SetupConfig(t, member, root) + pty := ptytest.New(t).Attach(inv) + + ctx, cancelFunc := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancelFunc() + done := make(chan any) + go func() { + errC := inv.WithContext(ctx).Run() + assert.NoError(t, errC) + close(done) + }() + pty.ExpectMatch("Please run the following command to attach external agent to the workspace") + pty.ExpectRegexMatch("curl -fsSL .* | CODER_AGENT_TOKEN=.* sh") + cancelFunc() + + ctx = testutil.Context(t, testutil.WaitLong) + testutil.TryReceive(ctx, t, done) + }) + + t.Run("AgentInstructionsJSON", func(t *testing.T) { + t.Parallel() + client, owner := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + IncludeProvisionerDaemon: true, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspaceExternalAgent: 1, + }, + }, + }) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, completeWithExternalAgent()) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + // Create an external workspace + ws := coderdtest.CreateWorkspace(t, member, template.ID) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + + args := []string{ + "external-workspaces", + "agent-instructions", + ws.Name, + "--output=json", + } + inv, root := newCLI(t, args...) + clitest.SetupConfig(t, member, root) + + ctx, cancelFunc := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancelFunc() + + out := bytes.NewBuffer(nil) + inv.Stdout = out + err := inv.WithContext(ctx).Run() + require.NoError(t, err) + + var agentInfo map[string]interface{} + require.NoError(t, json.Unmarshal(out.Bytes(), &agentInfo)) + assert.Equal(t, "token", agentInfo["auth_type"]) + assert.NotEmpty(t, agentInfo["auth_token"]) + assert.NotEmpty(t, agentInfo["init_script"]) + }) + + t.Run("AgentInstructionsNonExistentWorkspace", func(t *testing.T) { + t.Parallel() + client, owner := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + IncludeProvisionerDaemon: true, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspaceExternalAgent: 1, + }, + }, + }) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + args := []string{ + "external-workspaces", + "agent-instructions", + "non-existent-workspace", + } + inv, root := newCLI(t, args...) + clitest.SetupConfig(t, member, root) + + err := inv.Run() + require.Error(t, err) + assert.Contains(t, err.Error(), "Resource not found") + }) + + t.Run("AgentInstructionsNonExistentAgent", func(t *testing.T) { + t.Parallel() + client, owner := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + IncludeProvisionerDaemon: true, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspaceExternalAgent: 1, + }, + }, + }) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, completeWithExternalAgent()) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + // Create an external workspace + ws := coderdtest.CreateWorkspace(t, member, template.ID) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + + args := []string{ + "external-workspaces", + "agent-instructions", + ws.Name + ".non-existent-agent", + } + inv, root := newCLI(t, args...) + clitest.SetupConfig(t, member, root) + + err := inv.Run() + require.Error(t, err) + assert.Contains(t, err.Error(), "agent not found by name") + }) + + t.Run("CreateWithTemplateVersion", func(t *testing.T) { + t.Parallel() + client, owner := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + IncludeProvisionerDaemon: true, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspaceExternalAgent: 1, + }, + }, + }) + member, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, completeWithExternalAgent()) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + + args := []string{ + "external-workspaces", + "create", + "my-external-workspace", + "--template", template.Name, + "--template-version", version.Name, + "-y", + } + inv, root := newCLI(t, args...) + clitest.SetupConfig(t, member, root) + doneChan := make(chan struct{}) + pty := ptytest.New(t).Attach(inv) + go func() { + defer close(doneChan) + err := inv.Run() + assert.NoError(t, err) + }() + + // Expect the workspace creation confirmation + pty.ExpectMatch("coder_external_agent.main") + pty.ExpectMatch("external-agent (linux, amd64)") + + // Expect the external agent instructions + pty.ExpectMatch("Please run the following command to attach external agent") + pty.ExpectRegexMatch("curl -fsSL .* | CODER_AGENT_TOKEN=.* sh") + + ctx := testutil.Context(t, testutil.WaitLong) + testutil.TryReceive(ctx, t, doneChan) + + // Verify the workspace was created + ws, err := member.WorkspaceByOwnerAndName(context.Background(), codersdk.Me, "my-external-workspace", codersdk.WorkspaceOptions{}) + require.NoError(t, err) + assert.Equal(t, template.Name, ws.TemplateName) + }) +} diff --git a/enterprise/cli/prebuilds_test.go b/enterprise/cli/prebuilds_test.go index b5960436edcfb..cf0c74105020c 100644 --- a/enterprise/cli/prebuilds_test.go +++ b/enterprise/cli/prebuilds_test.go @@ -2,17 +2,30 @@ package cli_test import ( "bytes" + "database/sql" "net/http" "testing" + "time" + "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/coder/coder/v2/cli/clitest" "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbauthz" + "github.com/coder/coder/v2/coderd/database/dbfake" + "github.com/coder/coder/v2/coderd/database/dbgen" + "github.com/coder/coder/v2/coderd/database/dbtime" + "github.com/coder/coder/v2/coderd/util/ptr" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/enterprise/coderd/coderdenttest" "github.com/coder/coder/v2/enterprise/coderd/license" + "github.com/coder/coder/v2/provisionersdk/proto" + "github.com/coder/coder/v2/pty/ptytest" + "github.com/coder/coder/v2/testutil" + "github.com/coder/quartz" ) func TestPrebuildsPause(t *testing.T) { @@ -341,3 +354,138 @@ func TestPrebuildsSettingsAPI(t *testing.T) { assert.False(t, settings.ReconciliationPaused) }) } + +// TestSchedulePrebuilds verifies the CLI schedule command when used with prebuilds. +// Running the command on an unclaimed prebuild fails, but after the prebuild is +// claimed (becoming a regular workspace) it succeeds as expected. +func TestSchedulePrebuilds(t *testing.T) { + t.Parallel() + + cases := []struct { + name string + cliErrorMsg string + cmdArgs func(string) []string + }{ + { + name: "AutostartPrebuildError", + cliErrorMsg: "autostart configuration is not supported for prebuilt workspaces", + cmdArgs: func(workspaceName string) []string { + return []string{"schedule", "start", workspaceName, "7:30AM", "Mon-Fri", "Europe/Lisbon"} + }, + }, + { + name: "AutostopPrebuildError", + cliErrorMsg: "autostop configuration is not supported for prebuilt workspaces", + cmdArgs: func(workspaceName string) []string { + return []string{"schedule", "stop", workspaceName, "8h30m"} + }, + }, + { + name: "ExtendPrebuildError", + cliErrorMsg: "extend configuration is not supported for prebuilt workspaces", + cmdArgs: func(workspaceName string) []string { + return []string{"schedule", "extend", workspaceName, "90m"} + }, + }, + } + + for _, tc := range cases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + clock := quartz.NewMock(t) + clock.Set(dbtime.Now()) + + // Setup + client, db, owner := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + IncludeProvisionerDaemon: true, + Clock: clock, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspacePrebuilds: 1, + }, + }, + }) + + // Given: a template and a template version with preset and a prebuilt workspace + presetID := uuid.New() + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, nil) + _ = coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + dbgen.Preset(t, db, database.InsertPresetParams{ + ID: presetID, + TemplateVersionID: version.ID, + DesiredInstances: sql.NullInt32{Int32: 1, Valid: true}, + }) + workspaceBuild := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OwnerID: database.PrebuildsSystemUserID, + TemplateID: template.ID, + }).Seed(database.WorkspaceBuild{ + TemplateVersionID: version.ID, + TemplateVersionPresetID: uuid.NullUUID{ + UUID: presetID, + Valid: true, + }, + }).WithAgent(func(agent []*proto.Agent) []*proto.Agent { + return agent + }).Do() + + // Mark the prebuilt workspace's agent as ready so the prebuild can be claimed + ctx := dbauthz.AsSystemRestricted(testutil.Context(t, testutil.WaitLong)) + agent, err := db.GetWorkspaceAgentAndLatestBuildByAuthToken(ctx, uuid.MustParse(workspaceBuild.AgentToken)) + require.NoError(t, err) + err = db.UpdateWorkspaceAgentLifecycleStateByID(ctx, database.UpdateWorkspaceAgentLifecycleStateByIDParams{ + ID: agent.WorkspaceAgent.ID, + LifecycleState: database.WorkspaceAgentLifecycleStateReady, + }) + require.NoError(t, err) + + // Given: a prebuilt workspace + prebuild := coderdtest.MustWorkspace(t, client, workspaceBuild.Workspace.ID) + + // When: running the schedule command over a prebuilt workspace + inv, root := clitest.New(t, tc.cmdArgs(prebuild.OwnerName+"/"+prebuild.Name)...) + clitest.SetupConfig(t, client, root) + ptytest.New(t).Attach(inv) + doneChan := make(chan struct{}) + var runErr error + go func() { + defer close(doneChan) + runErr = inv.Run() + }() + <-doneChan + + // Then: an error should be returned, with an error message specific to the lifecycle parameter + require.Error(t, runErr) + require.Contains(t, runErr.Error(), tc.cliErrorMsg) + + // Given: the prebuilt workspace is claimed by a user + user, err := client.User(ctx, "testUser") + require.NoError(t, err) + claimedWorkspace, err := client.CreateUserWorkspace(ctx, user.ID.String(), codersdk.CreateWorkspaceRequest{ + TemplateVersionID: version.ID, + TemplateVersionPresetID: presetID, + Name: coderdtest.RandomUsername(t), + // The 'extend' command requires the workspace to have an existing deadline. + // To ensure this, we set the workspace's TTL to 1 hour. + TTLMillis: ptr.Ref[int64](time.Hour.Milliseconds()), + }) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, claimedWorkspace.LatestBuild.ID) + workspace := coderdtest.MustWorkspace(t, client, claimedWorkspace.ID) + require.Equal(t, prebuild.ID, workspace.ID) + + // When: running the schedule command over the claimed workspace + inv, root = clitest.New(t, tc.cmdArgs(workspace.OwnerName+"/"+workspace.Name)...) + clitest.SetupConfig(t, client, root) + pty := ptytest.New(t).Attach(inv) + require.NoError(t, inv.Run()) + + // Then: the updated schedule should be shown + pty.ExpectMatch(workspace.OwnerName + "/" + workspace.Name) + }) + } +} diff --git a/enterprise/cli/root.go b/enterprise/cli/root.go index 5b101fdbbb4b8..ed54a76f90487 100644 --- a/enterprise/cli/root.go +++ b/enterprise/cli/root.go @@ -19,6 +19,7 @@ func (r *RootCmd) enterpriseOnly() []*serpent.Command { r.prebuilds(), r.provisionerDaemons(), r.provisionerd(), + r.externalWorkspaces(), } } diff --git a/enterprise/cli/server.go b/enterprise/cli/server.go index 3b1fd63ab1c4c..f58ec86b58a43 100644 --- a/enterprise/cli/server.go +++ b/enterprise/cli/server.go @@ -20,6 +20,7 @@ import ( "github.com/coder/coder/v2/enterprise/audit/backends" "github.com/coder/coder/v2/enterprise/coderd" "github.com/coder/coder/v2/enterprise/coderd/dormancy" + "github.com/coder/coder/v2/enterprise/coderd/usage" "github.com/coder/coder/v2/enterprise/dbcrypt" "github.com/coder/coder/v2/enterprise/trialer" "github.com/coder/coder/v2/tailnet" @@ -116,11 +117,33 @@ func (r *RootCmd) Server(_ func()) *serpent.Command { o.ExternalTokenEncryption = cs } + if o.LicenseKeys == nil { + o.LicenseKeys = coderd.Keys + } + + closers := &multiCloser{} + + // Create the enterprise API. api, err := coderd.New(ctx, o) if err != nil { return nil, nil, err } - return api.AGPL, api, nil + closers.Add(api) + + // Start the enterprise usage publisher routine. This won't do anything + // unless the deployment is licensed and one of the licenses has usage + // publishing enabled. + publisher := usage.NewTallymanPublisher(ctx, options.Logger, options.Database, o.LicenseKeys, + usage.PublisherWithHTTPClient(api.HTTPClient), + ) + err = publisher.Start() + if err != nil { + _ = closers.Close() + return nil, nil, xerrors.Errorf("start usage publisher: %w", err) + } + closers.Add(publisher) + + return api.AGPL, closers, nil }) cmd.AddSubcommands( @@ -128,3 +151,23 @@ func (r *RootCmd) Server(_ func()) *serpent.Command { ) return cmd } + +type multiCloser struct { + closers []io.Closer +} + +var _ io.Closer = &multiCloser{} + +func (m *multiCloser) Add(closer io.Closer) { + m.closers = append(m.closers, closer) +} + +func (m *multiCloser) Close() error { + var errs []error + for _, closer := range m.closers { + if err := closer.Close(); err != nil { + errs = append(errs, xerrors.Errorf("close %T: %w", closer, err)) + } + } + return errors.Join(errs...) +} diff --git a/enterprise/cli/templateedit_test.go b/enterprise/cli/templateedit_test.go index fbff3e75dffcf..01d4784fd3c1e 100644 --- a/enterprise/cli/templateedit_test.go +++ b/enterprise/cli/templateedit_test.go @@ -219,9 +219,9 @@ func TestTemplateEdit(t *testing.T) { template, err := ownerClient.UpdateTemplateMeta(ctx, dbtemplate.ID, codersdk.UpdateTemplateMeta{ Name: expectedName, - DisplayName: expectedDisplayName, - Description: expectedDescription, - Icon: expectedIcon, + DisplayName: &expectedDisplayName, + Description: &expectedDescription, + Icon: &expectedIcon, DefaultTTLMillis: expectedDefaultTTLMillis, AllowUserAutostop: expectedAllowAutostop, AllowUserAutostart: expectedAllowAutostart, @@ -267,9 +267,9 @@ func TestTemplateEdit(t *testing.T) { template, err = ownerClient.UpdateTemplateMeta(ctx, dbtemplate.ID, codersdk.UpdateTemplateMeta{ Name: expectedName, - DisplayName: expectedDisplayName, - Description: expectedDescription, - Icon: expectedIcon, + DisplayName: &expectedDisplayName, + Description: &expectedDescription, + Icon: &expectedIcon, DefaultTTLMillis: expectedDefaultTTLMillis, AllowUserAutostop: expectedAllowAutostop, AllowUserAutostart: expectedAllowAutostart, diff --git a/enterprise/cli/testdata/coder_--help.golden b/enterprise/cli/testdata/coder_--help.golden index fc16bb29b9010..ddb44f78ae524 100644 --- a/enterprise/cli/testdata/coder_--help.golden +++ b/enterprise/cli/testdata/coder_--help.golden @@ -14,12 +14,13 @@ USAGE: $ coder templates init SUBCOMMANDS: - features List Enterprise features - groups Manage groups - licenses Add, delete, and list licenses - prebuilds Manage Coder prebuilds - provisioner View and manage provisioner daemons and jobs - server Start a Coder server + external-workspaces Create or manage external workspaces + features List Enterprise features + groups Manage groups + licenses Add, delete, and list licenses + prebuilds Manage Coder prebuilds + provisioner View and manage provisioner daemons and jobs + server Start a Coder server GLOBAL OPTIONS: Global options are applied to all commands. They can be set using environment diff --git a/enterprise/cli/testdata/coder_external-workspaces_--help.golden b/enterprise/cli/testdata/coder_external-workspaces_--help.golden new file mode 100644 index 0000000000000..d8b1ca8363f66 --- /dev/null +++ b/enterprise/cli/testdata/coder_external-workspaces_--help.golden @@ -0,0 +1,18 @@ +coder v0.0.0-devel + +USAGE: + coder external-workspaces [flags] [subcommand] + + Create or manage external workspaces + +SUBCOMMANDS: + agent-instructions Get the instructions for an external agent + create Create a new external workspace + list List external workspaces + +OPTIONS: + -O, --org string, $CODER_ORGANIZATION + Select which organization (uuid or name) to use. + +——— +Run `coder --help` for a list of global options. diff --git a/enterprise/cli/testdata/coder_external-workspaces_agent-instructions_--help.golden b/enterprise/cli/testdata/coder_external-workspaces_agent-instructions_--help.golden new file mode 100644 index 0000000000000..150a21313ed8c --- /dev/null +++ b/enterprise/cli/testdata/coder_external-workspaces_agent-instructions_--help.golden @@ -0,0 +1,13 @@ +coder v0.0.0-devel + +USAGE: + coder external-workspaces agent-instructions [flags] [user/]workspace[.agent] + + Get the instructions for an external agent + +OPTIONS: + -o, --output text|json (default: text) + Output format. + +——— +Run `coder --help` for a list of global options. diff --git a/enterprise/cli/testdata/coder_external-workspaces_create_--help.golden b/enterprise/cli/testdata/coder_external-workspaces_create_--help.golden new file mode 100644 index 0000000000000..208d2cc2296d7 --- /dev/null +++ b/enterprise/cli/testdata/coder_external-workspaces_create_--help.golden @@ -0,0 +1,56 @@ +coder v0.0.0-devel + +USAGE: + coder external-workspaces create [flags] [workspace] + + Create a new external workspace + + - Create a workspace for another user (if you have permission): + + $ coder create / + +OPTIONS: + -O, --org string, $CODER_ORGANIZATION + Select which organization (uuid or name) to use. + + --automatic-updates string, $CODER_WORKSPACE_AUTOMATIC_UPDATES (default: never) + Specify automatic updates setting for the workspace (accepts 'always' + or 'never'). + + --copy-parameters-from string, $CODER_WORKSPACE_COPY_PARAMETERS_FROM + Specify the source workspace name to copy parameters from. + + --parameter string-array, $CODER_RICH_PARAMETER + Rich parameter value in the format "name=value". + + --parameter-default string-array, $CODER_RICH_PARAMETER_DEFAULT + Rich parameter default values in the format "name=value". + + --preset string, $CODER_PRESET_NAME + Specify the name of a template version preset. Use 'none' to + explicitly indicate that no preset should be used. + + --rich-parameter-file string, $CODER_RICH_PARAMETER_FILE + Specify a file path with values for rich parameters defined in the + template. The file should be in YAML format, containing key-value + pairs for the parameters. + + --start-at string, $CODER_WORKSPACE_START_AT + Specify the workspace autostart schedule. Check coder schedule start + --help for the syntax. + + --stop-after duration, $CODER_WORKSPACE_STOP_AFTER + Specify a duration after which the workspace should shut down (e.g. + 8h). + + -t, --template string, $CODER_TEMPLATE_NAME + Specify a template name. + + --template-version string, $CODER_TEMPLATE_VERSION + Specify a template version name. + + -y, --yes bool + Bypass prompts. + +——— +Run `coder --help` for a list of global options. diff --git a/enterprise/cli/testdata/coder_external-workspaces_list_--help.golden b/enterprise/cli/testdata/coder_external-workspaces_list_--help.golden new file mode 100644 index 0000000000000..1210bea5aa186 --- /dev/null +++ b/enterprise/cli/testdata/coder_external-workspaces_list_--help.golden @@ -0,0 +1,24 @@ +coder v0.0.0-devel + +USAGE: + coder external-workspaces list [flags] + + List external workspaces + + Aliases: ls + +OPTIONS: + -a, --all bool + Specifies whether all workspaces will be listed or not. + + -c, --column [favorite|workspace|organization id|organization name|template|status|healthy|last built|current version|outdated|starts at|starts next|stops after|stops next|daily cost] (default: workspace,template,status,healthy,last built,current version,outdated) + Columns to display in table output. + + -o, --output table|json (default: table) + Output format. + + --search string (default: owner:me) + Search for a workspace with a query. + +——— +Run `coder --help` for a list of global options. diff --git a/enterprise/cli/testdata/coder_provisioner_jobs_list_--help.golden b/enterprise/cli/testdata/coder_provisioner_jobs_list_--help.golden index f380a0334867c..8e22f78e978f2 100644 --- a/enterprise/cli/testdata/coder_provisioner_jobs_list_--help.golden +++ b/enterprise/cli/testdata/coder_provisioner_jobs_list_--help.golden @@ -11,7 +11,7 @@ OPTIONS: -O, --org string, $CODER_ORGANIZATION Select which organization (uuid or name) to use. - -c, --column [id|created at|started at|completed at|canceled at|error|error code|status|worker id|worker name|file id|tags|queue position|queue size|organization id|template version id|workspace build id|type|available workers|template version name|template id|template name|template display name|template icon|workspace id|workspace name|organization|queue] (default: created at,id,type,template display name,status,queue,tags) + -c, --column [id|created at|started at|completed at|canceled at|error|error code|status|worker id|worker name|file id|tags|queue position|queue size|organization id|template version id|workspace build id|type|available workers|template version name|template id|template name|template display name|template icon|workspace id|workspace name|logs overflowed|organization|queue] (default: created at,id,type,template display name,status,queue,tags) Columns to display in table output. -l, --limit int, $CODER_PROVISIONER_JOB_LIST_LIMIT (default: 50) diff --git a/enterprise/cli/testdata/coder_provisioner_list_--help.golden b/enterprise/cli/testdata/coder_provisioner_list_--help.golden index 7a1807bb012f5..ce6d0754073a4 100644 --- a/enterprise/cli/testdata/coder_provisioner_list_--help.golden +++ b/enterprise/cli/testdata/coder_provisioner_list_--help.golden @@ -17,8 +17,17 @@ OPTIONS: -l, --limit int, $CODER_PROVISIONER_LIST_LIMIT (default: 50) Limit the number of provisioners returned. + -m, --max-age duration, $CODER_PROVISIONER_LIST_MAX_AGE + Filter provisioners by maximum age. + -o, --output table|json (default: table) Output format. + -f, --show-offline bool, $CODER_PROVISIONER_SHOW_OFFLINE + Show offline provisioners. + + -s, --status [offline|idle|busy], $CODER_PROVISIONER_LIST_STATUS + Filter by provisioner status. + ——— Run `coder --help` for a list of global options. diff --git a/enterprise/coderd/coderd.go b/enterprise/coderd/coderd.go index 9583e14cd7fd3..a81e16585473b 100644 --- a/enterprise/coderd/coderd.go +++ b/enterprise/coderd/coderd.go @@ -10,22 +10,25 @@ import ( "strconv" "strings" "sync" + "sync/atomic" "time" - "github.com/coder/quartz" - "github.com/coder/coder/v2/buildinfo" "github.com/coder/coder/v2/coderd/appearance" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/entitlements" "github.com/coder/coder/v2/coderd/idpsync" agplportsharing "github.com/coder/coder/v2/coderd/portsharing" + "github.com/coder/coder/v2/coderd/pproflabel" agplprebuilds "github.com/coder/coder/v2/coderd/prebuilds" "github.com/coder/coder/v2/coderd/rbac/policy" + agplusage "github.com/coder/coder/v2/coderd/usage" "github.com/coder/coder/v2/coderd/wsbuilder" "github.com/coder/coder/v2/enterprise/coderd/connectionlog" "github.com/coder/coder/v2/enterprise/coderd/enidpsync" "github.com/coder/coder/v2/enterprise/coderd/portsharing" + "github.com/coder/coder/v2/enterprise/coderd/usage" + "github.com/coder/quartz" "golang.org/x/xerrors" "tailscale.com/tailcfg" @@ -90,6 +93,13 @@ func New(ctx context.Context, options *Options) (_ *API, err error) { if options.Entitlements == nil { options.Entitlements = entitlements.New() } + if options.Options.UsageInserter == nil { + options.Options.UsageInserter = &atomic.Pointer[agplusage.Inserter]{} + } + if options.Options.UsageInserter.Load() == nil { + collector := usage.NewDBInserter() + options.Options.UsageInserter.Store(&collector) + } ctx, cancelFunc := context.WithCancel(ctx) @@ -506,6 +516,15 @@ func New(ctx context.Context, options *Options) (_ *API, err error) { apiKeyMiddleware, httpmw.ExtractNotificationTemplateParam(options.Database), ).Put("/notifications/templates/{notification_template}/method", api.updateNotificationTemplateMethod) + + r.Route("/workspaces/{workspace}/external-agent", func(r chi.Router) { + r.Use( + apiKeyMiddleware, + httpmw.ExtractWorkspaceParam(options.Database), + api.RequireFeatureMW(codersdk.FeatureWorkspaceExternalAgent), + ) + r.Get("/{agent}/credentials", api.workspaceExternalAgentCredentials) + }) }) if len(options.SCIMAPIKey) != 0 { @@ -903,7 +922,9 @@ func (api *API) updateEntitlements(ctx context.Context) error { } api.AGPL.PrebuildsReconciler.Store(&reconciler) - go reconciler.Run(context.Background()) + // TODO: Should this context be the api.ctx context? To cancel when + // the API (and entire app) is closed via shutdown? + pproflabel.Go(context.Background(), pproflabel.Service(pproflabel.ServicePrebuildReconciler), reconciler.Run) api.AGPL.PrebuildsClaimer.Store(&claimer) } @@ -918,17 +939,9 @@ func (api *API) updateEntitlements(ctx context.Context) error { } reloadedEntitlements.Features[codersdk.FeatureExternalTokenEncryption] = featureExternalTokenEncryption - // If there's a license installed, we will use the enterprise build - // limit checker. - // This checker currently only enforces the managed agent limit. - if reloadedEntitlements.HasLicense { - var checker wsbuilder.UsageChecker = api - api.AGPL.BuildUsageChecker.Store(&checker) - } else { - // Don't check any usage, just like AGPL. - var checker wsbuilder.UsageChecker = wsbuilder.NoopUsageChecker{} - api.AGPL.BuildUsageChecker.Store(&checker) - } + // Always use the enterprise usage checker + var checker wsbuilder.UsageChecker = api + api.AGPL.BuildUsageChecker.Store(&checker) return reloadedEntitlements, nil }) @@ -937,9 +950,17 @@ func (api *API) updateEntitlements(ctx context.Context) error { var _ wsbuilder.UsageChecker = &API{} func (api *API) CheckBuildUsage(ctx context.Context, store database.Store, templateVersion *database.TemplateVersion) (wsbuilder.UsageCheckResponse, error) { - // We assume that if this function is called, a valid license is installed. - // When there are no licenses installed, a noop usage checker is used - // instead. + // If the template version has an external agent, we need to check that the + // license is entitled to this feature. + if templateVersion.HasExternalAgent.Valid && templateVersion.HasExternalAgent.Bool { + feature, ok := api.Entitlements.Feature(codersdk.FeatureWorkspaceExternalAgent) + if !ok || !feature.Enabled { + return wsbuilder.UsageCheckResponse{ + Permitted: false, + Message: "You have a template which uses external agents but your license is not entitled to this feature. You will be unable to create new workspaces from these templates.", + }, nil + } + } // If the template version doesn't have an AI task, we don't need to check // usage. @@ -949,32 +970,35 @@ func (api *API) CheckBuildUsage(ctx context.Context, store database.Store, templ }, nil } - // Otherwise, we need to check that we haven't breached the managed agent + // When unlicensed, we need to check that we haven't breached the managed agent // limit. - managedAgentLimit, ok := api.Entitlements.Feature(codersdk.FeatureManagedAgentLimit) - if !ok || !managedAgentLimit.Enabled || managedAgentLimit.Limit == nil || managedAgentLimit.UsagePeriod == nil { - return wsbuilder.UsageCheckResponse{ - Permitted: false, - Message: "Your license is not entitled to managed agents. Please contact sales to continue using managed agents.", - }, nil - } + // Unlicensed deployments are allowed to use unlimited managed agents. + if api.Entitlements.HasLicense() { + managedAgentLimit, ok := api.Entitlements.Feature(codersdk.FeatureManagedAgentLimit) + if !ok || !managedAgentLimit.Enabled || managedAgentLimit.Limit == nil || managedAgentLimit.UsagePeriod == nil { + return wsbuilder.UsageCheckResponse{ + Permitted: false, + Message: "Your license is not entitled to managed agents. Please contact sales to continue using managed agents.", + }, nil + } - // This check is intentionally not committed to the database. It's fine if - // it's not 100% accurate or allows for minor breaches due to build races. - // nolint:gocritic // Requires permission to read all workspaces to read managed agent count. - managedAgentCount, err := store.GetManagedAgentCount(agpldbauthz.AsSystemRestricted(ctx), database.GetManagedAgentCountParams{ - StartTime: managedAgentLimit.UsagePeriod.Start, - EndTime: managedAgentLimit.UsagePeriod.End, - }) - if err != nil { - return wsbuilder.UsageCheckResponse{}, xerrors.Errorf("get managed agent count: %w", err) - } + // This check is intentionally not committed to the database. It's fine if + // it's not 100% accurate or allows for minor breaches due to build races. + // nolint:gocritic // Requires permission to read all workspaces to read managed agent count. + managedAgentCount, err := store.GetManagedAgentCount(agpldbauthz.AsSystemRestricted(ctx), database.GetManagedAgentCountParams{ + StartTime: managedAgentLimit.UsagePeriod.Start, + EndTime: managedAgentLimit.UsagePeriod.End, + }) + if err != nil { + return wsbuilder.UsageCheckResponse{}, xerrors.Errorf("get managed agent count: %w", err) + } - if managedAgentCount >= *managedAgentLimit.Limit { - return wsbuilder.UsageCheckResponse{ - Permitted: false, - Message: "You have breached the managed agent limit in your license. Please contact sales to continue using managed agents.", - }, nil + if managedAgentCount >= *managedAgentLimit.Limit { + return wsbuilder.UsageCheckResponse{ + Permitted: false, + Message: "You have breached the managed agent limit in your license. Please contact sales to continue using managed agents.", + }, nil + } } return wsbuilder.UsageCheckResponse{ diff --git a/enterprise/coderd/coderd_test.go b/enterprise/coderd/coderd_test.go index 94d9e4fda20df..302b367c304cd 100644 --- a/enterprise/coderd/coderd_test.go +++ b/enterprise/coderd/coderd_test.go @@ -154,7 +154,6 @@ func TestEntitlements(t *testing.T) { entitlements, err := anotherClient.Entitlements(context.Background()) require.NoError(t, err) require.False(t, entitlements.HasLicense) - //nolint:gocritic // unit test ctx := testDBAuthzRole(context.Background()) _, err = api.Database.InsertLicense(ctx, database.InsertLicenseParams{ UploadedAt: dbtime.Now(), @@ -186,7 +185,6 @@ func TestEntitlements(t *testing.T) { require.False(t, entitlements.HasLicense) // Valid ctx := context.Background() - //nolint:gocritic // unit test _, err = api.Database.InsertLicense(testDBAuthzRole(ctx), database.InsertLicenseParams{ UploadedAt: dbtime.Now(), Exp: dbtime.Now().AddDate(1, 0, 0), @@ -198,7 +196,6 @@ func TestEntitlements(t *testing.T) { }) require.NoError(t, err) // Expired - //nolint:gocritic // unit test _, err = api.Database.InsertLicense(testDBAuthzRole(ctx), database.InsertLicenseParams{ UploadedAt: dbtime.Now(), Exp: dbtime.Now().AddDate(-1, 0, 0), @@ -208,7 +205,6 @@ func TestEntitlements(t *testing.T) { }) require.NoError(t, err) // Invalid - //nolint:gocritic // unit test _, err = api.Database.InsertLicense(testDBAuthzRole(ctx), database.InsertLicenseParams{ UploadedAt: dbtime.Now(), Exp: dbtime.Now().AddDate(1, 0, 0), diff --git a/enterprise/coderd/coderdenttest/coderdenttest.go b/enterprise/coderd/coderdenttest/coderdenttest.go index 47d248335dda1..c9986c97580e0 100644 --- a/enterprise/coderd/coderdenttest/coderdenttest.go +++ b/enterprise/coderd/coderdenttest/coderdenttest.go @@ -105,7 +105,7 @@ func NewWithAPI(t *testing.T, options *Options) ( AuditLogging: options.AuditLogging, BrowserOnly: options.BrowserOnly, SCIMAPIKey: options.SCIMAPIKey, - DERPServerRelayAddress: oop.AccessURL.String(), + DERPServerRelayAddress: serverURL.String(), DERPServerRegionID: oop.BaseDERPMap.RegionIDs()[0], ReplicaSyncUpdateInterval: options.ReplicaSyncUpdateInterval, ReplicaErrorGracePeriod: options.ReplicaErrorGracePeriod, @@ -161,12 +161,13 @@ func NewWithAPI(t *testing.T, options *Options) ( // LicenseOptions is used to generate a license for testing. // It supports the builder pattern for easy customization. type LicenseOptions struct { - AccountType string - AccountID string - DeploymentIDs []string - Trial bool - FeatureSet codersdk.FeatureSet - AllFeatures bool + AccountType string + AccountID string + DeploymentIDs []string + Trial bool + FeatureSet codersdk.FeatureSet + AllFeatures bool + PublishUsageData bool // GraceAt is the time at which the license will enter the grace period. GraceAt time.Time // ExpiresAt is the time at which the license will hard expire. @@ -271,6 +272,13 @@ func GenerateLicense(t *testing.T, options LicenseOptions) string { issuedAt = time.Now().Add(-time.Minute) } + if options.AccountType == "" { + options.AccountType = license.AccountTypeSalesforce + } + if options.AccountID == "" { + options.AccountID = "test-account-id" + } + c := &license.Claims{ RegisteredClaims: jwt.RegisteredClaims{ ID: uuid.NewString(), @@ -279,15 +287,16 @@ func GenerateLicense(t *testing.T, options LicenseOptions) string { NotBefore: jwt.NewNumericDate(options.NotBefore), IssuedAt: jwt.NewNumericDate(issuedAt), }, - LicenseExpires: jwt.NewNumericDate(options.GraceAt), - AccountType: options.AccountType, - AccountID: options.AccountID, - DeploymentIDs: options.DeploymentIDs, - Trial: options.Trial, - Version: license.CurrentVersion, - AllFeatures: options.AllFeatures, - FeatureSet: options.FeatureSet, - Features: options.Features, + LicenseExpires: jwt.NewNumericDate(options.GraceAt), + AccountType: options.AccountType, + AccountID: options.AccountID, + DeploymentIDs: options.DeploymentIDs, + Trial: options.Trial, + Version: license.CurrentVersion, + AllFeatures: options.AllFeatures, + FeatureSet: options.FeatureSet, + Features: options.Features, + PublishUsageData: options.PublishUsageData, } return GenerateLicenseRaw(t, c) } diff --git a/enterprise/coderd/coderdenttest/proxytest.go b/enterprise/coderd/coderdenttest/proxytest.go index 5aaaf4a88a725..c4e5ed6019f61 100644 --- a/enterprise/coderd/coderdenttest/proxytest.go +++ b/enterprise/coderd/coderdenttest/proxytest.go @@ -109,7 +109,7 @@ func NewWorkspaceProxyReplica(t *testing.T, coderdAPI *coderd.API, owner *coders serverURL, err := url.Parse(srv.URL) require.NoError(t, err) - serverURL.Host = fmt.Sprintf("localhost:%d", tcpAddr.Port) + serverURL.Host = fmt.Sprintf("127.0.0.1:%d", tcpAddr.Port) accessURL := options.ProxyURL if accessURL == nil { diff --git a/enterprise/coderd/dormancy/dormantusersjob.go b/enterprise/coderd/dormancy/dormantusersjob.go index cae442ce07507..d331001a560ff 100644 --- a/enterprise/coderd/dormancy/dormantusersjob.go +++ b/enterprise/coderd/dormancy/dormantusersjob.go @@ -37,12 +37,13 @@ func CheckInactiveUsersWithOptions(ctx context.Context, logger slog.Logger, clk ctx, cancelFunc := context.WithCancel(ctx) tf := clk.TickerFunc(ctx, checkInterval, func() error { startTime := time.Now() - lastSeenAfter := dbtime.Now().Add(-dormancyPeriod) + now := dbtime.Time(clk.Now()).UTC() + lastSeenAfter := now.Add(-dormancyPeriod) logger.Debug(ctx, "check inactive user accounts", slog.F("dormancy_period", dormancyPeriod), slog.F("last_seen_after", lastSeenAfter)) updatedUsers, err := db.UpdateInactiveUsersToDormant(ctx, database.UpdateInactiveUsersToDormantParams{ LastSeenAfter: lastSeenAfter, - UpdatedAt: dbtime.Now(), + UpdatedAt: now, }) if err != nil && !xerrors.Is(err, sql.ErrNoRows) { logger.Error(ctx, "can't mark inactive users as dormant", slog.Error(err)) diff --git a/enterprise/coderd/dormancy/dormantusersjob_test.go b/enterprise/coderd/dormancy/dormantusersjob_test.go index e5e5276fe67a9..885a112c6141a 100644 --- a/enterprise/coderd/dormancy/dormantusersjob_test.go +++ b/enterprise/coderd/dormancy/dormantusersjob_test.go @@ -31,20 +31,28 @@ func TestCheckInactiveUsers(t *testing.T) { ctx, cancelFunc := context.WithCancel(context.Background()) t.Cleanup(cancelFunc) - inactiveUser1 := setupUser(ctx, t, db, "dormant-user-1@coder.com", database.UserStatusActive, time.Now().Add(-dormancyPeriod).Add(-time.Minute)) - inactiveUser2 := setupUser(ctx, t, db, "dormant-user-2@coder.com", database.UserStatusActive, time.Now().Add(-dormancyPeriod).Add(-time.Hour)) - inactiveUser3 := setupUser(ctx, t, db, "dormant-user-3@coder.com", database.UserStatusActive, time.Now().Add(-dormancyPeriod).Add(-6*time.Hour)) + // Use a fixed base time to avoid timing races + baseTime := time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC) + dormancyThreshold := baseTime.Add(-dormancyPeriod) - activeUser1 := setupUser(ctx, t, db, "active-user-1@coder.com", database.UserStatusActive, time.Now().Add(-dormancyPeriod).Add(time.Minute)) - activeUser2 := setupUser(ctx, t, db, "active-user-2@coder.com", database.UserStatusActive, time.Now().Add(-dormancyPeriod).Add(time.Hour)) - activeUser3 := setupUser(ctx, t, db, "active-user-3@coder.com", database.UserStatusActive, time.Now().Add(-dormancyPeriod).Add(6*time.Hour)) + // Create inactive users (last seen BEFORE dormancy threshold) + inactiveUser1 := setupUser(ctx, t, db, "dormant-user-1@coder.com", database.UserStatusActive, dormancyThreshold.Add(-time.Minute)) + inactiveUser2 := setupUser(ctx, t, db, "dormant-user-2@coder.com", database.UserStatusActive, dormancyThreshold.Add(-time.Hour)) + inactiveUser3 := setupUser(ctx, t, db, "dormant-user-3@coder.com", database.UserStatusActive, dormancyThreshold.Add(-6*time.Hour)) - suspendedUser1 := setupUser(ctx, t, db, "suspended-user-1@coder.com", database.UserStatusSuspended, time.Now().Add(-dormancyPeriod).Add(-time.Minute)) - suspendedUser2 := setupUser(ctx, t, db, "suspended-user-2@coder.com", database.UserStatusSuspended, time.Now().Add(-dormancyPeriod).Add(-time.Hour)) - suspendedUser3 := setupUser(ctx, t, db, "suspended-user-3@coder.com", database.UserStatusSuspended, time.Now().Add(-dormancyPeriod).Add(-6*time.Hour)) + // Create active users (last seen AFTER dormancy threshold) + activeUser1 := setupUser(ctx, t, db, "active-user-1@coder.com", database.UserStatusActive, baseTime.Add(-time.Minute)) + activeUser2 := setupUser(ctx, t, db, "active-user-2@coder.com", database.UserStatusActive, baseTime.Add(-time.Hour)) + activeUser3 := setupUser(ctx, t, db, "active-user-3@coder.com", database.UserStatusActive, baseTime.Add(-6*time.Hour)) + + suspendedUser1 := setupUser(ctx, t, db, "suspended-user-1@coder.com", database.UserStatusSuspended, dormancyThreshold.Add(-time.Minute)) + suspendedUser2 := setupUser(ctx, t, db, "suspended-user-2@coder.com", database.UserStatusSuspended, dormancyThreshold.Add(-time.Hour)) + suspendedUser3 := setupUser(ctx, t, db, "suspended-user-3@coder.com", database.UserStatusSuspended, dormancyThreshold.Add(-6*time.Hour)) mAudit := audit.NewMock() mClock := quartz.NewMock(t) + // Set the mock clock to the base time to ensure consistent behavior + mClock.Set(baseTime) // Run the periodic job closeFunc := dormancy.CheckInactiveUsersWithOptions(ctx, logger, mClock, db, mAudit, interval, dormancyPeriod) t.Cleanup(closeFunc) diff --git a/enterprise/coderd/enidpsync/groups.go b/enterprise/coderd/enidpsync/groups.go index 7cabce412a1ea..c67d8d53f0501 100644 --- a/enterprise/coderd/enidpsync/groups.go +++ b/enterprise/coderd/enidpsync/groups.go @@ -2,7 +2,6 @@ package enidpsync import ( "context" - "net/http" "github.com/golang-jwt/jwt/v4" @@ -20,51 +19,12 @@ func (e EnterpriseIDPSync) GroupSyncEntitled() bool { // GroupAllowList is implemented here to prevent login by unauthorized users. // TODO: GroupAllowList overlaps with the default organization group sync settings. func (e EnterpriseIDPSync) ParseGroupClaims(ctx context.Context, mergedClaims jwt.MapClaims) (idpsync.GroupParams, *idpsync.HTTPError) { - if !e.GroupSyncEntitled() { - return e.AGPLIDPSync.ParseGroupClaims(ctx, mergedClaims) + resp, err := e.AGPLIDPSync.ParseGroupClaims(ctx, mergedClaims) + if err != nil { + return idpsync.GroupParams{}, err } - - if e.GroupField != "" && len(e.GroupAllowList) > 0 { - groupsRaw, ok := mergedClaims[e.GroupField] - if !ok { - return idpsync.GroupParams{}, &idpsync.HTTPError{ - Code: http.StatusForbidden, - Msg: "Not a member of an allowed group", - Detail: "You have no groups in your claims!", - RenderStaticPage: true, - } - } - parsedGroups, err := idpsync.ParseStringSliceClaim(groupsRaw) - if err != nil { - return idpsync.GroupParams{}, &idpsync.HTTPError{ - Code: http.StatusBadRequest, - Msg: "Failed read groups from claims for allow list check. Ask an administrator for help.", - Detail: err.Error(), - RenderStaticPage: true, - } - } - - inAllowList := false - AllowListCheckLoop: - for _, group := range parsedGroups { - if _, ok := e.GroupAllowList[group]; ok { - inAllowList = true - break AllowListCheckLoop - } - } - - if !inAllowList { - return idpsync.GroupParams{}, &idpsync.HTTPError{ - Code: http.StatusForbidden, - Msg: "Not a member of an allowed group", - Detail: "Ask an administrator to add one of your groups to the allow list.", - RenderStaticPage: true, - } - } - } - return idpsync.GroupParams{ - SyncEntitled: true, - MergedClaims: mergedClaims, + SyncEntitled: e.GroupSyncEntitled(), + MergedClaims: resp.MergedClaims, }, nil } diff --git a/enterprise/coderd/enidpsync/organizations_test.go b/enterprise/coderd/enidpsync/organizations_test.go index 13a9bd69ed8fd..c3bae7cd1d848 100644 --- a/enterprise/coderd/enidpsync/organizations_test.go +++ b/enterprise/coderd/enidpsync/organizations_test.go @@ -56,7 +56,6 @@ func TestOrganizationSync(t *testing.T) { requireUserOrgs := func(t *testing.T, db database.Store, user database.User, expected []uuid.UUID) { t.Helper() - // nolint:gocritic // in testing members, err := db.OrganizationMembers(dbauthz.AsSystemRestricted(context.Background()), database.OrganizationMembersParams{ UserID: user.ID, }) diff --git a/enterprise/coderd/idpsync_test.go b/enterprise/coderd/idpsync_test.go index d34701c3f6936..49d83a62688ba 100644 --- a/enterprise/coderd/idpsync_test.go +++ b/enterprise/coderd/idpsync_test.go @@ -39,7 +39,6 @@ func TestGetGroupSyncSettings(t *testing.T) { ctx := testutil.Context(t, testutil.WaitShort) dbresv := runtimeconfig.OrganizationResolver(user.OrganizationID, runtimeconfig.NewStoreResolver(db)) entry := runtimeconfig.MustNew[*idpsync.GroupSyncSettings]("group-sync-settings") - //nolint:gocritic // Requires system context to set runtime config err := entry.SetRuntimeValue(dbauthz.AsSystemRestricted(ctx), dbresv, &idpsync.GroupSyncSettings{Field: "august"}) require.NoError(t, err) diff --git a/enterprise/coderd/license/license.go b/enterprise/coderd/license/license.go index bc5c174d9fc3a..504c9a04caea0 100644 --- a/enterprise/coderd/license/license.go +++ b/enterprise/coderd/license/license.go @@ -3,6 +3,7 @@ package license import ( "context" "crypto/ed25519" + "database/sql" "fmt" "math" "time" @@ -94,10 +95,34 @@ func Entitlements( return codersdk.Entitlements{}, xerrors.Errorf("query active user count: %w", err) } + // nolint:gocritic // Getting external workspaces is a system function. + externalWorkspaces, err := db.GetWorkspaces(dbauthz.AsSystemRestricted(ctx), database.GetWorkspacesParams{ + HasExternalAgent: sql.NullBool{ + Bool: true, + Valid: true, + }, + }) + if err != nil { + return codersdk.Entitlements{}, xerrors.Errorf("query external workspaces: %w", err) + } + + // nolint:gocritic // Getting external templates is a system function. + externalTemplates, err := db.GetTemplatesWithFilter(dbauthz.AsSystemRestricted(ctx), database.GetTemplatesWithFilterParams{ + HasExternalAgent: sql.NullBool{ + Bool: true, + Valid: true, + }, + }) + if err != nil { + return codersdk.Entitlements{}, xerrors.Errorf("query external templates: %w", err) + } + entitlements, err := LicensesEntitlements(ctx, now, licenses, enablements, keys, FeatureArguments{ - ActiveUserCount: activeUserCount, - ReplicaCount: replicaCount, - ExternalAuthCount: externalAuthCount, + ActiveUserCount: activeUserCount, + ReplicaCount: replicaCount, + ExternalAuthCount: externalAuthCount, + ExternalWorkspaceCount: int64(len(externalWorkspaces)), + ExternalTemplateCount: int64(len(externalTemplates)), ManagedAgentCountFn: func(ctx context.Context, startTime time.Time, endTime time.Time) (int64, error) { // nolint:gocritic // Requires permission to read all workspaces to read managed agent count. return db.GetManagedAgentCount(dbauthz.AsSystemRestricted(ctx), database.GetManagedAgentCountParams{ @@ -114,9 +139,11 @@ func Entitlements( } type FeatureArguments struct { - ActiveUserCount int64 - ReplicaCount int - ExternalAuthCount int + ActiveUserCount int64 + ReplicaCount int + ExternalAuthCount int + ExternalWorkspaceCount int64 + ExternalTemplateCount int64 // Unfortunately, managed agent count is not a simple count of the current // state of the world, but a count between two points in time determined by // the licenses. @@ -418,6 +445,30 @@ func LicensesEntitlements( } } + if featureArguments.ExternalWorkspaceCount > 0 { + feature := entitlements.Features[codersdk.FeatureWorkspaceExternalAgent] + switch feature.Entitlement { + case codersdk.EntitlementNotEntitled: + entitlements.Errors = append(entitlements.Errors, + "You have external workspaces but your license is not entitled to this feature.") + case codersdk.EntitlementGracePeriod: + entitlements.Warnings = append(entitlements.Warnings, + "You have external workspaces but your license is expired.") + } + } + + if featureArguments.ExternalTemplateCount > 0 { + feature := entitlements.Features[codersdk.FeatureWorkspaceExternalAgent] + switch feature.Entitlement { + case codersdk.EntitlementNotEntitled: + entitlements.Errors = append(entitlements.Errors, + "You have templates which use external agents but your license is not entitled to this feature.") + case codersdk.EntitlementGracePeriod: + entitlements.Warnings = append(entitlements.Warnings, + "You have templates which use external agents but your license is expired.") + } + } + // Managed agent warnings are applied based on usage period. We only // generate a warning if the license actually has managed agents. // Note that agents are free when unlicensed. @@ -584,6 +635,7 @@ type Claims struct { Version uint64 `json:"version"` Features Features `json:"features"` RequireTelemetry bool `json:"require_telemetry,omitempty"` + PublishUsageData bool `json:"publish_usage_data,omitempty"` } var _ jwt.Claims = &Claims{} diff --git a/enterprise/coderd/license/license_test.go b/enterprise/coderd/license/license_test.go index d8203117039cb..0ca7d2287ad63 100644 --- a/enterprise/coderd/license/license_test.go +++ b/enterprise/coderd/license/license_test.go @@ -723,6 +723,12 @@ func TestEntitlements(t *testing.T) { return true })). Return(int64(175), nil) + mDB.EXPECT(). + GetWorkspaces(gomock.Any(), gomock.Any()). + Return([]database.GetWorkspacesRow{}, nil) + mDB.EXPECT(). + GetTemplatesWithFilter(gomock.Any(), gomock.Any()). + Return([]database.Template{}, nil) entitlements, err := license.Entitlements(context.Background(), mDB, 1, 0, coderdenttest.Keys, all) require.NoError(t, err) @@ -766,6 +772,7 @@ func TestLicenseEntitlements(t *testing.T) { codersdk.FeatureUserRoleManagement: true, codersdk.FeatureAccessControl: true, codersdk.FeatureControlSharedPorts: true, + codersdk.FeatureWorkspaceExternalAgent: true, } legacyLicense := func() *coderdenttest.LicenseOptions { @@ -1109,6 +1116,32 @@ func TestLicenseEntitlements(t *testing.T) { assert.Equal(t, int64(200), *feature.Actual) }, }, + { + Name: "ExternalWorkspace", + Licenses: []*coderdenttest.LicenseOptions{ + enterpriseLicense().UserLimit(100), + }, + Arguments: license.FeatureArguments{ + ExternalWorkspaceCount: 1, + }, + AssertEntitlements: func(t *testing.T, entitlements codersdk.Entitlements) { + assert.Equal(t, codersdk.EntitlementEntitled, entitlements.Features[codersdk.FeatureWorkspaceExternalAgent].Entitlement) + assert.True(t, entitlements.Features[codersdk.FeatureWorkspaceExternalAgent].Enabled) + }, + }, + { + Name: "ExternalTemplate", + Licenses: []*coderdenttest.LicenseOptions{ + enterpriseLicense().UserLimit(100), + }, + Arguments: license.FeatureArguments{ + ExternalTemplateCount: 1, + }, + AssertEntitlements: func(t *testing.T, entitlements codersdk.Entitlements) { + assert.Equal(t, codersdk.EntitlementEntitled, entitlements.Features[codersdk.FeatureWorkspaceExternalAgent].Entitlement) + assert.True(t, entitlements.Features[codersdk.FeatureWorkspaceExternalAgent].Enabled) + }, + }, } for _, tc := range testCases { diff --git a/enterprise/coderd/prebuilds/claim.go b/enterprise/coderd/prebuilds/claim.go index b6a85ae1fc094..daea281d38d60 100644 --- a/enterprise/coderd/prebuilds/claim.go +++ b/enterprise/coderd/prebuilds/claim.go @@ -4,6 +4,7 @@ import ( "context" "database/sql" "errors" + "time" "github.com/google/uuid" "golang.org/x/xerrors" @@ -24,14 +25,22 @@ func NewEnterpriseClaimer(store database.Store) *EnterpriseClaimer { func (c EnterpriseClaimer) Claim( ctx context.Context, + now time.Time, userID uuid.UUID, name string, presetID uuid.UUID, + autostartSchedule sql.NullString, + nextStartAt sql.NullTime, + ttl sql.NullInt64, ) (*uuid.UUID, error) { result, err := c.store.ClaimPrebuiltWorkspace(ctx, database.ClaimPrebuiltWorkspaceParams{ - NewUserID: userID, - NewName: name, - PresetID: presetID, + NewUserID: userID, + NewName: name, + Now: now, + PresetID: presetID, + AutostartSchedule: autostartSchedule, + NextStartAt: nextStartAt, + WorkspaceTtl: ttl, }) if err != nil { switch { diff --git a/enterprise/coderd/prebuilds/claim_test.go b/enterprise/coderd/prebuilds/claim_test.go index 01195e3485016..9ed7e9ffd19e0 100644 --- a/enterprise/coderd/prebuilds/claim_test.go +++ b/enterprise/coderd/prebuilds/claim_test.go @@ -15,6 +15,8 @@ import ( "github.com/stretchr/testify/require" "golang.org/x/xerrors" + "github.com/coder/coder/v2/coderd/database/dbtime" + "github.com/coder/coder/v2/coderd/files" "github.com/coder/quartz" @@ -132,7 +134,9 @@ func TestClaimPrebuild(t *testing.T) { t.Run(name, func(t *testing.T) { t.Parallel() - // Setup. + // Setup + clock := quartz.NewMock(t) + clock.Set(dbtime.Now()) ctx := testutil.Context(t, testutil.WaitSuperLong) db, pubsub := dbtestutil.NewDB(t) @@ -144,6 +148,7 @@ func TestClaimPrebuild(t *testing.T) { Options: &coderdtest.Options{ Database: spy, Pubsub: pubsub, + Clock: clock, }, LicenseOptions: &coderdenttest.LicenseOptions{ Features: license.Features{ @@ -238,6 +243,7 @@ func TestClaimPrebuild(t *testing.T) { // When: a user creates a new workspace with a preset for which prebuilds are configured. workspaceName := strings.ReplaceAll(testutil.GetRandomName(t), "_", "-") params := database.ClaimPrebuiltWorkspaceParams{ + Now: clock.Now(), NewUserID: user.ID, NewName: workspaceName, PresetID: presets[0].ID, diff --git a/enterprise/coderd/prebuilds/metricscollector_test.go b/enterprise/coderd/prebuilds/metricscollector_test.go index 1e9f3f5082806..b852079beb2af 100644 --- a/enterprise/coderd/prebuilds/metricscollector_test.go +++ b/enterprise/coderd/prebuilds/metricscollector_test.go @@ -231,7 +231,6 @@ func TestMetricsCollector(t *testing.T) { } // Force an update to the metrics state to allow the collector to collect fresh metrics. - // nolint:gocritic // Authz context needed to retrieve state. require.NoError(t, collector.UpdateState(dbauthz.AsPrebuildsOrchestrator(ctx), testutil.WaitLong)) metricsFamilies, err := registry.Gather() @@ -367,7 +366,6 @@ func TestMetricsCollector_DuplicateTemplateNames(t *testing.T) { "organization_name": defaultOrg.Name, } - // nolint:gocritic // Authz context needed to retrieve state. ctx = dbauthz.AsPrebuildsOrchestrator(ctx) // Then: metrics collect successfully. diff --git a/enterprise/coderd/provisionerdaemons.go b/enterprise/coderd/provisionerdaemons.go index c8304952781d1..65b03a7d6b864 100644 --- a/enterprise/coderd/provisionerdaemons.go +++ b/enterprise/coderd/provisionerdaemons.go @@ -352,6 +352,7 @@ func (api *API) provisionerDaemonServe(rw http.ResponseWriter, r *http.Request) &api.AGPL.Auditor, api.AGPL.TemplateScheduleStore, api.AGPL.UserQuietHoursScheduleStore, + api.AGPL.UsageInserter, api.DeploymentValues, provisionerdserver.Options{ ExternalAuthConfigs: api.ExternalAuthConfigs, diff --git a/enterprise/coderd/provisionerdaemons_test.go b/enterprise/coderd/provisionerdaemons_test.go index a94a60ffff3c2..5797e978fa34c 100644 --- a/enterprise/coderd/provisionerdaemons_test.go +++ b/enterprise/coderd/provisionerdaemons_test.go @@ -682,7 +682,6 @@ func TestProvisionerDaemonServe(t *testing.T) { if tc.insertParams.Name != "" { tc.insertParams.OrganizationID = user.OrganizationID - // nolint:gocritic // test _, err := db.InsertProvisionerKey(dbauthz.AsSystemRestricted(ctx), tc.insertParams) require.NoError(t, err) } @@ -945,7 +944,6 @@ func TestGetProvisionerDaemons(t *testing.T) { daemonCreatedAt := time.Now() - //nolint:gocritic // We're not testing auth on the following in this test provisionerKey, err := db.InsertProvisionerKey(dbauthz.AsSystemRestricted(ctx), database.InsertProvisionerKeyParams{ Name: "Test Provisioner Key", ID: uuid.New(), @@ -956,7 +954,6 @@ func TestGetProvisionerDaemons(t *testing.T) { }) require.NoError(t, err, "should be able to create a provisioner key") - //nolint:gocritic // We're not testing auth on the following in this test pd, err := db.UpsertProvisionerDaemon(dbauthz.AsSystemRestricted(ctx), database.UpsertProvisionerDaemonParams{ CreatedAt: daemonCreatedAt, Name: "Test Provisioner Daemon", diff --git a/enterprise/coderd/schedule/template.go b/enterprise/coderd/schedule/template.go index 855dea4989c73..ed21b8160e2c3 100644 --- a/enterprise/coderd/schedule/template.go +++ b/enterprise/coderd/schedule/template.go @@ -205,7 +205,6 @@ func (s *EnterpriseTemplateScheduleStore) Set(ctx context.Context, db database.S if opts.DefaultTTL != 0 { ttl = sql.NullInt64{Valid: true, Int64: int64(opts.DefaultTTL)} } - if err = tx.UpdateWorkspacesTTLByTemplateID(ctx, database.UpdateWorkspacesTTLByTemplateIDParams{ TemplateID: template.ID, Ttl: ttl, @@ -243,6 +242,10 @@ func (s *EnterpriseTemplateScheduleStore) Set(ctx context.Context, db database.S nextStartAts := []time.Time{} for _, workspace := range workspaces { + // Skip prebuilt workspaces + if workspace.IsPrebuild() { + continue + } nextStartAt := time.Time{} if workspace.AutostartSchedule.Valid { next, err := agpl.NextAllowedAutostart(s.now(), workspace.AutostartSchedule.String, templateSchedule) @@ -255,7 +258,7 @@ func (s *EnterpriseTemplateScheduleStore) Set(ctx context.Context, db database.S nextStartAts = append(nextStartAts, nextStartAt) } - //nolint:gocritic // We need to be able to update information about all workspaces. + //nolint:gocritic // We need to be able to update information about regular user workspaces. if err := db.BatchUpdateWorkspaceNextStartAt(dbauthz.AsSystemRestricted(ctx), database.BatchUpdateWorkspaceNextStartAtParams{ IDs: workspaceIDs, NextStartAts: nextStartAts, @@ -335,6 +338,11 @@ func (s *EnterpriseTemplateScheduleStore) updateWorkspaceBuild(ctx context.Conte return xerrors.Errorf("get workspace %q: %w", build.WorkspaceID, err) } + // Skip lifecycle updates for prebuilt workspaces + if workspace.IsPrebuild() { + return nil + } + job, err := db.GetProvisionerJobByID(ctx, build.JobID) if err != nil { return xerrors.Errorf("get provisioner job %q: %w", build.JobID, err) @@ -350,14 +358,23 @@ func (s *EnterpriseTemplateScheduleStore) updateWorkspaceBuild(ctx context.Conte return nil } + // Calculate the new autostop max_deadline from the workspace. Since + // autostop is always calculated from the build completion time, we don't + // want to use the returned autostop.Deadline property as it will likely be + // in the distant past. + // + // The only exception is if the newly calculated workspace TTL is now zero, + // which means the workspace can now stay on indefinitely. + // + // This also matches the behavior of updating a workspace's TTL, where we + // don't apply the changes until the workspace is rebuilt. autostop, err := agpl.CalculateAutostop(ctx, agpl.CalculateAutostopParams{ Database: db, TemplateScheduleStore: s, UserQuietHoursScheduleStore: *s.UserQuietHoursScheduleStore.Load(), - // Use the job completion time as the time we calculate autostop from. - Now: job.CompletedAt.Time, - Workspace: workspace.WorkspaceTable(), - WorkspaceAutostart: workspace.AutostartSchedule.String, + WorkspaceBuildCompletedAt: job.CompletedAt.Time, + Workspace: workspace.WorkspaceTable(), + WorkspaceAutostart: workspace.AutostartSchedule.String, }) if err != nil { return xerrors.Errorf("calculate new autostop for workspace %q: %w", workspace.ID, err) @@ -389,9 +406,24 @@ func (s *EnterpriseTemplateScheduleStore) updateWorkspaceBuild(ctx context.Conte autostop.MaxDeadline = now.Add(time.Hour * 2) } + // If the new deadline is zero, the workspace can now stay on indefinitely. + // Otherwise, we want to discard the new value as per the comment above the + // CalculateAutostop call. + // + // We could potentially calculate a new deadline based on the TTL setting + // (on either the workspace or the template based on the template's policy) + // against the current time, but doing nothing here matches the current + // behavior of the workspace TTL update endpoint. + // + // Per the documentation of CalculateAutostop, the deadline is not intended + // as a policy measure, so it's fine that we don't update it when the + // template schedule changes. + if !autostop.Deadline.IsZero() { + autostop.Deadline = build.Deadline + } + // If the current deadline on the build is after the new max_deadline, then // set it to the max_deadline. - autostop.Deadline = build.Deadline if !autostop.MaxDeadline.IsZero() && autostop.Deadline.After(autostop.MaxDeadline) { autostop.Deadline = autostop.MaxDeadline } diff --git a/enterprise/coderd/schedule/template_test.go b/enterprise/coderd/schedule/template_test.go index 4af06042b031f..e764826f76922 100644 --- a/enterprise/coderd/schedule/template_test.go +++ b/enterprise/coderd/schedule/template_test.go @@ -1,6 +1,7 @@ package schedule_test import ( + "context" "database/sql" "encoding/json" "fmt" @@ -17,15 +18,18 @@ import ( "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" + "github.com/coder/coder/v2/coderd/database/dbfake" "github.com/coder/coder/v2/coderd/database/dbgen" "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/notifications" "github.com/coder/coder/v2/coderd/notifications/notificationstest" agplschedule "github.com/coder/coder/v2/coderd/schedule" - "github.com/coder/coder/v2/coderd/util/ptr" + "github.com/coder/coder/v2/coderd/schedule/cron" + "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/cryptorand" "github.com/coder/coder/v2/enterprise/coderd/schedule" + "github.com/coder/coder/v2/provisionersdk/proto" "github.com/coder/coder/v2/testutil" "github.com/coder/quartz" ) @@ -73,17 +77,23 @@ func TestTemplateUpdateBuildDeadlines(t *testing.T) { buildTime := time.Date(nowY, nowM, nowD, 12, 0, 0, 0, time.UTC) // noon today UTC nextQuietHours := time.Date(nowY, nowM, nowD+1, 0, 0, 0, 0, time.UTC) // midnight tomorrow UTC - // Workspace old max_deadline too soon + defaultTTL := 8 * time.Hour + cases := []struct { - name string - now time.Time + name string + now time.Time + // Before: deadline time.Time maxDeadline time.Time - // Set to nil for no change. - newDeadline *time.Time + // After: + newDeadline time.Time newMaxDeadline time.Time - noQuietHours bool - autostopReq *agplschedule.TemplateAutostopRequirement + // Config: + noQuietHours bool + // Note that ttl will not influence the new build at all unless it's 0 + // AND the build does not have a max deadline post recalculation. + ttl time.Duration + autostopReq *agplschedule.TemplateAutostopRequirement }{ { name: "SkippedWorkspaceMaxDeadlineTooSoon", @@ -91,8 +101,9 @@ func TestTemplateUpdateBuildDeadlines(t *testing.T) { deadline: buildTime, maxDeadline: buildTime.Add(1 * time.Hour), // Unchanged since the max deadline is too soon. - newDeadline: nil, + newDeadline: buildTime, newMaxDeadline: buildTime.Add(1 * time.Hour), + ttl: defaultTTL, // no effect }, { name: "NewWorkspaceMaxDeadlineBeforeNow", @@ -101,10 +112,11 @@ func TestTemplateUpdateBuildDeadlines(t *testing.T) { deadline: buildTime, // Far into the future... maxDeadline: nextQuietHours.Add(24 * time.Hour), - newDeadline: nil, + newDeadline: buildTime, // We will use now() + 2 hours if the newly calculated max deadline // from the workspace build time is before now. newMaxDeadline: nextQuietHours.Add(8 * time.Hour), + ttl: defaultTTL, // no effect }, { name: "NewWorkspaceMaxDeadlineSoon", @@ -113,10 +125,11 @@ func TestTemplateUpdateBuildDeadlines(t *testing.T) { deadline: buildTime, // Far into the future... maxDeadline: nextQuietHours.Add(24 * time.Hour), - newDeadline: nil, + newDeadline: buildTime, // We will use now() + 2 hours if the newly calculated max deadline // from the workspace build time is within the next 2 hours. newMaxDeadline: nextQuietHours.Add(1 * time.Hour), + ttl: defaultTTL, // no effect }, { name: "NewWorkspaceMaxDeadlineFuture", @@ -125,8 +138,9 @@ func TestTemplateUpdateBuildDeadlines(t *testing.T) { deadline: buildTime, // Far into the future... maxDeadline: nextQuietHours.Add(24 * time.Hour), - newDeadline: nil, + newDeadline: buildTime, newMaxDeadline: nextQuietHours, + ttl: defaultTTL, // no effect }, { name: "DeadlineAfterNewWorkspaceMaxDeadline", @@ -136,8 +150,9 @@ func TestTemplateUpdateBuildDeadlines(t *testing.T) { deadline: nextQuietHours.Add(24 * time.Hour), maxDeadline: nextQuietHours.Add(24 * time.Hour), // The deadline should match since it is after the new max deadline. - newDeadline: ptr.Ref(nextQuietHours), + newDeadline: nextQuietHours, newMaxDeadline: nextQuietHours, + ttl: defaultTTL, // no effect }, { // There was a bug if a user has no quiet hours set, and autostop @@ -151,13 +166,14 @@ func TestTemplateUpdateBuildDeadlines(t *testing.T) { deadline: buildTime.Add(time.Hour * 8), maxDeadline: time.Time{}, // No max set // Should be unchanged - newDeadline: ptr.Ref(buildTime.Add(time.Hour * 8)), + newDeadline: buildTime.Add(time.Hour * 8), newMaxDeadline: time.Time{}, noQuietHours: true, autostopReq: &agplschedule.TemplateAutostopRequirement{ DaysOfWeek: 0, Weeks: 0, }, + ttl: defaultTTL, // no effect }, { // A bug existed where MaxDeadline could be set, but deadline was @@ -168,15 +184,15 @@ func TestTemplateUpdateBuildDeadlines(t *testing.T) { deadline: time.Time{}, maxDeadline: time.Time{}, // No max set // Should be unchanged - newDeadline: ptr.Ref(time.Time{}), + newDeadline: time.Time{}, newMaxDeadline: time.Time{}, noQuietHours: true, autostopReq: &agplschedule.TemplateAutostopRequirement{ DaysOfWeek: 0, Weeks: 0, }, + ttl: defaultTTL, // no effect }, - { // Similar to 'NoDeadline' test. This has a MaxDeadline set, so // the deadline of the workspace should now be set. @@ -185,14 +201,33 @@ func TestTemplateUpdateBuildDeadlines(t *testing.T) { // Start with unset times deadline: time.Time{}, maxDeadline: time.Time{}, - newDeadline: ptr.Ref(nextQuietHours), + newDeadline: nextQuietHours, newMaxDeadline: nextQuietHours, + ttl: defaultTTL, // no effect + }, + { + // If the build doesn't have a max_deadline anymore, and there is no + // TTL anymore, then both the deadline and max_deadline should be + // zero. + name: "NoTTLNoDeadlineNoMaxDeadline", + now: buildTime, + deadline: buildTime.Add(time.Hour * 8), + maxDeadline: buildTime.Add(time.Hour * 8), + newDeadline: time.Time{}, + newMaxDeadline: time.Time{}, + noQuietHours: true, + autostopReq: &agplschedule.TemplateAutostopRequirement{ + DaysOfWeek: 0, + Weeks: 0, + }, + ttl: 0, }, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) user := quietUser if c.noQuietHours { @@ -206,6 +241,7 @@ func TestTemplateUpdateBuildDeadlines(t *testing.T) { t.Log("maxDeadline", c.maxDeadline) t.Log("newDeadline", c.newDeadline) t.Log("newMaxDeadline", c.newMaxDeadline) + t.Log("ttl", c.ttl) var ( template = dbgen.Template(t, db, database.Template{ @@ -300,7 +336,7 @@ func TestTemplateUpdateBuildDeadlines(t *testing.T) { _, err = templateScheduleStore.Set(ctx, db, template, agplschedule.TemplateScheduleOptions{ UserAutostartEnabled: false, UserAutostopEnabled: false, - DefaultTTL: 0, + DefaultTTL: c.ttl, AutostopRequirement: autostopReq, FailureTTL: 0, TimeTilDormant: 0, @@ -312,11 +348,8 @@ func TestTemplateUpdateBuildDeadlines(t *testing.T) { newBuild, err := db.GetWorkspaceBuildByID(ctx, wsBuild.ID) require.NoError(t, err) - if c.newDeadline == nil { - c.newDeadline = &wsBuild.Deadline - } - require.WithinDuration(t, *c.newDeadline, newBuild.Deadline, time.Second) - require.WithinDuration(t, c.newMaxDeadline, newBuild.MaxDeadline, time.Second) + require.WithinDuration(t, c.newDeadline, newBuild.Deadline, time.Second, "deadline") + require.WithinDuration(t, c.newMaxDeadline, newBuild.MaxDeadline, time.Second, "max_deadline") // Check that the new build has the same state as before. require.Equal(t, wsBuild.ProvisionerState, newBuild.ProvisionerState, "provisioner state mismatch") @@ -686,7 +719,6 @@ func TestNotifications(t *testing.T) { // Lower the dormancy TTL to ensure the schedule recalculates deadlines and // triggers notifications. - // nolint:gocritic // Need an actor in the context. _, err = templateScheduleStore.Set(dbauthz.AsNotifier(ctx), db, template, agplschedule.TemplateScheduleOptions{ TimeTilDormant: timeTilDormant / 2, TimeTilDormantAutoDelete: timeTilDormant / 2, @@ -951,6 +983,252 @@ func TestTemplateTTL(t *testing.T) { }) } +func TestTemplateUpdatePrebuilds(t *testing.T) { + t.Parallel() + + // Dormant auto-delete configured to 10 hours + dormantAutoDelete := 10 * time.Hour + + // TTL configured to 8 hours + ttl := 8 * time.Hour + + // Autostop configuration set to everyday at midnight + autostopWeekdays, err := codersdk.WeekdaysToBitmap(codersdk.AllDaysOfWeek) + require.NoError(t, err) + + // Autostart configuration set to everyday at midnight + autostartSchedule, err := cron.Weekly("CRON_TZ=UTC 0 0 * * *") + require.NoError(t, err) + autostartWeekdays, err := codersdk.WeekdaysToBitmap(codersdk.AllDaysOfWeek) + require.NoError(t, err) + + cases := []struct { + name string + templateSchedule agplschedule.TemplateScheduleOptions + workspaceUpdate func(*testing.T, context.Context, database.Store, time.Time, database.ClaimPrebuiltWorkspaceRow) + assertWorkspace func(*testing.T, context.Context, database.Store, time.Time, bool, database.Workspace) + }{ + { + name: "TemplateDormantAutoDeleteUpdatePrebuildAfterClaim", + templateSchedule: agplschedule.TemplateScheduleOptions{ + // Template level TimeTilDormantAutodelete set to 10 hours + TimeTilDormantAutoDelete: dormantAutoDelete, + }, + workspaceUpdate: func(t *testing.T, ctx context.Context, db database.Store, now time.Time, + workspace database.ClaimPrebuiltWorkspaceRow, + ) { + // When: the workspace is marked dormant + dormantWorkspace, err := db.UpdateWorkspaceDormantDeletingAt(ctx, database.UpdateWorkspaceDormantDeletingAtParams{ + ID: workspace.ID, + DormantAt: sql.NullTime{ + Time: now, + Valid: true, + }, + }) + require.NoError(t, err) + require.NotNil(t, dormantWorkspace.DormantAt) + }, + assertWorkspace: func(t *testing.T, ctx context.Context, db database.Store, now time.Time, + isPrebuild bool, workspace database.Workspace, + ) { + if isPrebuild { + // The unclaimed prebuild should have an empty DormantAt and DeletingAt + require.True(t, workspace.DormantAt.Time.IsZero()) + require.True(t, workspace.DeletingAt.Time.IsZero()) + } else { + // The claimed workspace should have its DormantAt and DeletingAt updated + require.False(t, workspace.DormantAt.Time.IsZero()) + require.False(t, workspace.DeletingAt.Time.IsZero()) + require.WithinDuration(t, now.UTC(), workspace.DormantAt.Time.UTC(), time.Second) + require.WithinDuration(t, now.Add(dormantAutoDelete).UTC(), workspace.DeletingAt.Time.UTC(), time.Second) + } + }, + }, + { + name: "TemplateTTLUpdatePrebuildAfterClaim", + templateSchedule: agplschedule.TemplateScheduleOptions{ + // Template level TTL can only be set if autostop is disabled for users + DefaultTTL: ttl, + UserAutostopEnabled: false, + }, + workspaceUpdate: func(t *testing.T, ctx context.Context, db database.Store, now time.Time, + workspace database.ClaimPrebuiltWorkspaceRow) { + }, + assertWorkspace: func(t *testing.T, ctx context.Context, db database.Store, now time.Time, + isPrebuild bool, workspace database.Workspace, + ) { + if isPrebuild { + // The unclaimed prebuild should have an empty TTL + require.Equal(t, sql.NullInt64{}, workspace.Ttl) + } else { + // The claimed workspace should have its TTL updated + require.Equal(t, sql.NullInt64{Int64: int64(ttl), Valid: true}, workspace.Ttl) + } + }, + }, + { + name: "TemplateAutostopUpdatePrebuildAfterClaim", + templateSchedule: agplschedule.TemplateScheduleOptions{ + // Template level Autostop set for everyday + AutostopRequirement: agplschedule.TemplateAutostopRequirement{ + DaysOfWeek: autostopWeekdays, + Weeks: 0, + }, + }, + workspaceUpdate: func(t *testing.T, ctx context.Context, db database.Store, now time.Time, + workspace database.ClaimPrebuiltWorkspaceRow) { + }, + assertWorkspace: func(t *testing.T, ctx context.Context, db database.Store, now time.Time, isPrebuild bool, workspace database.Workspace) { + if isPrebuild { + // The unclaimed prebuild should have an empty MaxDeadline + prebuildBuild, err := db.GetLatestWorkspaceBuildByWorkspaceID(ctx, workspace.ID) + require.NoError(t, err) + require.True(t, prebuildBuild.MaxDeadline.IsZero()) + } else { + // The claimed workspace should have its MaxDeadline updated + workspaceBuild, err := db.GetLatestWorkspaceBuildByWorkspaceID(ctx, workspace.ID) + require.NoError(t, err) + require.False(t, workspaceBuild.MaxDeadline.IsZero()) + } + }, + }, + { + name: "TemplateAutostartUpdatePrebuildAfterClaim", + templateSchedule: agplschedule.TemplateScheduleOptions{ + // Template level Autostart set for everyday + UserAutostartEnabled: true, + AutostartRequirement: agplschedule.TemplateAutostartRequirement{ + DaysOfWeek: autostartWeekdays, + }, + }, + workspaceUpdate: func(t *testing.T, ctx context.Context, db database.Store, now time.Time, workspace database.ClaimPrebuiltWorkspaceRow) { + // To compute NextStartAt, the workspace must have a valid autostart schedule + err = db.UpdateWorkspaceAutostart(ctx, database.UpdateWorkspaceAutostartParams{ + ID: workspace.ID, + AutostartSchedule: sql.NullString{ + String: autostartSchedule.String(), + Valid: true, + }, + }) + require.NoError(t, err) + }, + assertWorkspace: func(t *testing.T, ctx context.Context, db database.Store, now time.Time, isPrebuild bool, workspace database.Workspace) { + if isPrebuild { + // The unclaimed prebuild should have an empty NextStartAt + require.True(t, workspace.NextStartAt.Time.IsZero()) + } else { + // The claimed workspace should have its NextStartAt updated + require.False(t, workspace.NextStartAt.Time.IsZero()) + } + }, + }, + } + + for _, tc := range cases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + clock := quartz.NewMock(t) + clock.Set(dbtime.Now()) + + // Setup + var ( + logger = slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) + db, _ = dbtestutil.NewDB(t, dbtestutil.WithDumpOnFailure()) + ctx = testutil.Context(t, testutil.WaitLong) + user = dbgen.User(t, db, database.User{}) + ) + + // Setup the template schedule store + notifyEnq := notifications.NewNoopEnqueuer() + const userQuietHoursSchedule = "CRON_TZ=UTC 0 0 * * *" // midnight UTC + userQuietHoursStore, err := schedule.NewEnterpriseUserQuietHoursScheduleStore(userQuietHoursSchedule, true) + require.NoError(t, err) + userQuietHoursStorePtr := &atomic.Pointer[agplschedule.UserQuietHoursScheduleStore]{} + userQuietHoursStorePtr.Store(&userQuietHoursStore) + templateScheduleStore := schedule.NewEnterpriseTemplateScheduleStore(userQuietHoursStorePtr, notifyEnq, logger, clock) + + // Given: a template and a template version with preset and a prebuilt workspace + presetID := uuid.New() + org := dbfake.Organization(t, db).Do() + tv := dbfake.TemplateVersion(t, db).Seed(database.TemplateVersion{ + OrganizationID: org.Org.ID, + CreatedBy: user.ID, + }).Preset(database.TemplateVersionPreset{ + ID: presetID, + DesiredInstances: sql.NullInt32{ + Int32: 1, + Valid: true, + }, + }).Do() + workspaceBuild := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OwnerID: database.PrebuildsSystemUserID, + TemplateID: tv.Template.ID, + OrganizationID: tv.Template.OrganizationID, + }).Seed(database.WorkspaceBuild{ + TemplateVersionID: tv.TemplateVersion.ID, + TemplateVersionPresetID: uuid.NullUUID{ + UUID: presetID, + Valid: true, + }, + }).WithAgent(func(agent []*proto.Agent) []*proto.Agent { + return agent + }).Do() + + // Mark the prebuilt workspace's agent as ready so the prebuild can be claimed + // nolint:gocritic + agentCtx := dbauthz.AsSystemRestricted(testutil.Context(t, testutil.WaitLong)) + agent, err := db.GetWorkspaceAgentAndLatestBuildByAuthToken(agentCtx, uuid.MustParse(workspaceBuild.AgentToken)) + require.NoError(t, err) + err = db.UpdateWorkspaceAgentLifecycleStateByID(agentCtx, database.UpdateWorkspaceAgentLifecycleStateByIDParams{ + ID: agent.WorkspaceAgent.ID, + LifecycleState: database.WorkspaceAgentLifecycleStateReady, + }) + require.NoError(t, err) + + // Given: a prebuilt workspace + prebuild, err := db.GetWorkspaceByID(ctx, workspaceBuild.Workspace.ID) + require.NoError(t, err) + tc.assertWorkspace(t, ctx, db, clock.Now(), true, prebuild) + + // When: the template schedule is updated + _, err = templateScheduleStore.Set(ctx, db, tv.Template, tc.templateSchedule) + require.NoError(t, err) + + // Then: lifecycle parameters must remain unset while the prebuild is unclaimed + prebuild, err = db.GetWorkspaceByID(ctx, workspaceBuild.Workspace.ID) + require.NoError(t, err) + tc.assertWorkspace(t, ctx, db, clock.Now(), true, prebuild) + + // Given: the prebuilt workspace is claimed by a user + claimedWorkspace := dbgen.ClaimPrebuild( + t, db, + clock.Now(), + user.ID, + "claimedWorkspace-autostop", + presetID, + sql.NullString{}, + sql.NullTime{}, + sql.NullInt64{}) + require.Equal(t, prebuild.ID, claimedWorkspace.ID) + + // Given: the workspace level configurations are properly set in order to ensure the + // lifecycle parameters are updated + tc.workspaceUpdate(t, ctx, db, clock.Now(), claimedWorkspace) + + // When: the template schedule is updated + _, err = templateScheduleStore.Set(ctx, db, tv.Template, tc.templateSchedule) + require.NoError(t, err) + + // Then: the workspace should have its lifecycle parameters updated + workspace, err := db.GetWorkspaceByID(ctx, claimedWorkspace.ID) + require.NoError(t, err) + tc.assertWorkspace(t, ctx, db, clock.Now(), false, workspace) + }) + } +} + func must[V any](v V, err error) V { if err != nil { panic(err) diff --git a/enterprise/coderd/templates.go b/enterprise/coderd/templates.go index 4514ba928e21a..07323dce3c7e6 100644 --- a/enterprise/coderd/templates.go +++ b/enterprise/coderd/templates.go @@ -1,7 +1,6 @@ package coderd import ( - "context" "database/sql" "fmt" "net/http" @@ -15,6 +14,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/rbac/acl" "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/codersdk" @@ -184,7 +184,7 @@ func (api *API) templateACL(rw http.ResponseWriter, r *http.Request) { // @Produce json // @Tags Enterprise // @Param template path string true "Template ID" format(uuid) -// @Param request body codersdk.UpdateTemplateACL true "Update template request" +// @Param request body codersdk.UpdateTemplateACL true "Update template ACL request" // @Success 200 {object} codersdk.Response // @Router /templates/{template}/acl [patch] func (api *API) patchTemplateACL(rw http.ResponseWriter, r *http.Request) { @@ -208,13 +208,10 @@ func (api *API) patchTemplateACL(rw http.ResponseWriter, r *http.Request) { return } - validErrs := validateTemplateACLPerms(ctx, api.Database, req.UserPerms, "user_perms", true) - validErrs = append(validErrs, - validateTemplateACLPerms(ctx, api.Database, req.GroupPerms, "group_perms", false)...) - + validErrs := acl.Validate(ctx, api.Database, TemplateACLUpdateValidator(req)) if len(validErrs) > 0 { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: "Invalid request to update template metadata!", + Message: "Invalid request to update template ACL", Validations: validErrs, }) return @@ -227,28 +224,20 @@ func (api *API) patchTemplateACL(rw http.ResponseWriter, r *http.Request) { return xerrors.Errorf("get template by ID: %w", err) } - if len(req.UserPerms) > 0 { - for id, role := range req.UserPerms { - // A user with an empty string implies - // deletion. - if role == "" { - delete(template.UserACL, id) - continue - } - template.UserACL[id] = db2sdk.TemplateRoleActions(role) + for id, role := range req.UserPerms { + if role == codersdk.TemplateRoleDeleted { + delete(template.UserACL, id) + continue } + template.UserACL[id] = db2sdk.TemplateRoleActions(role) } - if len(req.GroupPerms) > 0 { - for id, role := range req.GroupPerms { - // An id with an empty string implies - // deletion. - if role == "" { - delete(template.GroupACL, id) - continue - } - template.GroupACL[id] = db2sdk.TemplateRoleActions(role) + for id, role := range req.GroupPerms { + if role == codersdk.TemplateRoleDeleted { + delete(template.GroupACL, id) + continue } + template.GroupACL[id] = db2sdk.TemplateRoleActions(role) } err = tx.UpdateTemplateACLByID(ctx, database.UpdateTemplateACLByIDParams{ @@ -277,42 +266,31 @@ func (api *API) patchTemplateACL(rw http.ResponseWriter, r *http.Request) { }) } -// nolint TODO fix stupid flag. -func validateTemplateACLPerms(ctx context.Context, db database.Store, perms map[string]codersdk.TemplateRole, field string, isUser bool) []codersdk.ValidationError { - // Validate requires full read access to users and groups - // nolint:gocritic - ctx = dbauthz.AsSystemRestricted(ctx) - var validErrs []codersdk.ValidationError - for k, v := range perms { - if err := validateTemplateRole(v); err != nil { - validErrs = append(validErrs, codersdk.ValidationError{Field: field, Detail: err.Error()}) - continue - } +type TemplateACLUpdateValidator codersdk.UpdateTemplateACL - id, err := uuid.Parse(k) - if err != nil { - validErrs = append(validErrs, codersdk.ValidationError{Field: field, Detail: "ID " + k + "must be a valid UUID."}) - continue - } +var ( + templateACLUpdateUsersFieldName = "user_perms" + templateACLUpdateGroupsFieldName = "group_perms" +) - if isUser { - // This could get slow if we get a ton of user perm updates. - _, err = db.GetUserByID(ctx, id) - if err != nil { - validErrs = append(validErrs, codersdk.ValidationError{Field: field, Detail: fmt.Sprintf("Failed to find resource with ID %q: %v", k, err.Error())}) - continue - } - } else { - // This could get slow if we get a ton of group perm updates. - _, err = db.GetGroupByID(ctx, id) - if err != nil { - validErrs = append(validErrs, codersdk.ValidationError{Field: field, Detail: fmt.Sprintf("Failed to find resource with ID %q: %v", k, err.Error())}) - continue - } - } +// TemplateACLUpdateValidator implements acl.UpdateValidator[codersdk.TemplateRole] +var _ acl.UpdateValidator[codersdk.TemplateRole] = TemplateACLUpdateValidator{} + +func (w TemplateACLUpdateValidator) Users() (map[string]codersdk.TemplateRole, string) { + return w.UserPerms, templateACLUpdateUsersFieldName +} + +func (w TemplateACLUpdateValidator) Groups() (map[string]codersdk.TemplateRole, string) { + return w.GroupPerms, templateACLUpdateGroupsFieldName +} + +func (TemplateACLUpdateValidator) ValidateRole(role codersdk.TemplateRole) error { + actions := db2sdk.TemplateRoleActions(role) + if len(actions) == 0 && role != codersdk.TemplateRoleDeleted { + return xerrors.Errorf("role %q is not a valid template role", role) } - return validErrs + return nil } func convertTemplateUsers(tus []database.TemplateUser, orgIDsByUserIDs map[uuid.UUID][]uuid.UUID) []codersdk.TemplateUser { @@ -328,15 +306,6 @@ func convertTemplateUsers(tus []database.TemplateUser, orgIDsByUserIDs map[uuid. return users } -func validateTemplateRole(role codersdk.TemplateRole) error { - actions := db2sdk.TemplateRoleActions(role) - if len(actions) == 0 && role != codersdk.TemplateRoleDeleted { - return xerrors.Errorf("role %q is not a valid Template role", role) - } - - return nil -} - func convertToTemplateRole(actions []policy.Action) codersdk.TemplateRole { switch { case len(actions) == 2 && slice.SameElements(actions, []policy.Action{policy.ActionUse, policy.ActionRead}): diff --git a/enterprise/coderd/templates_test.go b/enterprise/coderd/templates_test.go index 6c7a20f85a642..e5eafa82f8d1c 100644 --- a/enterprise/coderd/templates_test.go +++ b/enterprise/coderd/templates_test.go @@ -70,8 +70,7 @@ func TestTemplates(t *testing.T) { _ = coderdtest.CreateWorkspace(t, otherClient, secondTemplate.ID) - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) updated, err := client.UpdateTemplateMeta(ctx, template.ID, codersdk.UpdateTemplateMeta{ DeprecationMessage: ptr.Ref("Stop using this template"), @@ -185,8 +184,7 @@ func TestTemplates(t *testing.T) { ws, err := client.Workspace(context.Background(), ws.ID) require.NoError(t, err) - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) // OK var level codersdk.WorkspaceAgentPortShareLevel = codersdk.WorkspaceAgentPortShareLevelPublic @@ -261,9 +259,9 @@ func TestTemplates(t *testing.T) { ctx := testutil.Context(t, testutil.WaitLong) updated, err := anotherClient.UpdateTemplateMeta(ctx, template.ID, codersdk.UpdateTemplateMeta{ Name: template.Name, - DisplayName: template.DisplayName, - Description: template.Description, - Icon: template.Icon, + DisplayName: &template.DisplayName, + Description: &template.Description, + Icon: &template.Icon, AutostartRequirement: &codersdk.TemplateAutostartRequirement{ DaysOfWeek: []string{"monday", "saturday"}, }, @@ -278,9 +276,9 @@ func TestTemplates(t *testing.T) { // Ensure a missing field is a noop updated, err = anotherClient.UpdateTemplateMeta(ctx, template.ID, codersdk.UpdateTemplateMeta{ Name: template.Name, - DisplayName: template.DisplayName, - Description: template.Description, - Icon: template.Icon + "something", + DisplayName: &template.DisplayName, + Description: &template.Description, + Icon: ptr.Ref(template.Icon + "something"), }) require.NoError(t, err) require.Equal(t, []string{"monday", "saturday"}, updated.AutostartRequirement.DaysOfWeek) @@ -315,9 +313,9 @@ func TestTemplates(t *testing.T) { ctx := testutil.Context(t, testutil.WaitLong) _, err := anotherClient.UpdateTemplateMeta(ctx, template.ID, codersdk.UpdateTemplateMeta{ Name: template.Name, - DisplayName: template.DisplayName, - Description: template.Description, - Icon: template.Icon, + DisplayName: &template.DisplayName, + Description: &template.Description, + Icon: &template.Icon, AutostartRequirement: &codersdk.TemplateAutostartRequirement{ DaysOfWeek: []string{"foobar", "saturday"}, }, @@ -351,9 +349,9 @@ func TestTemplates(t *testing.T) { ctx := context.Background() updated, err := anotherClient.UpdateTemplateMeta(ctx, template.ID, codersdk.UpdateTemplateMeta{ Name: template.Name, - DisplayName: template.DisplayName, - Description: template.Description, - Icon: template.Icon, + DisplayName: &template.DisplayName, + Description: &template.Description, + Icon: &template.Icon, AllowUserCancelWorkspaceJobs: template.AllowUserCancelWorkspaceJobs, DefaultTTLMillis: time.Hour.Milliseconds(), AutostopRequirement: &codersdk.TemplateAutostopRequirement{ @@ -405,9 +403,9 @@ func TestTemplates(t *testing.T) { updated, err := anotherClient.UpdateTemplateMeta(ctx, template.ID, codersdk.UpdateTemplateMeta{ Name: template.Name, - DisplayName: template.DisplayName, - Description: template.Description, - Icon: template.Icon, + DisplayName: &template.DisplayName, + Description: &template.Description, + Icon: &template.Icon, AllowUserCancelWorkspaceJobs: template.AllowUserCancelWorkspaceJobs, TimeTilDormantMillis: inactivityTTL.Milliseconds(), FailureTTLMillis: failureTTL.Milliseconds(), @@ -474,9 +472,9 @@ func TestTemplates(t *testing.T) { t.Run(c.Name, func(t *testing.T) { _, err := anotherClient.UpdateTemplateMeta(ctx, template.ID, codersdk.UpdateTemplateMeta{ Name: template.Name, - DisplayName: template.DisplayName, - Description: template.Description, - Icon: template.Icon, + DisplayName: &template.DisplayName, + Description: &template.Description, + Icon: &template.Icon, AllowUserCancelWorkspaceJobs: template.AllowUserCancelWorkspaceJobs, TimeTilDormantMillis: c.TimeTilDormantMS, FailureTTLMillis: c.FailureTTLMS, @@ -704,8 +702,7 @@ func TestTemplates(t *testing.T) { coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) require.True(t, template.RequireActiveVersion) - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) // Update the field and assert it persists. updatedTemplate, err := anotherClient.UpdateTemplateMeta(ctx, template.ID, codersdk.UpdateTemplateMeta{ @@ -761,9 +758,6 @@ func TestTemplates(t *testing.T) { }) require.NoError(t, err) - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() - _, err = owner.Template(ctx, template.ID) require.NoError(t, err) }) @@ -932,8 +926,7 @@ func TestTemplateACL(t *testing.T) { version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) acl, err := anotherClient.TemplateACL(ctx, template.ID) require.NoError(t, err) @@ -955,8 +948,7 @@ func TestTemplateACL(t *testing.T) { version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) //nolint:gocritic // non-template-admin cannot update template acl acl, err := client.TemplateACL(ctx, template.ID) @@ -1004,8 +996,7 @@ func TestTemplateACL(t *testing.T) { version := coderdtest.CreateTemplateVersion(t, client, admin.OrganizationID, nil) template := coderdtest.CreateTemplate(t, client, admin.OrganizationID, version.ID) - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) //nolint:gocritic // non-template-admin cannot get template acl acl, err := client.TemplateACL(ctx, template.ID) @@ -1013,9 +1004,9 @@ func TestTemplateACL(t *testing.T) { require.Equal(t, 1, len(acl.Groups)) _, err = client.UpdateTemplateMeta(ctx, template.ID, codersdk.UpdateTemplateMeta{ Name: template.Name, - DisplayName: template.DisplayName, - Description: template.Description, - Icon: template.Icon, + DisplayName: &template.DisplayName, + Description: &template.Description, + Icon: &template.Icon, AllowUserCancelWorkspaceJobs: template.AllowUserCancelWorkspaceJobs, DisableEveryoneGroupAccess: true, }) @@ -1267,8 +1258,7 @@ func TestUpdateTemplateACL(t *testing.T) { version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) err := anotherClient.UpdateTemplateACL(ctx, template.ID, codersdk.UpdateTemplateACL{ UserPerms: map[string]codersdk.TemplateRole{ @@ -1359,8 +1349,7 @@ func TestUpdateTemplateACL(t *testing.T) { }, } - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) err := anotherClient.UpdateTemplateACL(ctx, template.ID, req) require.NoError(t, err) @@ -1413,13 +1402,40 @@ func TestUpdateTemplateACL(t *testing.T) { template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) req := codersdk.UpdateTemplateACL{ UserPerms: map[string]codersdk.TemplateRole{ - "hi": "admin", + "hi": codersdk.TemplateRoleAdmin, + }, + } + + ctx := testutil.Context(t, testutil.WaitLong) + + //nolint:gocritic // Testing ACL validation + err := client.UpdateTemplateACL(ctx, template.ID, req) + require.Error(t, err) + cerr, _ := codersdk.AsError(err) + require.Equal(t, http.StatusBadRequest, cerr.StatusCode()) + }) + + // We should report invalid UUIDs as errors + t.Run("DeleteRoleForInvalidUUID", func(t *testing.T) { + t.Parallel() + + client, user := coderdenttest.New(t, &coderdenttest.Options{LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureTemplateRBAC: 1, + }, + }}) + + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + req := codersdk.UpdateTemplateACL{ + UserPerms: map[string]codersdk.TemplateRole{ + "hi": codersdk.TemplateRoleDeleted, }, } ctx := testutil.Context(t, testutil.WaitLong) - //nolint:gocritic // we're testing invalid UUID so testing RBAC is not relevant here. + //nolint:gocritic // Testing ACL validation err := client.UpdateTemplateACL(ctx, template.ID, req) require.Error(t, err) cerr, _ := codersdk.AsError(err) @@ -1445,13 +1461,75 @@ func TestUpdateTemplateACL(t *testing.T) { ctx := testutil.Context(t, testutil.WaitLong) - //nolint:gocritic // we're testing invalid user so testing RBAC is not relevant here. + //nolint:gocritic // Testing ACL validation err := client.UpdateTemplateACL(ctx, template.ID, req) require.Error(t, err) cerr, _ := codersdk.AsError(err) require.Equal(t, http.StatusBadRequest, cerr.StatusCode()) }) + // We should allow the special "Delete" role for valid UUIDs that don't + // correspond to a valid user, because the user might have been deleted. + t.Run("DeleteRoleForDeletedUser", func(t *testing.T) { + t.Parallel() + + client, user := coderdenttest.New(t, &coderdenttest.Options{LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureTemplateRBAC: 1, + }, + }}) + + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + + ctx := testutil.Context(t, testutil.WaitLong) + + _, deletedUser := coderdtest.CreateAnotherUser(t, client, user.OrganizationID) + //nolint:gocritic // Can't delete yourself + err := client.DeleteUser(ctx, deletedUser.ID) + require.NoError(t, err) + + req := codersdk.UpdateTemplateACL{ + UserPerms: map[string]codersdk.TemplateRole{ + deletedUser.ID.String(): codersdk.TemplateRoleDeleted, + }, + } + //nolint:gocritic // Testing ACL validation + err = client.UpdateTemplateACL(ctx, template.ID, req) + require.NoError(t, err) + }) + + t.Run("DeletedUser", func(t *testing.T) { + t.Parallel() + + client, user := coderdenttest.New(t, &coderdenttest.Options{LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureTemplateRBAC: 1, + }, + }}) + + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + + ctx := testutil.Context(t, testutil.WaitLong) + + _, deletedUser := coderdtest.CreateAnotherUser(t, client, user.OrganizationID) + //nolint:gocritic // Can't delete yourself + err := client.DeleteUser(ctx, deletedUser.ID) + require.NoError(t, err) + + req := codersdk.UpdateTemplateACL{ + UserPerms: map[string]codersdk.TemplateRole{ + deletedUser.ID.String(): codersdk.TemplateRoleAdmin, + }, + } + //nolint:gocritic // Testing ACL validation + err = client.UpdateTemplateACL(ctx, template.ID, req) + require.Error(t, err) + cerr, _ := codersdk.AsError(err) + require.Equal(t, http.StatusBadRequest, cerr.StatusCode()) + }) + t.Run("InvalidRole", func(t *testing.T) { t.Parallel() @@ -1472,7 +1550,7 @@ func TestUpdateTemplateACL(t *testing.T) { ctx := testutil.Context(t, testutil.WaitLong) - //nolint:gocritic // we're testing invalid role so testing RBAC is not relevant here. + //nolint:gocritic // Testing ACL validation err := client.UpdateTemplateACL(ctx, template.ID, req) require.Error(t, err) cerr, _ := codersdk.AsError(err) @@ -1590,8 +1668,7 @@ func TestUpdateTemplateACL(t *testing.T) { version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) acl, err := anotherClient.TemplateACL(ctx, template.ID) require.NoError(t, err) @@ -1680,8 +1757,7 @@ func TestUpdateTemplateACL(t *testing.T) { version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) acl, err := anotherClient.TemplateACL(ctx, template.ID) require.NoError(t, err) diff --git a/enterprise/coderd/usage/inserter.go b/enterprise/coderd/usage/inserter.go new file mode 100644 index 0000000000000..f3566595a181f --- /dev/null +++ b/enterprise/coderd/usage/inserter.go @@ -0,0 +1,68 @@ +package usage + +import ( + "context" + "encoding/json" + + "github.com/google/uuid" + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbtime" + agplusage "github.com/coder/coder/v2/coderd/usage" + "github.com/coder/coder/v2/coderd/usage/usagetypes" + "github.com/coder/quartz" +) + +// dbInserter collects usage events and stores them in the database for +// publishing. +type dbInserter struct { + clock quartz.Clock +} + +var _ agplusage.Inserter = &dbInserter{} + +// NewDBInserter creates a new database-backed usage event inserter. +func NewDBInserter(opts ...InserterOption) agplusage.Inserter { + c := &dbInserter{ + clock: quartz.NewReal(), + } + for _, opt := range opts { + opt(c) + } + return c +} + +type InserterOption func(*dbInserter) + +// InserterWithClock sets the quartz clock to use for the inserter. +func InserterWithClock(clock quartz.Clock) InserterOption { + return func(c *dbInserter) { + c.clock = clock + } +} + +// InsertDiscreteUsageEvent implements agplusage.Inserter. +func (i *dbInserter) InsertDiscreteUsageEvent(ctx context.Context, tx database.Store, event usagetypes.DiscreteEvent) error { + if !event.EventType().IsDiscrete() { + return xerrors.Errorf("event type %q is not a discrete event", event.EventType()) + } + if err := event.Valid(); err != nil { + return xerrors.Errorf("invalid %q event: %w", event.EventType(), err) + } + + jsonData, err := json.Marshal(event.Fields()) + if err != nil { + return xerrors.Errorf("marshal event as JSON: %w", err) + } + + // Duplicate events are ignored by the query, so we don't need to check the + // error. + return tx.InsertUsageEvent(ctx, database.InsertUsageEventParams{ + // Always generate a new UUID for discrete events. + ID: uuid.New().String(), + EventType: string(event.EventType()), + EventData: jsonData, + CreatedAt: dbtime.Time(i.clock.Now()), + }) +} diff --git a/enterprise/coderd/usage/inserter_test.go b/enterprise/coderd/usage/inserter_test.go new file mode 100644 index 0000000000000..7ac915be7a5a8 --- /dev/null +++ b/enterprise/coderd/usage/inserter_test.go @@ -0,0 +1,85 @@ +package usage_test + +import ( + "testing" + "time" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" + + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbmock" + "github.com/coder/coder/v2/coderd/database/dbtime" + "github.com/coder/coder/v2/coderd/usage/usagetypes" + "github.com/coder/coder/v2/enterprise/coderd/usage" + "github.com/coder/coder/v2/testutil" + "github.com/coder/quartz" +) + +func TestInserter(t *testing.T) { + t.Parallel() + + t.Run("OK", func(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitLong) + ctrl := gomock.NewController(t) + db := dbmock.NewMockStore(ctrl) + clock := quartz.NewMock(t) + inserter := usage.NewDBInserter(usage.InserterWithClock(clock)) + + now := dbtime.Now() + events := []struct { + time time.Time + event usagetypes.DiscreteEvent + }{ + { + time: now, + event: usagetypes.DCManagedAgentsV1{ + Count: 1, + }, + }, + { + time: now.Add(1 * time.Minute), + event: usagetypes.DCManagedAgentsV1{ + Count: 2, + }, + }, + } + + for _, e := range events { + eventJSON := jsoninate(t, e.event) + db.EXPECT().InsertUsageEvent(gomock.Any(), gomock.Any()).DoAndReturn( + func(ctx interface{}, params database.InsertUsageEventParams) error { + _, err := uuid.Parse(params.ID) + assert.NoError(t, err) + assert.Equal(t, e.event.EventType(), usagetypes.UsageEventType(params.EventType)) + assert.JSONEq(t, eventJSON, string(params.EventData)) + assert.Equal(t, e.time, params.CreatedAt) + return nil + }, + ).Times(1) + + clock.Set(e.time) + err := inserter.InsertDiscreteUsageEvent(ctx, db, e.event) + require.NoError(t, err) + } + }) + + t.Run("InvalidEvent", func(t *testing.T) { + t.Parallel() + + ctx := testutil.Context(t, testutil.WaitLong) + ctrl := gomock.NewController(t) + db := dbmock.NewMockStore(ctrl) + + // We should get an error if the event is invalid. + inserter := usage.NewDBInserter() + err := inserter.InsertDiscreteUsageEvent(ctx, db, usagetypes.DCManagedAgentsV1{ + Count: 0, // invalid + }) + assert.ErrorContains(t, err, `invalid "dc_managed_agents_v1" event: count must be greater than 0`) + }) +} diff --git a/enterprise/coderd/usage/publisher.go b/enterprise/coderd/usage/publisher.go new file mode 100644 index 0000000000000..ce38f9a24a925 --- /dev/null +++ b/enterprise/coderd/usage/publisher.go @@ -0,0 +1,433 @@ +package usage + +import ( + "bytes" + "context" + "crypto/ed25519" + "encoding/json" + "fmt" + "io" + "net/http" + "time" + + "github.com/google/uuid" + "golang.org/x/xerrors" + + "cdr.dev/slog" + "github.com/coder/coder/v2/buildinfo" + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbauthz" + "github.com/coder/coder/v2/coderd/database/dbtime" + "github.com/coder/coder/v2/coderd/pproflabel" + "github.com/coder/coder/v2/coderd/usage/usagetypes" + "github.com/coder/coder/v2/cryptorand" + "github.com/coder/coder/v2/enterprise/coderd/license" + "github.com/coder/quartz" +) + +const ( + tallymanURL = "https://tallyman-prod.coder.com" + tallymanIngestURLV1 = tallymanURL + "/api/v1/events/ingest" + + tallymanPublishInitialMinimumDelay = 5 * time.Minute + // Chosen to be a prime number and not a multiple of 5 like many other + // recurring tasks. + tallymanPublishInterval = 17 * time.Minute + tallymanPublishTimeout = 30 * time.Second + tallymanPublishBatchSize = 100 +) + +var errUsagePublishingDisabled = xerrors.New("usage publishing is not enabled by any license") + +// Publisher publishes usage events ***somewhere***. +type Publisher interface { + // Close closes the publisher and waits for it to finish. + io.Closer + // Start starts the publisher. It must only be called once. + Start() error +} + +type tallymanPublisher struct { + ctx context.Context + ctxCancel context.CancelFunc + log slog.Logger + db database.Store + licenseKeys map[string]ed25519.PublicKey + done chan struct{} + + // Configured with options: + ingestURL string + httpClient *http.Client + clock quartz.Clock + initialDelay time.Duration +} + +var _ Publisher = &tallymanPublisher{} + +// NewTallymanPublisher creates a Publisher that publishes usage events to +// Coder's Tallyman service. +func NewTallymanPublisher(ctx context.Context, log slog.Logger, db database.Store, keys map[string]ed25519.PublicKey, opts ...TallymanPublisherOption) Publisher { + ctx, cancel := context.WithCancel(ctx) + ctx = dbauthz.AsUsagePublisher(ctx) //nolint:gocritic // we intentionally want to be able to process usage events + + publisher := &tallymanPublisher{ + ctx: ctx, + ctxCancel: cancel, + log: log, + db: db, + licenseKeys: keys, + done: make(chan struct{}), + + ingestURL: tallymanIngestURLV1, + httpClient: http.DefaultClient, + clock: quartz.NewReal(), + } + for _, opt := range opts { + opt(publisher) + } + return publisher +} + +type TallymanPublisherOption func(*tallymanPublisher) + +// PublisherWithHTTPClient sets the HTTP client to use for publishing usage events. +func PublisherWithHTTPClient(httpClient *http.Client) TallymanPublisherOption { + return func(p *tallymanPublisher) { + if httpClient == nil { + httpClient = http.DefaultClient + } + p.httpClient = httpClient + } +} + +// PublisherWithClock sets the clock to use for publishing usage events. +func PublisherWithClock(clock quartz.Clock) TallymanPublisherOption { + return func(p *tallymanPublisher) { + p.clock = clock + } +} + +// PublisherWithIngestURL sets the ingest URL to use for publishing usage +// events. +func PublisherWithIngestURL(ingestURL string) TallymanPublisherOption { + return func(p *tallymanPublisher) { + p.ingestURL = ingestURL + } +} + +// PublisherWithInitialDelay sets the initial delay for the publisher. +func PublisherWithInitialDelay(initialDelay time.Duration) TallymanPublisherOption { + return func(p *tallymanPublisher) { + p.initialDelay = initialDelay + } +} + +// Start implements Publisher. +func (p *tallymanPublisher) Start() error { + ctx := p.ctx + deploymentID, err := p.db.GetDeploymentID(ctx) + if err != nil { + return xerrors.Errorf("get deployment ID: %w", err) + } + deploymentUUID, err := uuid.Parse(deploymentID) + if err != nil { + return xerrors.Errorf("parse deployment ID %q: %w", deploymentID, err) + } + + if p.initialDelay <= 0 { + // Pick a random time between tallymanPublishInitialMinimumDelay and + // tallymanPublishInterval. + maxPlusDelay := tallymanPublishInterval - tallymanPublishInitialMinimumDelay + plusDelay, err := cryptorand.Int63n(int64(maxPlusDelay)) + if err != nil { + return xerrors.Errorf("could not generate random start delay: %w", err) + } + p.initialDelay = tallymanPublishInitialMinimumDelay + time.Duration(plusDelay) + } + + if len(p.licenseKeys) == 0 { + return xerrors.New("no license keys provided") + } + + pproflabel.Go(ctx, pproflabel.Service(pproflabel.ServiceTallymanPublisher), func(ctx context.Context) { + p.publishLoop(ctx, deploymentUUID) + }) + return nil +} + +func (p *tallymanPublisher) publishLoop(ctx context.Context, deploymentID uuid.UUID) { + defer close(p.done) + + // Start the ticker with the initial delay. We will reset it to the interval + // after the first tick. + ticker := p.clock.NewTicker(p.initialDelay) + defer ticker.Stop() + + for { + select { + case <-ctx.Done(): + return + case <-ticker.C: + } + + err := p.publish(ctx, deploymentID) + if err != nil { + p.log.Warn(ctx, "publish usage events to tallyman", slog.Error(err)) + } + ticker.Reset(tallymanPublishInterval) + } +} + +// publish publishes usage events to Tallyman in a loop until there is an error +// (or any rejection) or there are no more events to publish. +func (p *tallymanPublisher) publish(ctx context.Context, deploymentID uuid.UUID) error { + for { + publishCtx, publishCtxCancel := context.WithTimeout(ctx, tallymanPublishTimeout) + accepted, err := p.publishOnce(publishCtx, deploymentID) + publishCtxCancel() + if err != nil { + return xerrors.Errorf("publish usage events to tallyman: %w", err) + } + if accepted < tallymanPublishBatchSize { + // We published less than the batch size, so we're done. + return nil + } + } +} + +// publishOnce publishes up to tallymanPublishBatchSize usage events to +// tallyman. It returns the number of successfully published events. +func (p *tallymanPublisher) publishOnce(ctx context.Context, deploymentID uuid.UUID) (int, error) { + licenseJwt, err := p.getBestLicenseJWT(ctx) + if xerrors.Is(err, errUsagePublishingDisabled) { + return 0, nil + } else if err != nil { + return 0, xerrors.Errorf("find usage publishing license: %w", err) + } + + events, err := p.db.SelectUsageEventsForPublishing(ctx, dbtime.Time(p.clock.Now())) + if err != nil { + return 0, xerrors.Errorf("select usage events for publishing: %w", err) + } + if len(events) == 0 { + // No events to publish. + return 0, nil + } + + var ( + eventIDs = make(map[string]struct{}) + tallymanReq = usagetypes.TallymanV1IngestRequest{ + Events: make([]usagetypes.TallymanV1IngestEvent, 0, len(events)), + } + ) + for _, event := range events { + eventIDs[event.ID] = struct{}{} + eventType := usagetypes.UsageEventType(event.EventType) + if !eventType.Valid() { + // This should never happen due to the check constraint in the + // database. + return 0, xerrors.Errorf("event %q has an invalid event type %q", event.ID, event.EventType) + } + tallymanReq.Events = append(tallymanReq.Events, usagetypes.TallymanV1IngestEvent{ + ID: event.ID, + EventType: eventType, + EventData: event.EventData, + CreatedAt: event.CreatedAt, + }) + } + if len(eventIDs) != len(events) { + // This should never happen due to the unique constraint in the + // database. + return 0, xerrors.Errorf("duplicate event IDs found in events for publishing") + } + + resp, err := p.sendPublishRequest(ctx, deploymentID, licenseJwt, tallymanReq) + allFailed := err != nil + if err != nil { + p.log.Warn(ctx, "failed to send publish request to tallyman", slog.F("count", len(events)), slog.Error(err)) + // Fake a response with all events temporarily rejected. + resp = usagetypes.TallymanV1IngestResponse{ + AcceptedEvents: []usagetypes.TallymanV1IngestAcceptedEvent{}, + RejectedEvents: make([]usagetypes.TallymanV1IngestRejectedEvent, len(events)), + } + for i, event := range events { + resp.RejectedEvents[i] = usagetypes.TallymanV1IngestRejectedEvent{ + ID: event.ID, + Message: fmt.Sprintf("failed to publish to tallyman: %v", err), + Permanent: false, + } + } + } else { + p.log.Debug(ctx, "published usage events to tallyman", slog.F("accepted", len(resp.AcceptedEvents)), slog.F("rejected", len(resp.RejectedEvents))) + } + + if len(resp.AcceptedEvents)+len(resp.RejectedEvents) != len(events) { + p.log.Warn(ctx, "tallyman returned a different number of events than we sent", slog.F("sent", len(events)), slog.F("accepted", len(resp.AcceptedEvents)), slog.F("rejected", len(resp.RejectedEvents))) + } + + acceptedEvents := make(map[string]*usagetypes.TallymanV1IngestAcceptedEvent) + rejectedEvents := make(map[string]*usagetypes.TallymanV1IngestRejectedEvent) + for _, event := range resp.AcceptedEvents { + acceptedEvents[event.ID] = &event + } + for _, event := range resp.RejectedEvents { + rejectedEvents[event.ID] = &event + } + + dbUpdate := database.UpdateUsageEventsPostPublishParams{ + Now: dbtime.Time(p.clock.Now()), + IDs: make([]string, len(events)), + FailureMessages: make([]string, len(events)), + SetPublishedAts: make([]bool, len(events)), + } + for i, event := range events { + dbUpdate.IDs[i] = event.ID + if _, ok := acceptedEvents[event.ID]; ok { + dbUpdate.FailureMessages[i] = "" + dbUpdate.SetPublishedAts[i] = true + continue + } + if rejectedEvent, ok := rejectedEvents[event.ID]; ok { + dbUpdate.FailureMessages[i] = rejectedEvent.Message + dbUpdate.SetPublishedAts[i] = rejectedEvent.Permanent + continue + } + // It's not good if this path gets hit, but we'll handle it as if it + // was a temporary rejection. + dbUpdate.FailureMessages[i] = "tallyman did not include the event in the response" + dbUpdate.SetPublishedAts[i] = false + } + + // Collate rejected events into a single map of ID to failure message for + // logging. We only want to log once. + // If all events failed, we'll log the overall error above. + if !allFailed { + rejectionReasonsForLog := make(map[string]string) + for i, id := range dbUpdate.IDs { + failureMessage := dbUpdate.FailureMessages[i] + if failureMessage == "" { + continue + } + setPublishedAt := dbUpdate.SetPublishedAts[i] + if setPublishedAt { + failureMessage = "permanently rejected: " + failureMessage + } + rejectionReasonsForLog[id] = failureMessage + } + if len(rejectionReasonsForLog) > 0 { + p.log.Warn(ctx, "tallyman rejected usage events", slog.F("count", len(rejectionReasonsForLog)), slog.F("event_failure_reasons", rejectionReasonsForLog)) + } + } + + err = p.db.UpdateUsageEventsPostPublish(ctx, dbUpdate) + if err != nil { + return 0, xerrors.Errorf("update usage events post publish: %w", err) + } + + var returnErr error + if len(resp.RejectedEvents) > 0 { + returnErr = xerrors.New("some events were rejected by tallyman") + } + return len(resp.AcceptedEvents), returnErr +} + +// getBestLicenseJWT returns the best license JWT to use for the request. The +// criteria is as follows: +// - The license must be valid and active (after nbf, before exp) +// - The license must have usage publishing enabled +// The most recently issued (iat) license is chosen. +// +// If no licenses are found or none have usage publishing enabled, +// errUsagePublishingDisabled is returned. +func (p *tallymanPublisher) getBestLicenseJWT(ctx context.Context) (string, error) { + licenses, err := p.db.GetUnexpiredLicenses(ctx) + if err != nil { + return "", xerrors.Errorf("get unexpired licenses: %w", err) + } + if len(licenses) == 0 { + return "", errUsagePublishingDisabled + } + + type licenseJWTWithClaims struct { + Claims *license.Claims + Raw string + } + + var bestLicense licenseJWTWithClaims + for _, dbLicense := range licenses { + claims, err := license.ParseClaims(dbLicense.JWT, p.licenseKeys) + if err != nil { + p.log.Warn(ctx, "failed to parse license claims", slog.F("license_id", dbLicense.ID), slog.Error(err)) + continue + } + if claims.AccountType != license.AccountTypeSalesforce { + // Non-Salesforce accounts cannot be tracked as they do not have a + // trusted Salesforce opportunity ID encoded in the license. + continue + } + if !claims.PublishUsageData { + // Publishing is disabled. + continue + } + + // Otherwise, if it's issued more recently, it's the best license. + // IssuedAt is verified to be non-nil in license.ParseClaims. + if bestLicense.Claims == nil || claims.IssuedAt.Time.After(bestLicense.Claims.IssuedAt.Time) { + bestLicense = licenseJWTWithClaims{ + Claims: claims, + Raw: dbLicense.JWT, + } + } + } + + if bestLicense.Raw == "" { + return "", errUsagePublishingDisabled + } + + return bestLicense.Raw, nil +} + +func (p *tallymanPublisher) sendPublishRequest(ctx context.Context, deploymentID uuid.UUID, licenseJwt string, req usagetypes.TallymanV1IngestRequest) (usagetypes.TallymanV1IngestResponse, error) { + body, err := json.Marshal(req) + if err != nil { + return usagetypes.TallymanV1IngestResponse{}, err + } + + r, err := http.NewRequestWithContext(ctx, http.MethodPost, p.ingestURL, bytes.NewReader(body)) + if err != nil { + return usagetypes.TallymanV1IngestResponse{}, err + } + r.Header.Set("User-Agent", "coderd/"+buildinfo.Version()) + r.Header.Set(usagetypes.TallymanCoderLicenseKeyHeader, licenseJwt) + r.Header.Set(usagetypes.TallymanCoderDeploymentIDHeader, deploymentID.String()) + + resp, err := p.httpClient.Do(r) + if err != nil { + return usagetypes.TallymanV1IngestResponse{}, err + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + var errBody usagetypes.TallymanV1Response + if err := json.NewDecoder(resp.Body).Decode(&errBody); err != nil { + errBody = usagetypes.TallymanV1Response{ + Message: fmt.Sprintf("could not decode error response body: %v", err), + } + } + return usagetypes.TallymanV1IngestResponse{}, xerrors.Errorf("unexpected status code %v, error: %s", resp.StatusCode, errBody.Message) + } + + var respBody usagetypes.TallymanV1IngestResponse + if err := json.NewDecoder(resp.Body).Decode(&respBody); err != nil { + return usagetypes.TallymanV1IngestResponse{}, xerrors.Errorf("decode response body: %w", err) + } + + return respBody, nil +} + +// Close implements Publisher. +func (p *tallymanPublisher) Close() error { + p.ctxCancel() + <-p.done + return nil +} diff --git a/enterprise/coderd/usage/publisher_test.go b/enterprise/coderd/usage/publisher_test.go new file mode 100644 index 0000000000000..c104c9712e499 --- /dev/null +++ b/enterprise/coderd/usage/publisher_test.go @@ -0,0 +1,746 @@ +package usage_test + +import ( + "context" + "database/sql" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/google/uuid" + "github.com/prometheus/client_golang/prometheus" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "go.uber.org/goleak" + "go.uber.org/mock/gomock" + + "cdr.dev/slog/sloggers/slogtest" + "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbauthz" + "github.com/coder/coder/v2/coderd/database/dbmock" + "github.com/coder/coder/v2/coderd/database/dbtestutil" + "github.com/coder/coder/v2/coderd/database/dbtime" + "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/usage/usagetypes" + "github.com/coder/coder/v2/enterprise/coderd/coderdenttest" + "github.com/coder/coder/v2/enterprise/coderd/usage" + "github.com/coder/coder/v2/testutil" + "github.com/coder/quartz" +) + +func TestMain(m *testing.M) { + goleak.VerifyTestMain(m, testutil.GoleakOptions...) +} + +// TestIntegration tests the inserter and publisher by running them with a real +// database. +func TestIntegration(t *testing.T) { + t.Parallel() + const eventCount = 3 + + ctx := testutil.Context(t, testutil.WaitLong) + log := slogtest.Make(t, nil) + db, _ := dbtestutil.NewDB(t) + + clock := quartz.NewMock(t) + deploymentID, licenseJWT := configureDeployment(ctx, t, db) + now := time.Now() + + var ( + calls int + handler func(req usagetypes.TallymanV1IngestRequest) any + ) + ingestURL := fakeServer(t, tallymanHandler(t, deploymentID.String(), licenseJWT, func(req usagetypes.TallymanV1IngestRequest) any { + calls++ + t.Logf("tallyman backend received call %d", calls) + + if handler == nil { + t.Errorf("handler is nil") + return usagetypes.TallymanV1IngestResponse{} + } + return handler(req) + })) + + inserter := usage.NewDBInserter( + usage.InserterWithClock(clock), + ) + // Insert an old event that should never be published. + clock.Set(now.Add(-31 * 24 * time.Hour)) + err := inserter.InsertDiscreteUsageEvent(ctx, db, usagetypes.DCManagedAgentsV1{ + Count: 31, + }) + require.NoError(t, err) + + // Insert the events we expect to be published. + clock.Set(now.Add(1 * time.Second)) + for i := 0; i < eventCount; i++ { + clock.Advance(time.Second) + err := inserter.InsertDiscreteUsageEvent(ctx, db, usagetypes.DCManagedAgentsV1{ + Count: uint64(i + 1), // nolint:gosec // these numbers are tiny and will not overflow + }) + require.NoErrorf(t, err, "collecting event %d", i) + } + + // Wrap the publisher's DB in a dbauthz to ensure that the publisher has + // enough permissions. + authzDB := dbauthz.New(db, rbac.NewAuthorizer(prometheus.NewRegistry()), log, coderdtest.AccessControlStorePointer()) + publisher := usage.NewTallymanPublisher(ctx, log, authzDB, coderdenttest.Keys, + usage.PublisherWithClock(clock), + usage.PublisherWithIngestURL(ingestURL), + ) + defer publisher.Close() + + // Start the publisher with a trap. + tickerTrap := clock.Trap().NewTicker() + defer tickerTrap.Close() + startErr := make(chan error) + go func() { + err := publisher.Start() + testutil.AssertSend(ctx, t, startErr, err) + }() + tickerCall := tickerTrap.MustWait(ctx) + tickerCall.MustRelease(ctx) + // The initial duration will always be some time between 5m and 17m. + require.GreaterOrEqual(t, tickerCall.Duration, 5*time.Minute) + require.LessOrEqual(t, tickerCall.Duration, 17*time.Minute) + require.NoError(t, testutil.RequireReceive(ctx, t, startErr)) + + // Set up a trap for the ticker.Reset call. + tickerResetTrap := clock.Trap().TickerReset() + defer tickerResetTrap.Close() + + // Configure the handler for the first publish. This handler will accept the + // first event, temporarily reject the second, and permanently reject the + // third. + var temporarilyRejectedEventID string + handler = func(req usagetypes.TallymanV1IngestRequest) any { + // On the first call, accept the first event, temporarily reject the + // second, and permanently reject the third. + acceptedEvents := make([]usagetypes.TallymanV1IngestAcceptedEvent, 1) + rejectedEvents := make([]usagetypes.TallymanV1IngestRejectedEvent, 2) + if assert.Len(t, req.Events, eventCount) { + assert.JSONEqf(t, jsoninate(t, usagetypes.DCManagedAgentsV1{ + Count: 1, + }), string(req.Events[0].EventData), "event data did not match for event %d", 0) + acceptedEvents[0].ID = req.Events[0].ID + + temporarilyRejectedEventID = req.Events[1].ID + assert.JSONEqf(t, jsoninate(t, usagetypes.DCManagedAgentsV1{ + Count: 2, + }), string(req.Events[1].EventData), "event data did not match for event %d", 1) + rejectedEvents[0].ID = req.Events[1].ID + rejectedEvents[0].Message = "temporarily rejected" + rejectedEvents[0].Permanent = false + + assert.JSONEqf(t, jsoninate(t, usagetypes.DCManagedAgentsV1{ + Count: 3, + }), string(req.Events[2].EventData), "event data did not match for event %d", 2) + rejectedEvents[1].ID = req.Events[2].ID + rejectedEvents[1].Message = "permanently rejected" + rejectedEvents[1].Permanent = true + } + return usagetypes.TallymanV1IngestResponse{ + AcceptedEvents: acceptedEvents, + RejectedEvents: rejectedEvents, + } + } + + // Advance the clock to the initial tick, which should trigger the first + // publish, then wait for the reset call. The duration will always be 17m + // for resets (only the initial tick is variable). + clock.Advance(tickerCall.Duration) + tickerResetCall := tickerResetTrap.MustWait(ctx) + require.Equal(t, 17*time.Minute, tickerResetCall.Duration) + tickerResetCall.MustRelease(ctx) + + // The publisher should have published the events once. + require.Equal(t, 1, calls) + + // Set the handler for the next publish call. This call should only include + // the temporarily rejected event from earlier. This time we'll accept it. + handler = func(req usagetypes.TallymanV1IngestRequest) any { + assert.Len(t, req.Events, 1) + acceptedEvents := make([]usagetypes.TallymanV1IngestAcceptedEvent, len(req.Events)) + for i, event := range req.Events { + assert.Equal(t, temporarilyRejectedEventID, event.ID) + acceptedEvents[i].ID = event.ID + } + return usagetypes.TallymanV1IngestResponse{ + AcceptedEvents: acceptedEvents, + RejectedEvents: []usagetypes.TallymanV1IngestRejectedEvent{}, + } + } + + // Advance the clock to the next tick and wait for the reset call. + clock.Advance(tickerResetCall.Duration) + tickerResetCall = tickerResetTrap.MustWait(ctx) + tickerResetCall.MustRelease(ctx) + + // The publisher should have published the events again. + require.Equal(t, 2, calls) + + // There should be no more publish calls after this, so set the handler to + // nil. + handler = nil + + // Advance the clock to the next tick. + clock.Advance(tickerResetCall.Duration) + tickerResetTrap.MustWait(ctx).MustRelease(ctx) + + // No publish should have taken place since there are no more events to + // publish. + require.Equal(t, 2, calls) + + require.NoError(t, publisher.Close()) +} + +func TestPublisherNoEligibleLicenses(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + log := slogtest.Make(t, nil) + ctrl := gomock.NewController(t) + db := dbmock.NewMockStore(ctrl) + clock := quartz.NewMock(t) + + // Configure the deployment manually. + deploymentID := uuid.New() + db.EXPECT().GetDeploymentID(gomock.Any()).Return(deploymentID.String(), nil).Times(1) + + var calls int + ingestURL := fakeServer(t, tallymanHandler(t, deploymentID.String(), "", func(req usagetypes.TallymanV1IngestRequest) any { + calls++ + return usagetypes.TallymanV1IngestResponse{ + AcceptedEvents: []usagetypes.TallymanV1IngestAcceptedEvent{}, + RejectedEvents: []usagetypes.TallymanV1IngestRejectedEvent{}, + } + })) + + publisher := usage.NewTallymanPublisher(ctx, log, db, coderdenttest.Keys, + usage.PublisherWithClock(clock), + usage.PublisherWithIngestURL(ingestURL), + ) + defer publisher.Close() + + // Start the publisher with a trap. + tickerTrap := clock.Trap().NewTicker() + defer tickerTrap.Close() + startErr := make(chan error) + go func() { + err := publisher.Start() + testutil.RequireSend(ctx, t, startErr, err) + }() + tickerCall := tickerTrap.MustWait(ctx) + tickerCall.MustRelease(ctx) + require.NoError(t, testutil.RequireReceive(ctx, t, startErr)) + + // Mock zero licenses. + db.EXPECT().GetUnexpiredLicenses(gomock.Any()).Return([]database.License{}, nil).Times(1) + + // Tick and wait for the reset call. + tickerResetTrap := clock.Trap().TickerReset() + defer tickerResetTrap.Close() + clock.Advance(tickerCall.Duration) + tickerResetCall := tickerResetTrap.MustWait(ctx) + tickerResetCall.MustRelease(ctx) + + // The publisher should not have published the events. + require.Equal(t, 0, calls) + + // Mock a single license with usage publishing disabled. + licenseJWT := coderdenttest.GenerateLicense(t, coderdenttest.LicenseOptions{ + PublishUsageData: false, + }) + db.EXPECT().GetUnexpiredLicenses(gomock.Any()).Return([]database.License{ + { + ID: 1, + JWT: licenseJWT, + UploadedAt: dbtime.Now(), + Exp: dbtime.Now().Add(48 * time.Hour), // fake + UUID: uuid.New(), + }, + }, nil).Times(1) + + // Tick and wait for the reset call. + clock.Advance(tickerResetCall.Duration) + tickerResetTrap.MustWait(ctx).MustRelease(ctx) + + // The publisher should still not have published the events. + require.Equal(t, 0, calls) +} + +// TestPublisherClaimExpiry tests the claim query to ensure that events are not +// claimed if they've recently been claimed by another publisher. +func TestPublisherClaimExpiry(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + log := slogtest.Make(t, nil) + db, _ := dbtestutil.NewDB(t) + clock := quartz.NewMock(t) + deploymentID, licenseJWT := configureDeployment(ctx, t, db) + now := time.Now() + + var calls int + ingestURL := fakeServer(t, tallymanHandler(t, deploymentID.String(), licenseJWT, func(req usagetypes.TallymanV1IngestRequest) any { + calls++ + return tallymanAcceptAllHandler(req) + })) + + inserter := usage.NewDBInserter( + usage.InserterWithClock(clock), + ) + + publisher := usage.NewTallymanPublisher(ctx, log, db, coderdenttest.Keys, + usage.PublisherWithClock(clock), + usage.PublisherWithIngestURL(ingestURL), + usage.PublisherWithInitialDelay(17*time.Minute), + ) + defer publisher.Close() + + // Create an event that was claimed 1h-18m ago. The ticker has a forced + // delay of 17m in this test. + clock.Set(now) + err := inserter.InsertDiscreteUsageEvent(ctx, db, usagetypes.DCManagedAgentsV1{ + Count: 1, + }) + require.NoError(t, err) + // Claim the event in the past. Claiming it this way via the database + // directly means it won't be marked as published or unclaimed. + events, err := db.SelectUsageEventsForPublishing(ctx, now.Add(-42*time.Minute)) + require.NoError(t, err) + require.Len(t, events, 1) + + // Start the publisher with a trap. + tickerTrap := clock.Trap().NewTicker() + defer tickerTrap.Close() + startErr := make(chan error) + go func() { + err := publisher.Start() + testutil.RequireSend(ctx, t, startErr, err) + }() + tickerCall := tickerTrap.MustWait(ctx) + require.Equal(t, 17*time.Minute, tickerCall.Duration) + tickerCall.MustRelease(ctx) + require.NoError(t, testutil.RequireReceive(ctx, t, startErr)) + + // Set up a trap for the ticker.Reset call. + tickerResetTrap := clock.Trap().TickerReset() + defer tickerResetTrap.Close() + + // Advance the clock to the initial tick, which should trigger the first + // publish, then wait for the reset call. The duration will always be 17m + // for resets (only the initial tick is variable). + clock.Advance(tickerCall.Duration) + tickerResetCall := tickerResetTrap.MustWait(ctx) + require.Equal(t, 17*time.Minute, tickerResetCall.Duration) + tickerResetCall.MustRelease(ctx) + + // No events should have been published since none are eligible. + require.Equal(t, 0, calls) + + // Advance the clock to the next tick and wait for the reset call. + clock.Advance(tickerResetCall.Duration) + tickerResetCall = tickerResetTrap.MustWait(ctx) + tickerResetCall.MustRelease(ctx) + + // The publisher should have published the event, as it's now eligible. + require.Equal(t, 1, calls) +} + +// TestPublisherMissingEvents tests that the publisher notices events that are +// not returned by the Tallyman server and marks them as temporarily rejected. +func TestPublisherMissingEvents(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + log := slogtest.Make(t, nil) + ctrl := gomock.NewController(t) + db := dbmock.NewMockStore(ctrl) + deploymentID, licenseJWT := configureMockDeployment(t, db) + clock := quartz.NewMock(t) + now := time.Now() + clock.Set(now) + + var calls int + ingestURL := fakeServer(t, tallymanHandler(t, deploymentID.String(), licenseJWT, func(req usagetypes.TallymanV1IngestRequest) any { + calls++ + return usagetypes.TallymanV1IngestResponse{ + AcceptedEvents: []usagetypes.TallymanV1IngestAcceptedEvent{}, + RejectedEvents: []usagetypes.TallymanV1IngestRejectedEvent{}, + } + })) + + publisher := usage.NewTallymanPublisher(ctx, log, db, coderdenttest.Keys, + usage.PublisherWithClock(clock), + usage.PublisherWithIngestURL(ingestURL), + ) + + // Expect the publisher to call SelectUsageEventsForPublishing, followed by + // UpdateUsageEventsPostPublish. + events := []database.UsageEvent{ + { + ID: uuid.New().String(), + EventType: string(usagetypes.UsageEventTypeDCManagedAgentsV1), + EventData: []byte(jsoninate(t, usagetypes.DCManagedAgentsV1{ + Count: 1, + })), + CreatedAt: now, + PublishedAt: sql.NullTime{}, + PublishStartedAt: sql.NullTime{}, + FailureMessage: sql.NullString{}, + }, + } + db.EXPECT().SelectUsageEventsForPublishing(gomock.Any(), gomock.Any()).Return(events, nil).Times(1) + db.EXPECT().UpdateUsageEventsPostPublish(gomock.Any(), gomock.Any()).DoAndReturn( + func(ctx context.Context, params database.UpdateUsageEventsPostPublishParams) error { + assert.Equal(t, []string{events[0].ID}, params.IDs) + assert.Equal(t, []string{"tallyman did not include the event in the response"}, params.FailureMessages) + assert.Equal(t, []bool{false}, params.SetPublishedAts) + return nil + }, + ).Times(1) + + // Start the publisher with a trap. + tickerTrap := clock.Trap().NewTicker() + defer tickerTrap.Close() + startErr := make(chan error) + go func() { + err := publisher.Start() + testutil.RequireSend(ctx, t, startErr, err) + }() + tickerCall := tickerTrap.MustWait(ctx) + tickerCall.MustRelease(ctx) + require.NoError(t, testutil.RequireReceive(ctx, t, startErr)) + + // Tick and wait for the reset call. + tickerResetTrap := clock.Trap().TickerReset() + defer tickerResetTrap.Close() + clock.Advance(tickerCall.Duration) + tickerResetTrap.MustWait(ctx).MustRelease(ctx) + + // The publisher should have published the events once. + require.Equal(t, 1, calls) + + require.NoError(t, publisher.Close()) +} + +func TestPublisherLicenseSelection(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + log := slogtest.Make(t, nil) + ctrl := gomock.NewController(t) + db := dbmock.NewMockStore(ctrl) + clock := quartz.NewMock(t) + now := time.Now() + + // Configure the deployment manually. + deploymentID := uuid.New() + db.EXPECT().GetDeploymentID(gomock.Any()).Return(deploymentID.String(), nil).Times(1) + + // Insert multiple licenses: + // 1. PublishUsageData false, type=salesforce, iat 30m ago (ineligible, publish not enabled) + // 2. PublishUsageData true, type=trial, iat 1h ago (ineligible, not salesforce) + // 3. PublishUsageData true, type=salesforce, iat 30m ago, exp 10m ago (ineligible, expired) + // 4. PublishUsageData true, type=salesforce, iat 1h ago (eligible) + // 5. PublishUsageData true, type=salesforce, iat 30m ago (eligible, and newer!) + badLicense1 := coderdenttest.GenerateLicense(t, coderdenttest.LicenseOptions{ + PublishUsageData: false, + IssuedAt: now.Add(-30 * time.Minute), + }) + badLicense2 := coderdenttest.GenerateLicense(t, coderdenttest.LicenseOptions{ + PublishUsageData: true, + IssuedAt: now.Add(-1 * time.Hour), + AccountType: "trial", + }) + badLicense3 := coderdenttest.GenerateLicense(t, coderdenttest.LicenseOptions{ + PublishUsageData: true, + IssuedAt: now.Add(-30 * time.Minute), + ExpiresAt: now.Add(-10 * time.Minute), + }) + badLicense4 := coderdenttest.GenerateLicense(t, coderdenttest.LicenseOptions{ + PublishUsageData: true, + IssuedAt: now.Add(-1 * time.Hour), + }) + expectedLicense := coderdenttest.GenerateLicense(t, coderdenttest.LicenseOptions{ + PublishUsageData: true, + IssuedAt: now.Add(-30 * time.Minute), + }) + // GetUnexpiredLicenses is not supposed to return expired licenses, but for + // the purposes of this test we're going to do it anyway. + db.EXPECT().GetUnexpiredLicenses(gomock.Any()).Return([]database.License{ + { + ID: 1, + JWT: badLicense1, + Exp: now.Add(48 * time.Hour), // fake times, the JWT should be checked + UUID: uuid.New(), + UploadedAt: now, + }, + { + ID: 2, + JWT: badLicense2, + Exp: now.Add(48 * time.Hour), + UUID: uuid.New(), + UploadedAt: now, + }, + { + ID: 3, + JWT: badLicense3, + Exp: now.Add(48 * time.Hour), + UUID: uuid.New(), + UploadedAt: now, + }, + { + ID: 4, + JWT: badLicense4, + Exp: now.Add(48 * time.Hour), + UUID: uuid.New(), + UploadedAt: now, + }, + { + ID: 5, + JWT: expectedLicense, + Exp: now.Add(48 * time.Hour), + UUID: uuid.New(), + UploadedAt: now, + }, + }, nil) + + called := false + ingestURL := fakeServer(t, tallymanHandler(t, deploymentID.String(), expectedLicense, func(req usagetypes.TallymanV1IngestRequest) any { + called = true + return tallymanAcceptAllHandler(req) + })) + + publisher := usage.NewTallymanPublisher(ctx, log, db, coderdenttest.Keys, + usage.PublisherWithClock(clock), + usage.PublisherWithIngestURL(ingestURL), + ) + defer publisher.Close() + + // Start the publisher with a trap. + tickerTrap := clock.Trap().NewTicker() + defer tickerTrap.Close() + startErr := make(chan error) + go func() { + err := publisher.Start() + testutil.RequireSend(ctx, t, startErr, err) + }() + tickerCall := tickerTrap.MustWait(ctx) + tickerCall.MustRelease(ctx) + require.NoError(t, testutil.RequireReceive(ctx, t, startErr)) + + // Mock events to be published. + events := []database.UsageEvent{ + { + ID: uuid.New().String(), + EventType: string(usagetypes.UsageEventTypeDCManagedAgentsV1), + EventData: []byte(jsoninate(t, usagetypes.DCManagedAgentsV1{ + Count: 1, + })), + }, + } + db.EXPECT().SelectUsageEventsForPublishing(gomock.Any(), gomock.Any()).Return(events, nil).Times(1) + db.EXPECT().UpdateUsageEventsPostPublish(gomock.Any(), gomock.Any()).DoAndReturn( + func(ctx context.Context, params database.UpdateUsageEventsPostPublishParams) error { + assert.Equal(t, []string{events[0].ID}, params.IDs) + assert.Equal(t, []string{""}, params.FailureMessages) + assert.Equal(t, []bool{true}, params.SetPublishedAts) + return nil + }, + ).Times(1) + + // Tick and wait for the reset call. + tickerResetTrap := clock.Trap().TickerReset() + defer tickerResetTrap.Close() + clock.Advance(tickerCall.Duration) + tickerResetTrap.MustWait(ctx).MustRelease(ctx) + + // The publisher should have published the events once. + require.True(t, called) +} + +func TestPublisherTallymanError(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + log := slogtest.Make(t, nil) + ctrl := gomock.NewController(t) + db := dbmock.NewMockStore(ctrl) + clock := quartz.NewMock(t) + now := time.Now() + clock.Set(now) + + deploymentID, licenseJWT := configureMockDeployment(t, db) + const errorMessage = "tallyman error" + var calls int + ingestURL := fakeServer(t, tallymanHandler(t, deploymentID.String(), licenseJWT, func(req usagetypes.TallymanV1IngestRequest) any { + calls++ + return usagetypes.TallymanV1Response{ + Message: errorMessage, + } + })) + + publisher := usage.NewTallymanPublisher(ctx, log, db, coderdenttest.Keys, + usage.PublisherWithClock(clock), + usage.PublisherWithIngestURL(ingestURL), + ) + defer publisher.Close() + + // Start the publisher with a trap. + tickerTrap := clock.Trap().NewTicker() + defer tickerTrap.Close() + startErr := make(chan error) + go func() { + err := publisher.Start() + testutil.RequireSend(ctx, t, startErr, err) + }() + tickerCall := tickerTrap.MustWait(ctx) + tickerCall.MustRelease(ctx) + require.NoError(t, testutil.RequireReceive(ctx, t, startErr)) + + // Mock events to be published. + events := []database.UsageEvent{ + { + ID: uuid.New().String(), + EventType: string(usagetypes.UsageEventTypeDCManagedAgentsV1), + EventData: []byte(jsoninate(t, usagetypes.DCManagedAgentsV1{ + Count: 1, + })), + }, + } + db.EXPECT().SelectUsageEventsForPublishing(gomock.Any(), gomock.Any()).Return(events, nil).Times(1) + db.EXPECT().UpdateUsageEventsPostPublish(gomock.Any(), gomock.Any()).DoAndReturn( + func(ctx context.Context, params database.UpdateUsageEventsPostPublishParams) error { + assert.Equal(t, []string{events[0].ID}, params.IDs) + assert.Contains(t, params.FailureMessages[0], errorMessage) + assert.Equal(t, []bool{false}, params.SetPublishedAts) + return nil + }, + ).Times(1) + + // Tick and wait for the reset call. + tickerResetTrap := clock.Trap().TickerReset() + defer tickerResetTrap.Close() + clock.Advance(tickerCall.Duration) + tickerResetTrap.MustWait(ctx).MustRelease(ctx) + + // The publisher should have published the events once. + require.Equal(t, 1, calls) +} + +func jsoninate(t *testing.T, v any) string { + t.Helper() + if e, ok := v.(usagetypes.Event); ok { + v = e.Fields() + } + buf, err := json.Marshal(v) + require.NoError(t, err) + return string(buf) +} + +func configureDeployment(ctx context.Context, t *testing.T, db database.Store) (uuid.UUID, string) { + t.Helper() + deploymentID := uuid.New() + err := db.InsertDeploymentID(ctx, deploymentID.String()) + require.NoError(t, err) + + licenseRaw := coderdenttest.GenerateLicense(t, coderdenttest.LicenseOptions{ + PublishUsageData: true, + }) + _, err = db.InsertLicense(ctx, database.InsertLicenseParams{ + UploadedAt: dbtime.Now(), + JWT: licenseRaw, + Exp: dbtime.Now().Add(48 * time.Hour), + UUID: uuid.New(), + }) + require.NoError(t, err) + + return deploymentID, licenseRaw +} + +func configureMockDeployment(t *testing.T, db *dbmock.MockStore) (uuid.UUID, string) { + t.Helper() + deploymentID := uuid.New() + db.EXPECT().GetDeploymentID(gomock.Any()).Return(deploymentID.String(), nil).Times(1) + + licenseRaw := coderdenttest.GenerateLicense(t, coderdenttest.LicenseOptions{ + PublishUsageData: true, + }) + db.EXPECT().GetUnexpiredLicenses(gomock.Any()).Return([]database.License{ + { + ID: 1, + UploadedAt: dbtime.Now(), + JWT: licenseRaw, + Exp: dbtime.Now().Add(48 * time.Hour), + UUID: uuid.New(), + }, + }, nil) + + return deploymentID, licenseRaw +} + +func fakeServer(t *testing.T, handler http.Handler) string { + t.Helper() + server := httptest.NewServer(handler) + t.Cleanup(server.Close) + return server.URL +} + +func tallymanHandler(t *testing.T, expectDeploymentID string, expectLicenseJWT string, handler func(req usagetypes.TallymanV1IngestRequest) any) http.Handler { + t.Helper() + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + t.Helper() + licenseJWT := r.Header.Get(usagetypes.TallymanCoderLicenseKeyHeader) + if expectLicenseJWT != "" && !assert.Equal(t, expectLicenseJWT, licenseJWT, "license JWT in request did not match") { + rw.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(rw).Encode(usagetypes.TallymanV1Response{ + Message: "license JWT in request did not match", + }) + return + } + + deploymentID := r.Header.Get(usagetypes.TallymanCoderDeploymentIDHeader) + if expectDeploymentID != "" && !assert.Equal(t, expectDeploymentID, deploymentID, "deployment ID in request did not match") { + rw.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(rw).Encode(usagetypes.TallymanV1Response{ + Message: "deployment ID in request did not match", + }) + return + } + + var req usagetypes.TallymanV1IngestRequest + err := json.NewDecoder(r.Body).Decode(&req) + if !assert.NoError(t, err, "could not decode request body") { + rw.WriteHeader(http.StatusBadRequest) + _ = json.NewEncoder(rw).Encode(usagetypes.TallymanV1Response{ + Message: "could not decode request body", + }) + return + } + + resp := handler(req) + switch resp.(type) { + case usagetypes.TallymanV1Response: + rw.WriteHeader(http.StatusInternalServerError) + default: + rw.WriteHeader(http.StatusOK) + } + err = json.NewEncoder(rw).Encode(resp) + if !assert.NoError(t, err, "could not encode response body") { + rw.WriteHeader(http.StatusInternalServerError) + return + } + }) +} + +func tallymanAcceptAllHandler(req usagetypes.TallymanV1IngestRequest) usagetypes.TallymanV1IngestResponse { + acceptedEvents := make([]usagetypes.TallymanV1IngestAcceptedEvent, len(req.Events)) + for i, event := range req.Events { + acceptedEvents[i].ID = event.ID + } + + return usagetypes.TallymanV1IngestResponse{ + AcceptedEvents: acceptedEvents, + RejectedEvents: []usagetypes.TallymanV1IngestRejectedEvent{}, + } +} diff --git a/enterprise/coderd/userauth_test.go b/enterprise/coderd/userauth_test.go index 46207f319dbe1..fd4706a25e511 100644 --- a/enterprise/coderd/userauth_test.go +++ b/enterprise/coderd/userauth_test.go @@ -941,7 +941,6 @@ func TestGroupSync(t *testing.T) { require.NoError(t, err) } - // nolint:gocritic _, err := runner.API.Database.UpdateUserLoginType(dbauthz.AsSystemRestricted(ctx), database.UpdateUserLoginTypeParams{ NewLoginType: database.LoginTypeOIDC, UserID: user.ID, diff --git a/enterprise/coderd/workspaceagents.go b/enterprise/coderd/workspaceagents.go index 3223151425630..739aba6d628c2 100644 --- a/enterprise/coderd/workspaceagents.go +++ b/enterprise/coderd/workspaceagents.go @@ -2,9 +2,14 @@ package coderd import ( "context" + "fmt" "net/http" + "github.com/go-chi/chi/v5" + + "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/httpapi" + "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/codersdk" ) @@ -17,3 +22,77 @@ func (api *API) shouldBlockNonBrowserConnections(rw http.ResponseWriter) bool { } return false } + +// @Summary Get workspace external agent credentials +// @ID get-workspace-external-agent-credentials +// @Security CoderSessionToken +// @Produce json +// @Tags Enterprise +// @Param workspace path string true "Workspace ID" format(uuid) +// @Param agent path string true "Agent name" +// @Success 200 {object} codersdk.ExternalAgentCredentials +// @Router /workspaces/{workspace}/external-agent/{agent}/credentials [get] +func (api *API) workspaceExternalAgentCredentials(rw http.ResponseWriter, r *http.Request) { + ctx := r.Context() + workspace := httpmw.WorkspaceParam(r) + agentName := chi.URLParam(r, "agent") + + build, err := api.Database.GetLatestWorkspaceBuildByWorkspaceID(ctx, workspace.ID) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Failed to get latest workspace build.", + Detail: err.Error(), + }) + return + } + if !build.HasExternalAgent.Bool { + httpapi.Write(ctx, rw, http.StatusNotFound, codersdk.Response{ + Message: "Workspace does not have an external agent.", + }) + return + } + + agents, err := api.Database.GetWorkspaceAgentsByWorkspaceAndBuildNumber(ctx, database.GetWorkspaceAgentsByWorkspaceAndBuildNumberParams{ + WorkspaceID: workspace.ID, + BuildNumber: build.BuildNumber, + }) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Failed to get workspace agents.", + Detail: err.Error(), + }) + return + } + + var agent *database.WorkspaceAgent + for i := range agents { + if agents[i].Name == agentName { + agent = &agents[i] + break + } + } + if agent == nil { + httpapi.Write(ctx, rw, http.StatusNotFound, codersdk.Response{ + Message: fmt.Sprintf("External agent '%s' not found in workspace.", agentName), + }) + return + } + + if agent.AuthInstanceID.Valid { + httpapi.Write(ctx, rw, http.StatusNotFound, codersdk.Response{ + Message: "External agent is authenticated with an instance ID.", + }) + return + } + + initScriptURL := fmt.Sprintf("%s/api/v2/init-script/%s/%s", api.AccessURL.String(), agent.OperatingSystem, agent.Architecture) + command := fmt.Sprintf("curl -fsSL %q | CODER_AGENT_TOKEN=%q sh", initScriptURL, agent.AuthToken.String()) + if agent.OperatingSystem == "windows" { + command = fmt.Sprintf("$env:CODER_AGENT_TOKEN=%q; iwr -useb %q | iex", agent.AuthToken.String(), initScriptURL) + } + + httpapi.Write(ctx, rw, http.StatusOK, codersdk.ExternalAgentCredentials{ + AgentToken: agent.AuthToken.String(), + Command: command, + }) +} diff --git a/enterprise/coderd/workspaceagents_test.go b/enterprise/coderd/workspaceagents_test.go index f4f0670cd150e..c9d44e667c212 100644 --- a/enterprise/coderd/workspaceagents_test.go +++ b/enterprise/coderd/workspaceagents_test.go @@ -3,6 +3,7 @@ package coderd_test import ( "context" "crypto/tls" + "database/sql" "fmt" "net/http" "os" @@ -12,6 +13,7 @@ import ( "time" "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbfake" "github.com/coder/coder/v2/coderd/database/dbgen" "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/provisionersdk" @@ -344,3 +346,123 @@ func setupWorkspaceAgent(t *testing.T, client *codersdk.Client, user codersdk.Cr return setupResp{workspace, sdkAgent, agnt} } + +func TestWorkspaceExternalAgentCredentials(t *testing.T) { + t.Parallel() + + client, db, user := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspaceExternalAgent: 1, + }, + }, + }) + + t.Run("Success - linux", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) + + r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OrganizationID: user.OrganizationID, + OwnerID: user.UserID, + }).Seed(database.WorkspaceBuild{ + HasExternalAgent: sql.NullBool{ + Bool: true, + Valid: true, + }, + }).Resource(&proto.Resource{ + Name: "test-agent", + Type: "coder_external_agent", + }).WithAgent(func(a []*proto.Agent) []*proto.Agent { + a[0].Name = "test-agent" + a[0].OperatingSystem = "linux" + a[0].Architecture = "amd64" + return a + }).Do() + + credentials, err := client.WorkspaceExternalAgentCredentials( + ctx, r.Workspace.ID, "test-agent") + require.NoError(t, err) + + require.Equal(t, r.AgentToken, credentials.AgentToken) + expectedCommand := fmt.Sprintf("curl -fsSL \"%s/api/v2/init-script/linux/amd64\" | CODER_AGENT_TOKEN=%q sh", client.URL, r.AgentToken) + require.Equal(t, expectedCommand, credentials.Command) + }) + + t.Run("Success - windows", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) + + r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OrganizationID: user.OrganizationID, + OwnerID: user.UserID, + }).Resource(&proto.Resource{ + Name: "test-agent", + Type: "coder_external_agent", + }).Seed(database.WorkspaceBuild{ + HasExternalAgent: sql.NullBool{ + Bool: true, + Valid: true, + }, + }).WithAgent(func(a []*proto.Agent) []*proto.Agent { + a[0].Name = "test-agent" + a[0].OperatingSystem = "windows" + a[0].Architecture = "amd64" + return a + }).Do() + + credentials, err := client.WorkspaceExternalAgentCredentials( + ctx, r.Workspace.ID, "test-agent") + require.NoError(t, err) + + require.Equal(t, r.AgentToken, credentials.AgentToken) + expectedCommand := fmt.Sprintf("$env:CODER_AGENT_TOKEN=%q; iwr -useb \"%s/api/v2/init-script/windows/amd64\" | iex", r.AgentToken, client.URL) + require.Equal(t, expectedCommand, credentials.Command) + }) + + t.Run("WithInstanceID - should return 404", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) + + r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OrganizationID: user.OrganizationID, + OwnerID: user.UserID, + }).Seed(database.WorkspaceBuild{ + HasExternalAgent: sql.NullBool{ + Bool: true, + Valid: true, + }, + }).Resource(&proto.Resource{ + Name: "test-agent", + Type: "coder_external_agent", + }).WithAgent(func(a []*proto.Agent) []*proto.Agent { + a[0].Name = "test-agent" + a[0].Auth = &proto.Agent_InstanceId{ + InstanceId: uuid.New().String(), + } + return a + }).Do() + + _, err := client.WorkspaceExternalAgentCredentials(ctx, r.Workspace.ID, "test-agent") + require.Error(t, err) + var apiErr *codersdk.Error + require.ErrorAs(t, err, &apiErr) + require.Equal(t, "External agent is authenticated with an instance ID.", apiErr.Message) + }) + + t.Run("No external agent - should return 404", func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitShort) + + r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OrganizationID: user.OrganizationID, + OwnerID: user.UserID, + }).Do() + + _, err := client.WorkspaceExternalAgentCredentials(ctx, r.Workspace.ID, "test-agent") + require.Error(t, err) + var apiErr *codersdk.Error + require.ErrorAs(t, err, &apiErr) + require.Equal(t, "Workspace does not have an external agent.", apiErr.Message) + }) +} diff --git a/enterprise/coderd/workspaceproxy_test.go b/enterprise/coderd/workspaceproxy_test.go index 23775f370f95f..7024ad2366423 100644 --- a/enterprise/coderd/workspaceproxy_test.go +++ b/enterprise/coderd/workspaceproxy_test.go @@ -15,6 +15,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "cdr.dev/slog/sloggers/slogtest" "github.com/coder/coder/v2/agent/agenttest" "github.com/coder/coder/v2/buildinfo" "github.com/coder/coder/v2/coderd/coderdtest" @@ -1009,11 +1010,16 @@ func TestGetCryptoKeys(t *testing.T) { ctx := testutil.Context(t, testutil.WaitMedium) db, pubsub := dbtestutil.NewDB(t) + // IgnoreErrors is set here to avoid a test failure due to "used of closed network connection". + logger := slogtest.Make(t, &slogtest.Options{ + IgnoreErrors: true, + }) cclient, _, api, _ := coderdenttest.NewWithAPI(t, &coderdenttest.Options{ Options: &coderdtest.Options{ Database: db, Pubsub: pubsub, IncludeProvisionerDaemon: true, + Logger: &logger, }, LicenseOptions: &coderdenttest.LicenseOptions{ Features: license.Features{ diff --git a/enterprise/coderd/workspacequota_test.go b/enterprise/coderd/workspacequota_test.go index f49e135ad55b3..f39b090ca21b1 100644 --- a/enterprise/coderd/workspacequota_test.go +++ b/enterprise/coderd/workspacequota_test.go @@ -462,7 +462,6 @@ func TestWorkspaceSerialization(t *testing.T) { // +------------------------------+------------------+ // pq: could not serialize access due to concurrent update ctx := testutil.Context(t, testutil.WaitLong) - //nolint:gocritic // testing ctx = dbauthz.AsSystemRestricted(ctx) myWorkspace := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ @@ -520,7 +519,6 @@ func TestWorkspaceSerialization(t *testing.T) { // +------------------------------+------------------+ // Works! ctx := testutil.Context(t, testutil.WaitLong) - //nolint:gocritic // testing ctx = dbauthz.AsSystemRestricted(ctx) myWorkspace := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ @@ -589,7 +587,6 @@ func TestWorkspaceSerialization(t *testing.T) { // +---------------------+----------------------------------+ // pq: could not serialize access due to concurrent update ctx := testutil.Context(t, testutil.WaitShort) - //nolint:gocritic // testing ctx = dbauthz.AsSystemRestricted(ctx) myWorkspace := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ @@ -642,7 +639,6 @@ func TestWorkspaceSerialization(t *testing.T) { // | CommitTx() | | // +---------------------+----------------------------------+ ctx := testutil.Context(t, testutil.WaitShort) - //nolint:gocritic // testing ctx = dbauthz.AsSystemRestricted(ctx) myWorkspace := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ @@ -686,7 +682,6 @@ func TestWorkspaceSerialization(t *testing.T) { // +---------------------+----------------------------------+ // Works! ctx := testutil.Context(t, testutil.WaitShort) - //nolint:gocritic // testing ctx = dbauthz.AsSystemRestricted(ctx) var err error @@ -741,7 +736,6 @@ func TestWorkspaceSerialization(t *testing.T) { // | | CommitTx() | // +---------------------+---------------------+ ctx := testutil.Context(t, testutil.WaitLong) - //nolint:gocritic // testing ctx = dbauthz.AsSystemRestricted(ctx) myWorkspace := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ @@ -799,7 +793,6 @@ func TestWorkspaceSerialization(t *testing.T) { // | | CommitTx() | // +---------------------+---------------------+ ctx := testutil.Context(t, testutil.WaitLong) - //nolint:gocritic // testing ctx = dbauthz.AsSystemRestricted(ctx) myWorkspace := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ @@ -860,7 +853,6 @@ func TestWorkspaceSerialization(t *testing.T) { // +---------------------+---------------------+ // pq: could not serialize access due to read/write dependencies among transactions ctx := testutil.Context(t, testutil.WaitLong) - //nolint:gocritic // testing ctx = dbauthz.AsSystemRestricted(ctx) myWorkspace := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ diff --git a/enterprise/coderd/workspaces_test.go b/enterprise/coderd/workspaces_test.go index 2278fb2a71939..1cdcd9fb43144 100644 --- a/enterprise/coderd/workspaces_test.go +++ b/enterprise/coderd/workspaces_test.go @@ -15,18 +15,12 @@ import ( "testing" "time" - "github.com/prometheus/client_golang/prometheus" - - "github.com/coder/coder/v2/coderd/files" - agplprebuilds "github.com/coder/coder/v2/coderd/prebuilds" - "github.com/coder/coder/v2/enterprise/coderd/prebuilds" - "github.com/google/uuid" + "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "cdr.dev/slog" - "cdr.dev/slog/sloggers/slogtest" "github.com/coder/coder/v2/coderd/audit" @@ -35,21 +29,26 @@ import ( "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbfake" + "github.com/coder/coder/v2/coderd/database/dbgen" "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/database/dbtime" + "github.com/coder/coder/v2/coderd/files" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/notifications" + agplprebuilds "github.com/coder/coder/v2/coderd/prebuilds" "github.com/coder/coder/v2/coderd/provisionerdserver" "github.com/coder/coder/v2/coderd/rbac" "github.com/coder/coder/v2/coderd/rbac/policy" agplschedule "github.com/coder/coder/v2/coderd/schedule" "github.com/coder/coder/v2/coderd/schedule/cron" "github.com/coder/coder/v2/coderd/util/ptr" + "github.com/coder/coder/v2/coderd/workspacestats" "github.com/coder/coder/v2/codersdk" entaudit "github.com/coder/coder/v2/enterprise/audit" "github.com/coder/coder/v2/enterprise/audit/backends" "github.com/coder/coder/v2/enterprise/coderd/coderdenttest" "github.com/coder/coder/v2/enterprise/coderd/license" + "github.com/coder/coder/v2/enterprise/coderd/prebuilds" "github.com/coder/coder/v2/enterprise/coderd/schedule" "github.com/coder/coder/v2/provisioner/echo" "github.com/coder/coder/v2/provisionersdk" @@ -571,7 +570,6 @@ func TestCreateUserWorkspace(t *testing.T) { return a }).Do() - // nolint:gocritic // this is a test ctx := dbauthz.AsSystemRestricted(testutil.Context(t, testutil.WaitLong)) agent, err := db.GetWorkspaceAgentAndLatestBuildByAuthToken(ctx, uuid.MustParse(r.AgentToken)) require.NoError(t, err) @@ -619,7 +617,7 @@ func TestWorkspaceAutobuild(t *testing.T) { failureTTL = time.Minute ) - client, user := coderdenttest.New(t, &coderdenttest.Options{ + client, db, user := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ Logger: &logger, AutobuildTicker: ticker, @@ -644,7 +642,12 @@ func TestWorkspaceAutobuild(t *testing.T) { ws := coderdtest.CreateWorkspace(t, client, template.ID) build := coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) require.Equal(t, codersdk.WorkspaceStatusFailed, build.Status) - ticker <- build.Job.CompletedAt.Add(failureTTL * 2) + tickTime := build.Job.CompletedAt.Add(failureTTL * 2) + + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), ws.OrganizationID, nil) + require.NoError(t, err) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + ticker <- tickTime stats := <-statCh // Expect workspace to transition to stopped state for breaching // failure TTL. @@ -666,7 +669,7 @@ func TestWorkspaceAutobuild(t *testing.T) { failureTTL = time.Minute ) - client, user := coderdenttest.New(t, &coderdenttest.Options{ + client, db, user := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ Logger: &logger, AutobuildTicker: ticker, @@ -691,7 +694,12 @@ func TestWorkspaceAutobuild(t *testing.T) { build := coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) require.Equal(t, codersdk.WorkspaceStatusFailed, build.Status) // Make it impossible to trigger the failure TTL. - ticker <- build.Job.CompletedAt.Add(-failureTTL * 2) + tickTime := build.Job.CompletedAt.Add(-failureTTL * 2) + + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), ws.OrganizationID, nil) + require.NoError(t, err) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + ticker <- tickTime stats := <-statCh // Expect no transitions since not enough time has elapsed. require.Len(t, stats.Transitions, 0) @@ -759,10 +767,11 @@ func TestWorkspaceAutobuild(t *testing.T) { client, db, user := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ - AutobuildTicker: ticker, - AutobuildStats: statCh, - TemplateScheduleStore: schedule.NewEnterpriseTemplateScheduleStore(agplUserQuietHoursScheduleStore(), notifications.NewNoopEnqueuer(), logger, nil), - Auditor: auditRecorder, + AutobuildTicker: ticker, + AutobuildStats: statCh, + IncludeProvisionerDaemon: true, + TemplateScheduleStore: schedule.NewEnterpriseTemplateScheduleStore(agplUserQuietHoursScheduleStore(), notifications.NewNoopEnqueuer(), logger, nil), + Auditor: auditRecorder, }, LicenseOptions: &coderdenttest.LicenseOptions{ Features: license.Features{codersdk.FeatureAdvancedTemplateScheduling: 1}, @@ -790,7 +799,12 @@ func TestWorkspaceAutobuild(t *testing.T) { auditRecorder.ResetLogs() // Simulate being inactive. - ticker <- workspace.LastUsedAt.Add(inactiveTTL * 2) + tickTime := workspace.LastUsedAt.Add(inactiveTTL * 2) + + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), workspace.OrganizationID, nil) + require.NoError(t, err) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + ticker <- tickTime stats := <-statCh // Expect workspace to transition to stopped state for breaching @@ -813,7 +827,7 @@ func TestWorkspaceAutobuild(t *testing.T) { dormantLastUsedAt := ws.LastUsedAt // nolint:gocritic // this test is not testing RBAC. - err := client.UpdateWorkspaceDormancy(ctx, ws.ID, codersdk.UpdateWorkspaceDormancy{Dormant: false}) + err = client.UpdateWorkspaceDormancy(ctx, ws.ID, codersdk.UpdateWorkspaceDormancy{Dormant: false}) require.NoError(t, err) // Assert that we updated our last_used_at so that we don't immediately @@ -888,7 +902,12 @@ func TestWorkspaceAutobuild(t *testing.T) { } // Simulate being inactive. - ticker <- time.Now().Add(time.Hour) + // Fix provisioner stale issue by updating LastSeenAt to the tick time + tickTime := time.Now().Add(time.Hour) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), workspaces[0].OrganizationID, nil) + require.NoError(t, err) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + ticker <- tickTime stats := <-statCh // Expect workspace to transition to stopped state for breaching @@ -997,7 +1016,7 @@ func TestWorkspaceAutobuild(t *testing.T) { ) logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) - client, user := coderdenttest.New(t, &coderdenttest.Options{ + client, db, user := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ AutobuildTicker: ticker, IncludeProvisionerDaemon: true, @@ -1029,7 +1048,11 @@ func TestWorkspaceAutobuild(t *testing.T) { ws = coderdtest.MustTransitionWorkspace(t, client, ws.ID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionStop) // Simulate not having accessed the workspace in a while. - ticker <- ws.LastUsedAt.Add(2 * inactiveTTL) + tickTime := ws.LastUsedAt.Add(2 * inactiveTTL) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), ws.OrganizationID, nil) + require.NoError(t, err) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + ticker <- tickTime stats := <-statCh // Expect no transitions since workspace is stopped. require.Len(t, stats.Transitions, 0) @@ -1051,7 +1074,7 @@ func TestWorkspaceAutobuild(t *testing.T) { ) logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) - client, user := coderdenttest.New(t, &coderdenttest.Options{ + client, db, user := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ AutobuildTicker: ticker, IncludeProvisionerDaemon: true, @@ -1079,7 +1102,11 @@ func TestWorkspaceAutobuild(t *testing.T) { require.Equal(t, codersdk.WorkspaceStatusRunning, build.Status) // Simulate not having accessed the workspace in a while. - ticker <- ws.LastUsedAt.Add(2 * transitionTTL) + tickTime := ws.LastUsedAt.Add(2 * transitionTTL) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), ws.OrganizationID, nil) + require.NoError(t, err) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + ticker <- tickTime stats := <-statCh // Expect workspace to transition to stopped state for breaching // inactive TTL. @@ -1094,7 +1121,9 @@ func TestWorkspaceAutobuild(t *testing.T) { _ = coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) // Simulate the workspace being dormant beyond the threshold. - ticker <- ws.DormantAt.Add(2 * transitionTTL) + tickTime2 := ws.DormantAt.Add(2 * transitionTTL) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime2) + ticker <- tickTime2 stats = <-statCh require.Len(t, stats.Transitions, 1) // The workspace should be scheduled for deletion. @@ -1106,7 +1135,7 @@ func TestWorkspaceAutobuild(t *testing.T) { // Assert that the workspace is actually deleted. //nolint:gocritic // ensuring workspace is deleted and not just invisible to us due to RBAC - _, err := client.Workspace(testutil.Context(t, testutil.WaitShort), ws.ID) + _, err = client.Workspace(testutil.Context(t, testutil.WaitShort), ws.ID) require.Error(t, err) cerr, ok := codersdk.AsError(err) require.True(t, ok) @@ -1123,7 +1152,7 @@ func TestWorkspaceAutobuild(t *testing.T) { ) logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) - client, user := coderdenttest.New(t, &coderdenttest.Options{ + client, db, user := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ AutobuildTicker: ticker, IncludeProvisionerDaemon: true, @@ -1158,7 +1187,11 @@ func TestWorkspaceAutobuild(t *testing.T) { require.NotNil(t, ws.DormantAt) // Ensure we haven't breached our threshold. - ticker <- ws.DormantAt.Add(-dormantTTL * 2) + tickTime := ws.DormantAt.Add(-dormantTTL * 2) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), ws.OrganizationID, nil) + require.NoError(t, err) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + ticker <- tickTime stats := <-statCh // Expect no transitions since not enough time has elapsed. require.Len(t, stats.Transitions, 0) @@ -1169,7 +1202,9 @@ func TestWorkspaceAutobuild(t *testing.T) { require.NoError(t, err) // Simlute the workspace breaching the threshold. - ticker <- ws.DormantAt.Add(dormantTTL * 2) + tickTime2 := ws.DormantAt.Add(dormantTTL * 2) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime2) + ticker <- tickTime2 stats = <-statCh require.Len(t, stats.Transitions, 1) require.Equal(t, database.WorkspaceTransitionDelete, stats.Transitions[ws.ID]) @@ -1186,7 +1221,7 @@ func TestWorkspaceAutobuild(t *testing.T) { ) logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) - client, user := coderdenttest.New(t, &coderdenttest.Options{ + client, db, user := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ AutobuildTicker: tickCh, IncludeProvisionerDaemon: true, @@ -1217,7 +1252,11 @@ func TestWorkspaceAutobuild(t *testing.T) { ws = coderdtest.MustTransitionWorkspace(t, client, ws.ID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionStop) // Assert that autostart works when the workspace isn't dormant.. - tickCh <- sched.Next(ws.LatestBuild.CreatedAt) + tickTime := sched.Next(ws.LatestBuild.CreatedAt) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), ws.OrganizationID, nil) + require.NoError(t, err) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + tickCh <- tickTime stats := <-statsCh require.Len(t, stats.Errors, 0) require.Len(t, stats.Transitions, 1) @@ -1237,7 +1276,9 @@ func TestWorkspaceAutobuild(t *testing.T) { require.NoError(t, err) // We should see the workspace get stopped now. - tickCh <- ws.LastUsedAt.Add(inactiveTTL * 2) + tickTime2 := ws.LastUsedAt.Add(inactiveTTL * 2) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime2) + tickCh <- tickTime2 stats = <-statsCh require.Len(t, stats.Errors, 0) require.Len(t, stats.Transitions, 1) @@ -1267,7 +1308,7 @@ func TestWorkspaceAutobuild(t *testing.T) { ) logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) - client, user := coderdenttest.New(t, &coderdenttest.Options{ + client, db, user := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ AutobuildTicker: ticker, IncludeProvisionerDaemon: true, @@ -1335,13 +1376,19 @@ func TestWorkspaceAutobuild(t *testing.T) { // Simulate ticking an hour after the workspace is expected to be deleted. // Under normal circumstances this should result in a transition but // since our last build resulted in failure it should be skipped. - ticker <- build.Job.CompletedAt.Add(time.Hour) + tickTime := build.Job.CompletedAt.Add(time.Hour) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), ws.OrganizationID, nil) + require.NoError(t, err) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + ticker <- tickTime stats := <-statCh require.Len(t, stats.Transitions, 0) // Simulate ticking a day after the workspace was last attempted to // be deleted. This should result in an attempt. - ticker <- build.Job.CompletedAt.Add(time.Hour * 25) + tickTime2 := build.Job.CompletedAt.Add(time.Hour * 25) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime2) + ticker <- tickTime2 stats = <-statCh require.Len(t, stats.Transitions, 1) require.Equal(t, database.WorkspaceTransitionDelete, stats.Transitions[ws.ID]) @@ -1356,7 +1403,7 @@ func TestWorkspaceAutobuild(t *testing.T) { ) logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) - client, user := coderdenttest.New(t, &coderdenttest.Options{ + client, db, user := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ AutobuildTicker: tickCh, IncludeProvisionerDaemon: true, @@ -1401,7 +1448,11 @@ func TestWorkspaceAutobuild(t *testing.T) { require.NoError(t, err) // Kick of an autostart build. - tickCh <- sched.Next(ws.LatestBuild.CreatedAt) + tickTime := sched.Next(ws.LatestBuild.CreatedAt) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), ws.OrganizationID, nil) + require.NoError(t, err) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + tickCh <- tickTime stats := <-statsCh require.Len(t, stats.Errors, 0) require.Len(t, stats.Transitions, 1) @@ -1429,7 +1480,9 @@ func TestWorkspaceAutobuild(t *testing.T) { }) // Force an autostart transition again. - tickCh <- sched.Next(firstBuild.CreatedAt) + tickTime2 := sched.Next(firstBuild.CreatedAt) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime2) + tickCh <- tickTime2 stats = <-statsCh require.Len(t, stats.Errors, 0) require.Len(t, stats.Transitions, 1) @@ -1453,7 +1506,7 @@ func TestWorkspaceAutobuild(t *testing.T) { clock.Set(dbtime.Now()) logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) - client, user := coderdenttest.New(t, &coderdenttest.Options{ + client, db, user := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ Options: &coderdtest.Options{ AutobuildTicker: tickCh, IncludeProvisionerDaemon: true, @@ -1494,6 +1547,9 @@ func TestWorkspaceAutobuild(t *testing.T) { next = sched.Next(next) clock.Set(next) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), ws.OrganizationID, nil) + require.NoError(t, err) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, next) tickCh <- next stats := <-statsCh ws = coderdtest.MustWorkspace(t, client, ws.ID) @@ -1651,7 +1707,6 @@ func TestWorkspaceAutobuild(t *testing.T) { // We want to test the database nullifies the NextStartAt so we // make a raw DB call here. We pass in NextStartAt here so we // can test the database will nullify it and not us. - //nolint: gocritic // We need system context to modify this. err = db.UpdateWorkspaceAutostart(dbauthz.AsSystemRestricted(ctx), database.UpdateWorkspaceAutostartParams{ ID: ws.ID, AutostartSchedule: sql.NullString{Valid: true, String: sched.String()}, @@ -1722,7 +1777,7 @@ func TestTemplateDoesNotAllowUserAutostop(t *testing.T) { }) } -func TestExecutorPrebuilds(t *testing.T) { +func TestPrebuildsAutobuild(t *testing.T) { t.Parallel() if !dbtestutil.WillUsePostgres() { @@ -1800,14 +1855,21 @@ func TestExecutorPrebuilds(t *testing.T) { username string, version codersdk.TemplateVersion, presetID uuid.UUID, + autostartSchedule ...string, ) codersdk.Workspace { t.Helper() + var startSchedule string + if len(autostartSchedule) > 0 { + startSchedule = autostartSchedule[0] + } + workspaceName := strings.ReplaceAll(testutil.GetRandomName(t), "_", "-") userWorkspace, err := userClient.CreateUserWorkspace(ctx, username, codersdk.CreateWorkspaceRequest{ TemplateVersionID: version.ID, Name: workspaceName, TemplateVersionPresetID: presetID, + AutostartSchedule: ptr.Ref(startSchedule), }) require.NoError(t, err) build := coderdtest.AwaitWorkspaceBuildJobCompleted(t, userClient, userWorkspace.LatestBuild.ID) @@ -1820,7 +1882,7 @@ func TestExecutorPrebuilds(t *testing.T) { // Prebuilt workspaces should not be autostopped based on the default TTL. // This test ensures that DefaultTTLMillis is ignored while the workspace is in a prebuild state. - // Once the workspace is claimed, the default autostop timer should take effect. + // Once the workspace is claimed, the default TTL should take effect. t.Run("DefaultTTLOnlyTriggersAfterClaim", func(t *testing.T) { t.Parallel() @@ -1875,9 +1937,9 @@ func TestExecutorPrebuilds(t *testing.T) { userClient, user := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleMember()) version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, templateWithAgentAndPresetsWithPrebuilds(prebuildInstances)) coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + // Set a template level TTL to trigger the autostop + // Template level TTL can only be set if autostop is disabled for users coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID, func(ctr *codersdk.CreateTemplateRequest) { - // Set a template level TTL to trigger the autostop - // Template level TTL can only be set if autostop is disabled for users ctr.AllowUserAutostop = ptr.Ref[bool](false) ctr.DefaultTTLMillis = ptr.Ref[int64](ttlTime.Milliseconds()) }) @@ -1890,43 +1952,48 @@ func TestExecutorPrebuilds(t *testing.T) { runningPrebuilds := getRunningPrebuilds(t, ctx, db, int(prebuildInstances)) require.Len(t, runningPrebuilds, int(prebuildInstances)) - // Given: a running prebuilt workspace with a deadline, ready to be claimed + // Given: a running prebuilt workspace, ready to be claimed prebuild := coderdtest.MustWorkspace(t, client, runningPrebuilds[0].ID) require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition) - require.NotZero(t, prebuild.LatestBuild.Deadline) - - // When: the autobuild executor ticks *after* the deadline - next := prebuild.LatestBuild.Deadline.Time.Add(time.Minute) - clock.Set(next) + // Prebuilt workspaces should have an empty Deadline and MaxDeadline + // which is equivalent to 0001-01-01 00:00:00 +0000 + require.Zero(t, prebuild.LatestBuild.Deadline) + require.Zero(t, prebuild.LatestBuild.MaxDeadline) + + // When: the autobuild executor ticks *after* the TTL time (10:00 AM UTC) + next := clock.Now().Add(ttlTime).Add(time.Minute) + clock.Set(next) // 10:01 AM UTC go func() { tickCh <- next }() // Then: the prebuilt workspace should remain in a start transition - prebuildStats := <-statsCh + prebuildStats := testutil.RequireReceive(ctx, t, statsCh) require.Len(t, prebuildStats.Errors, 0) require.Len(t, prebuildStats.Transitions, 0) require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition) prebuild = coderdtest.MustWorkspace(t, client, prebuild.ID) require.Equal(t, codersdk.BuildReasonInitiator, prebuild.LatestBuild.Reason) + require.Zero(t, prebuild.LatestBuild.Deadline) + require.Zero(t, prebuild.LatestBuild.MaxDeadline) // Given: a user claims the prebuilt workspace sometime later - clock.Set(clock.Now().Add(ttlTime)) + clock.Set(clock.Now().Add(1 * time.Hour)) // 11:01 AM UTC workspace := claimPrebuild(t, ctx, client, userClient, user.Username, version, presets[0].ID) require.Equal(t, prebuild.ID, workspace.ID) - // Workspace deadline must be ttlTime from the time it is claimed + // Workspace deadline must be ttlTime from the time it is claimed (1:01 PM UTC) require.True(t, workspace.LatestBuild.Deadline.Time.Equal(clock.Now().Add(ttlTime))) - // When: the autobuild executor ticks *after* the deadline + // When: the autobuild executor ticks *after* the TTL time (1:01 PM UTC) next = workspace.LatestBuild.Deadline.Time.Add(time.Minute) - clock.Set(next) + clock.Set(next) // 1:02 PM UTC go func() { tickCh <- next close(tickCh) }() // Then: the workspace should be stopped - workspaceStats := <-statsCh + workspaceStats := testutil.RequireReceive(ctx, t, statsCh) require.Len(t, workspaceStats.Errors, 0) require.Len(t, workspaceStats.Transitions, 1) require.Contains(t, workspaceStats.Transitions, workspace.ID) @@ -1941,158 +2008,125 @@ func TestExecutorPrebuilds(t *testing.T) { t.Run("AutostopScheduleOnlyTriggersAfterClaim", func(t *testing.T) { t.Parallel() - cases := []struct { - name string - isClaimedBeforeDeadline bool - }{ - // If the prebuild is claimed before the scheduled deadline, - // the claimed workspace should inherit and respect that same deadline. - { - name: "ClaimedBeforeDeadline_UsesSameDeadline", - isClaimedBeforeDeadline: true, + // Set the clock to Monday, January 1st, 2024 at 8:00 AM UTC to keep the test deterministic + clock := quartz.NewMock(t) + clock.Set(time.Date(2024, 1, 1, 8, 0, 0, 0, time.UTC)) + + // Setup + ctx := testutil.Context(t, testutil.WaitSuperLong) + db, pb := dbtestutil.NewDB(t, dbtestutil.WithDumpOnFailure()) + logger := testutil.Logger(t) + tickCh := make(chan time.Time) + statsCh := make(chan autobuild.Stats) + notificationsNoop := notifications.NewNoopEnqueuer() + client, _, api, owner := coderdenttest.NewWithAPI(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + Database: db, + Pubsub: pb, + AutobuildTicker: tickCh, + IncludeProvisionerDaemon: true, + AutobuildStats: statsCh, + Clock: clock, + TemplateScheduleStore: schedule.NewEnterpriseTemplateScheduleStore( + agplUserQuietHoursScheduleStore(), + notificationsNoop, + logger, + clock, + ), }, - // If the prebuild is claimed after the scheduled deadline, - // the workspace should not stop immediately, but instead respect the next - // valid scheduled deadline (the next day). - { - name: "ClaimedAfterDeadline_SchedulesForNextDay", - isClaimedBeforeDeadline: false, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{codersdk.FeatureAdvancedTemplateScheduling: 1}, }, - } + }) - for _, tc := range cases { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - - // Set the clock to Monday, January 1st, 2024 at 8:00 AM UTC to keep the test deterministic - clock := quartz.NewMock(t) - clock.Set(time.Date(2024, 1, 1, 8, 0, 0, 0, time.UTC)) - - // Setup - ctx := testutil.Context(t, testutil.WaitSuperLong) - db, pb := dbtestutil.NewDB(t, dbtestutil.WithDumpOnFailure()) - logger := testutil.Logger(t) - tickCh := make(chan time.Time) - statsCh := make(chan autobuild.Stats) - notificationsNoop := notifications.NewNoopEnqueuer() - client, _, api, owner := coderdenttest.NewWithAPI(t, &coderdenttest.Options{ - Options: &coderdtest.Options{ - Database: db, - Pubsub: pb, - AutobuildTicker: tickCh, - IncludeProvisionerDaemon: true, - AutobuildStats: statsCh, - Clock: clock, - TemplateScheduleStore: schedule.NewEnterpriseTemplateScheduleStore( - agplUserQuietHoursScheduleStore(), - notificationsNoop, - logger, - clock, - ), - }, - LicenseOptions: &coderdenttest.LicenseOptions{ - Features: license.Features{codersdk.FeatureAdvancedTemplateScheduling: 1}, - }, - }) + // Setup Prebuild reconciler + cache := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) + reconciler := prebuilds.NewStoreReconciler( + db, pb, cache, + codersdk.PrebuildsConfig{}, + logger, + clock, + prometheus.NewRegistry(), + notificationsNoop, + api.AGPL.BuildUsageChecker, + ) + var claimer agplprebuilds.Claimer = prebuilds.NewEnterpriseClaimer(db) + api.AGPL.PrebuildsClaimer.Store(&claimer) - // Setup Prebuild reconciler - cache := files.New(prometheus.NewRegistry(), &coderdtest.FakeAuthorizer{}) - reconciler := prebuilds.NewStoreReconciler( - db, pb, cache, - codersdk.PrebuildsConfig{}, - logger, - clock, - prometheus.NewRegistry(), - notificationsNoop, - api.AGPL.BuildUsageChecker, - ) - var claimer agplprebuilds.Claimer = prebuilds.NewEnterpriseClaimer(db) - api.AGPL.PrebuildsClaimer.Store(&claimer) - - // Setup user, template and template version with a preset with 1 prebuild instance - prebuildInstances := int32(1) - userClient, user := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleMember()) - version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, templateWithAgentAndPresetsWithPrebuilds(prebuildInstances)) - coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) - coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID, func(ctr *codersdk.CreateTemplateRequest) { - // Set a template level Autostop schedule to trigger the autostop daily - ctr.AutostopRequirement = ptr.Ref[codersdk.TemplateAutostopRequirement]( - codersdk.TemplateAutostopRequirement{ - DaysOfWeek: []string{"monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"}, - Weeks: 1, - }) + // Setup user, template and template version with a preset with 1 prebuild instance + prebuildInstances := int32(1) + userClient, user := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleMember()) + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, templateWithAgentAndPresetsWithPrebuilds(prebuildInstances)) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + // Set a template level Autostop schedule to trigger the autostop daily + coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID, func(ctr *codersdk.CreateTemplateRequest) { + ctr.AutostopRequirement = ptr.Ref[codersdk.TemplateAutostopRequirement]( + codersdk.TemplateAutostopRequirement{ + DaysOfWeek: []string{"monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"}, + Weeks: 1, }) - presets, err := client.TemplateVersionPresets(ctx, version.ID) - require.NoError(t, err) - require.Len(t, presets, 1) - - // Given: Reconciliation loop runs and starts prebuilt workspace - runReconciliationLoop(t, ctx, db, reconciler, presets) - runningPrebuilds := getRunningPrebuilds(t, ctx, db, int(prebuildInstances)) - require.Len(t, runningPrebuilds, int(prebuildInstances)) - - // Given: a running prebuilt workspace with a deadline, ready to be claimed - prebuild := coderdtest.MustWorkspace(t, client, runningPrebuilds[0].ID) - require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition) - require.NotZero(t, prebuild.LatestBuild.Deadline) - - next := clock.Now() - if tc.isClaimedBeforeDeadline { - // When: the autobuild executor ticks *before* the deadline: - next = next.Add(time.Minute) - } else { - // When: the autobuild executor ticks *after* the deadline: - next = next.Add(24 * time.Hour) - } + }) + presets, err := client.TemplateVersionPresets(ctx, version.ID) + require.NoError(t, err) + require.Len(t, presets, 1) - clock.Set(next) - go func() { - tickCh <- next - }() - - // Then: the prebuilt workspace should remain in a start transition - prebuildStats := <-statsCh - require.Len(t, prebuildStats.Errors, 0) - require.Len(t, prebuildStats.Transitions, 0) - require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition) - prebuild = coderdtest.MustWorkspace(t, client, prebuild.ID) - require.Equal(t, codersdk.BuildReasonInitiator, prebuild.LatestBuild.Reason) - - // Given: a user claims the prebuilt workspace - workspace := claimPrebuild(t, ctx, client, userClient, user.Username, version, presets[0].ID) - require.Equal(t, prebuild.ID, workspace.ID) - - if tc.isClaimedBeforeDeadline { - // Then: the claimed workspace should inherit and respect that same deadline. - require.True(t, workspace.LatestBuild.Deadline.Time.Equal(prebuild.LatestBuild.Deadline.Time)) - } else { - // Then: the claimed workspace should respect the next valid scheduled deadline (next day). - require.True(t, workspace.LatestBuild.Deadline.Time.Equal(clock.Now().Truncate(24*time.Hour).Add(24*time.Hour))) - } + // Given: Reconciliation loop runs and starts prebuilt workspace + runReconciliationLoop(t, ctx, db, reconciler, presets) + runningPrebuilds := getRunningPrebuilds(t, ctx, db, int(prebuildInstances)) + require.Len(t, runningPrebuilds, int(prebuildInstances)) - // When: the autobuild executor ticks *after* the deadline: - next = workspace.LatestBuild.Deadline.Time.Add(time.Minute) - clock.Set(next) - go func() { - tickCh <- next - close(tickCh) - }() - - // Then: the workspace should be stopped - workspaceStats := <-statsCh - require.Len(t, workspaceStats.Errors, 0) - require.Len(t, workspaceStats.Transitions, 1) - require.Contains(t, workspaceStats.Transitions, workspace.ID) - require.Equal(t, database.WorkspaceTransitionStop, workspaceStats.Transitions[workspace.ID]) - workspace = coderdtest.MustWorkspace(t, client, workspace.ID) - require.Equal(t, codersdk.BuildReasonAutostop, workspace.LatestBuild.Reason) - }) - } + // Given: a running prebuilt workspace, ready to be claimed + prebuild := coderdtest.MustWorkspace(t, client, runningPrebuilds[0].ID) + require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition) + // Prebuilt workspaces should have an empty Deadline and MaxDeadline + // which is equivalent to 0001-01-01 00:00:00 +0000 + require.Zero(t, prebuild.LatestBuild.Deadline) + require.Zero(t, prebuild.LatestBuild.MaxDeadline) + + // When: the autobuild executor ticks *after* the deadline (2024-01-02 0:00 UTC) + next := clock.Now().Truncate(24 * time.Hour).Add(24 * time.Hour).Add(time.Minute) + clock.Set(next) // 2024-01-02 0:01 UTC + go func() { + tickCh <- next + }() + + // Then: the prebuilt workspace should remain in a start transition + prebuildStats := testutil.RequireReceive(ctx, t, statsCh) + require.Len(t, prebuildStats.Errors, 0) + require.Len(t, prebuildStats.Transitions, 0) + require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition) + prebuild = coderdtest.MustWorkspace(t, client, prebuild.ID) + require.Equal(t, codersdk.BuildReasonInitiator, prebuild.LatestBuild.Reason) + require.Zero(t, prebuild.LatestBuild.Deadline) + require.Zero(t, prebuild.LatestBuild.MaxDeadline) + + // Given: a user claims the prebuilt workspace + workspace := claimPrebuild(t, ctx, client, userClient, user.Username, version, presets[0].ID) + require.Equal(t, prebuild.ID, workspace.ID) + // Then: the claimed workspace should respect the next valid scheduled deadline (2024-01-03 0:00 UTC) + require.True(t, workspace.LatestBuild.Deadline.Time.Equal(clock.Now().Truncate(24*time.Hour).Add(24*time.Hour))) + + // When: the autobuild executor ticks *after* the deadline (2024-01-03 0:00 UTC) + next = workspace.LatestBuild.Deadline.Time.Add(time.Minute) + clock.Set(next) // 2024-01-03 0:01 UTC + go func() { + tickCh <- next + close(tickCh) + }() + + // Then: the workspace should be stopped + workspaceStats := testutil.RequireReceive(ctx, t, statsCh) + require.Len(t, workspaceStats.Errors, 0) + require.Len(t, workspaceStats.Transitions, 1) + require.Contains(t, workspaceStats.Transitions, workspace.ID) + require.Equal(t, database.WorkspaceTransitionStop, workspaceStats.Transitions[workspace.ID]) + workspace = coderdtest.MustWorkspace(t, client, workspace.ID) + require.Equal(t, codersdk.BuildReasonAutostop, workspace.LatestBuild.Reason) }) // Prebuild workspaces should not follow the autostart schedule. // This test verifies that AutostartRequirement (autostart schedule) is ignored while the workspace is a prebuild. + // After being claimed, the workspace should be started according to the autostart schedule. t.Run("AutostartScheduleOnlyTriggersAfterClaim", func(t *testing.T) { t.Parallel() @@ -2146,8 +2180,11 @@ func TestExecutorPrebuilds(t *testing.T) { userClient, user := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleMember()) version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, templateWithAgentAndPresetsWithPrebuilds(prebuildInstances)) coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + // Template-level autostart config only defines allowed days for workspaces to autostart + // The actual autostart schedule is set at the workspace level + sched, err := cron.Weekly("CRON_TZ=UTC 0 0 * * *") + require.NoError(t, err) coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID, func(ctr *codersdk.CreateTemplateRequest) { - // Set a template level Autostart schedule to trigger the autostart daily ctr.AllowUserAutostart = ptr.Ref[bool](true) ctr.AutostartRequirement = &codersdk.TemplateAutostartRequirement{DaysOfWeek: codersdk.AllDaysOfWeek} }) @@ -2160,14 +2197,11 @@ func TestExecutorPrebuilds(t *testing.T) { runningPrebuilds := getRunningPrebuilds(t, ctx, db, int(prebuildInstances)) require.Len(t, runningPrebuilds, int(prebuildInstances)) - // Given: prebuilt workspace has autostart schedule daily at midnight + // Given: a running prebuilt workspace prebuild := coderdtest.MustWorkspace(t, client, runningPrebuilds[0].ID) - sched, err := cron.Weekly("CRON_TZ=UTC 0 0 * * *") - require.NoError(t, err) - err = client.UpdateWorkspaceAutostart(ctx, prebuild.ID, codersdk.UpdateWorkspaceAutostartRequest{ - Schedule: ptr.Ref(sched.String()), - }) - require.NoError(t, err) + // Prebuilt workspaces should have an empty Autostart Schedule + require.Nil(t, prebuild.AutostartSchedule) + require.Nil(t, prebuild.NextStartAt) // Given: prebuilt workspace is stopped prebuild = coderdtest.MustTransitionWorkspace(t, client, prebuild.ID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionStop) @@ -2181,51 +2215,61 @@ func TestExecutorPrebuilds(t *testing.T) { }() // Then: the prebuilt workspace should remain in a stop transition - prebuildStats := <-statsCh + prebuildStats := testutil.RequireReceive(ctx, t, statsCh) require.Len(t, prebuildStats.Errors, 0) require.Len(t, prebuildStats.Transitions, 0) require.Equal(t, codersdk.WorkspaceTransitionStop, prebuild.LatestBuild.Transition) prebuild = coderdtest.MustWorkspace(t, client, prebuild.ID) require.Equal(t, codersdk.BuildReasonInitiator, prebuild.LatestBuild.Reason) + require.Nil(t, prebuild.AutostartSchedule) + require.Nil(t, prebuild.NextStartAt) // Given: a prebuilt workspace that is running and ready to be claimed prebuild = coderdtest.MustTransitionWorkspace(t, client, prebuild.ID, codersdk.WorkspaceTransitionStop, codersdk.WorkspaceTransitionStart) coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, prebuild.LatestBuild.ID) - // Make sure the workspace's agent is again ready getRunningPrebuilds(t, ctx, db, int(prebuildInstances)) - // Given: a user claims the prebuilt workspace - workspace := claimPrebuild(t, ctx, client, userClient, user.Username, version, presets[0].ID) + // Given: a user claims the prebuilt workspace with an Autostart schedule request + workspace := claimPrebuild(t, ctx, client, userClient, user.Username, version, presets[0].ID, sched.String()) require.Equal(t, prebuild.ID, workspace.ID) + // Then: newly claimed workspace's AutostartSchedule and NextStartAt should be set + require.NotNil(t, workspace.AutostartSchedule) require.NotNil(t, workspace.NextStartAt) // Given: workspace is stopped workspace = coderdtest.MustTransitionWorkspace(t, client, workspace.ID, codersdk.WorkspaceTransitionStart, codersdk.WorkspaceTransitionStop) coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) - // Then: the claimed workspace should inherit and respect that same NextStartAt - require.True(t, workspace.NextStartAt.Equal(*prebuild.NextStartAt)) + // Wait for provisioner to be available for this specific workspace + coderdtest.MustWaitForProvisionersAvailable(t, db, prebuild) + + tickTime := sched.Next(prebuild.LatestBuild.CreatedAt).Add(time.Minute) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), workspace.OrganizationID, nil) + require.NoError(t, err) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) // Tick at the next scheduled time after the prebuild’s LatestBuild.CreatedAt, // since the next allowed autostart is calculated starting from that point. // When: the autobuild executor ticks after the scheduled time go func() { - tickCh <- sched.Next(prebuild.LatestBuild.CreatedAt).Add(time.Minute) + tickCh <- tickTime }() // Then: the workspace should have a NextStartAt equal to the next autostart schedule - workspaceStats := <-statsCh + workspaceStats := testutil.RequireReceive(ctx, t, statsCh) require.Len(t, workspaceStats.Errors, 0) require.Len(t, workspaceStats.Transitions, 1) workspace = coderdtest.MustWorkspace(t, client, workspace.ID) + require.NotNil(t, workspace.AutostartSchedule) require.NotNil(t, workspace.NextStartAt) require.Equal(t, sched.Next(clock.Now()), workspace.NextStartAt.UTC()) }) - // Prebuild workspaces should not transition to dormant when the inactive TTL is reached. - // This test verifies that TimeTilDormantMillis is ignored while the workspace is a prebuild. - // After being claimed, the workspace should become dormant according to the configured inactivity period. + // Prebuild workspaces should not transition to dormant or be deleted due to inactivity. + // This test verifies that both TimeTilDormantMillis and TimeTilDormantAutoDeleteMillis + // are ignored while the workspace is a prebuild. After the workspace is claimed, + // it should respect these inactivity thresholds accordingly. t.Run("DormantOnlyAfterClaimed", func(t *testing.T) { t.Parallel() @@ -2276,13 +2320,15 @@ func TestExecutorPrebuilds(t *testing.T) { // Setup user, template and template version with a preset with 1 prebuild instance prebuildInstances := int32(1) - inactiveTTL := 2 * time.Hour + dormantTTL := 2 * time.Hour + deletionTTL := 2 * time.Hour userClient, user := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleMember()) version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, templateWithAgentAndPresetsWithPrebuilds(prebuildInstances)) coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + // Set a template level dormant TTL to trigger dormancy coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID, func(ctr *codersdk.CreateTemplateRequest) { - // Set a template level inactive TTL to trigger dormancy - ctr.TimeTilDormantMillis = ptr.Ref[int64](inactiveTTL.Milliseconds()) + ctr.TimeTilDormantMillis = ptr.Ref[int64](dormantTTL.Milliseconds()) + ctr.TimeTilDormantAutoDeleteMillis = ptr.Ref[int64](deletionTTL.Milliseconds()) }) presets, err := client.TemplateVersionPresets(ctx, version.ID) require.NoError(t, err) @@ -2296,41 +2342,73 @@ func TestExecutorPrebuilds(t *testing.T) { // Given: a running prebuilt workspace, ready to be claimed prebuild := coderdtest.MustWorkspace(t, client, runningPrebuilds[0].ID) require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition) + require.Nil(t, prebuild.DormantAt) + require.Nil(t, prebuild.DeletingAt) - // When: the autobuild executor ticks *after* the inactive TTL + // When: the autobuild executor ticks *after* the dormant TTL (10:00 AM UTC) + next := clock.Now().Add(dormantTTL).Add(time.Minute) + clock.Set(next) // 10:01 AM UTC go func() { - tickCh <- prebuild.LastUsedAt.Add(inactiveTTL).Add(time.Minute) + tickCh <- next }() // Then: the prebuilt workspace should remain in a start transition - prebuildStats := <-statsCh + prebuildStats := testutil.RequireReceive(ctx, t, statsCh) require.Len(t, prebuildStats.Errors, 0) require.Len(t, prebuildStats.Transitions, 0) require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition) prebuild = coderdtest.MustWorkspace(t, client, prebuild.ID) require.Equal(t, codersdk.BuildReasonInitiator, prebuild.LatestBuild.Reason) + require.Nil(t, prebuild.DormantAt) + require.Nil(t, prebuild.DeletingAt) // Given: a user claims the prebuilt workspace sometime later - clock.Set(clock.Now().Add(inactiveTTL)) + clock.Set(clock.Now().Add(1 * time.Hour)) // 11:01 AM UTC workspace := claimPrebuild(t, ctx, client, userClient, user.Username, version, presets[0].ID) require.Equal(t, prebuild.ID, workspace.ID) - require.Nil(t, prebuild.DormantAt) + // Then: the claimed workspace should have DormantAt and DeletingAt unset (nil), + // and LastUsedAt updated + require.Nil(t, workspace.DormantAt) + require.Nil(t, workspace.DeletingAt) + require.True(t, workspace.LastUsedAt.After(prebuild.LastUsedAt)) - // When: the autobuild executor ticks *after* the inactive TTL + // When: the autobuild executor ticks *after* the dormant TTL (1:01 PM UTC) + next = clock.Now().Add(dormantTTL).Add(time.Minute) + clock.Set(next) // 1:02 PM UTC go func() { - tickCh <- prebuild.LastUsedAt.Add(inactiveTTL).Add(time.Minute) - close(tickCh) + tickCh <- next }() - // Then: the workspace should transition to stopped state for breaching failure TTL - workspaceStats := <-statsCh + // Then: the workspace should transition to stopped state for breaching dormant TTL + workspaceStats := testutil.RequireReceive(ctx, t, statsCh) require.Len(t, workspaceStats.Errors, 0) require.Len(t, workspaceStats.Transitions, 1) require.Contains(t, workspaceStats.Transitions, workspace.ID) require.Equal(t, database.WorkspaceTransitionStop, workspaceStats.Transitions[workspace.ID]) workspace = coderdtest.MustWorkspace(t, client, workspace.ID) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) + workspace = coderdtest.MustWorkspace(t, client, workspace.ID) require.Equal(t, codersdk.BuildReasonDormancy, workspace.LatestBuild.Reason) + require.Equal(t, codersdk.WorkspaceStatusStopped, workspace.LatestBuild.Status) require.NotNil(t, workspace.DormantAt) + require.NotNil(t, workspace.DeletingAt) + + tickTime := workspace.DeletingAt.Add(time.Minute) + p, err := coderdtest.GetProvisionerForTags(db, time.Now(), workspace.OrganizationID, nil) + require.NoError(t, err) + coderdtest.UpdateProvisionerLastSeenAt(t, db, p.ID, tickTime) + + // When: the autobuild executor ticks *after* the deletion TTL + go func() { + tickCh <- tickTime + }() + + // Then: the workspace should be deleted + dormantWorkspaceStats := testutil.RequireReceive(ctx, t, statsCh) + require.Len(t, dormantWorkspaceStats.Errors, 0) + require.Len(t, dormantWorkspaceStats.Transitions, 1) + require.Contains(t, dormantWorkspaceStats.Transitions, workspace.ID) + require.Equal(t, database.WorkspaceTransitionDelete, dormantWorkspaceStats.Transitions[workspace.ID]) }) // Prebuild workspaces should not be deleted when the failure TTL is reached. @@ -2390,8 +2468,8 @@ func TestExecutorPrebuilds(t *testing.T) { failureTTL := 2 * time.Hour version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, templateWithFailedResponseAndPresetsWithPrebuilds(prebuildInstances)) coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + // Set a template level Failure TTL to trigger workspace deletion template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID, func(ctr *codersdk.CreateTemplateRequest) { - // Set a template level Failure TTL to trigger workspace deletion ctr.FailureTTLMillis = ptr.Ref[int64](failureTTL.Milliseconds()) }) presets, err := client.TemplateVersionPresets(ctx, version.ID) @@ -2400,7 +2478,6 @@ func TestExecutorPrebuilds(t *testing.T) { // Given: reconciliation loop runs and starts prebuilt workspace in failed state runReconciliationLoop(t, ctx, db, reconciler, presets) - var failedWorkspaceBuilds []database.GetFailedWorkspaceBuildsByTemplateIDRow require.Eventually(t, func() bool { rows, err := db.GetFailedWorkspaceBuildsByTemplateID(ctx, database.GetFailedWorkspaceBuildsByTemplateIDParams{ @@ -2427,7 +2504,7 @@ func TestExecutorPrebuilds(t *testing.T) { }() // Then: the prebuilt workspace should remain in a start transition - prebuildStats := <-statsCh + prebuildStats := testutil.RequireReceive(ctx, t, statsCh) require.Len(t, prebuildStats.Errors, 0) require.Len(t, prebuildStats.Transitions, 0) require.Equal(t, codersdk.WorkspaceTransitionStart, prebuild.LatestBuild.Transition) @@ -2437,50 +2514,46 @@ func TestExecutorPrebuilds(t *testing.T) { } func templateWithAgentAndPresetsWithPrebuilds(desiredInstances int32) *echo.Responses { - return &echo.Responses{ - Parse: echo.ParseComplete, - ProvisionPlan: []*proto.Response{ - { - Type: &proto.Response_Plan{ - Plan: &proto.PlanComplete{ - Presets: []*proto.Preset{ - { - Name: "preset-test", - Parameters: []*proto.PresetParameter{ - { - Name: "k1", - Value: "v1", - }, - }, - Prebuild: &proto.Prebuild{ - Instances: desiredInstances, - }, - }, - }, - }, + agent := &proto.Agent{ + Name: "smith", + OperatingSystem: "linux", + Architecture: "i386", + } + + resource := func(withAgent bool) *proto.Resource { + r := &proto.Resource{Type: "compute", Name: "main"} + if withAgent { + r.Agents = []*proto.Agent{agent} + } + return r + } + + applyResponse := func(withAgent bool) *proto.Response { + return &proto.Response{ + Type: &proto.Response_Apply{ + Apply: &proto.ApplyComplete{ + Resources: []*proto.Resource{resource(withAgent)}, }, }, - }, - ProvisionApply: []*proto.Response{ - { - Type: &proto.Response_Apply{ - Apply: &proto.ApplyComplete{ - Resources: []*proto.Resource{ - { - Type: "compute", - Name: "main", - Agents: []*proto.Agent{ - { - Name: "smith", - OperatingSystem: "linux", - Architecture: "i386", - }, - }, - }, - }, - }, + } + } + + return &echo.Responses{ + Parse: echo.ParseComplete, + ProvisionPlan: []*proto.Response{{ + Type: &proto.Response_Plan{ + Plan: &proto.PlanComplete{ + Presets: []*proto.Preset{{ + Name: "preset-test", + Parameters: []*proto.PresetParameter{{Name: "k1", Value: "v1"}}, + Prebuild: &proto.Prebuild{Instances: desiredInstances}, + }}, }, }, + }}, + ProvisionApplyMap: map[proto.WorkspaceTransition][]*proto.Response{ + proto.WorkspaceTransition_START: {applyResponse(true)}, + proto.WorkspaceTransition_STOP: {applyResponse(false)}, }, } } @@ -2514,6 +2587,292 @@ func templateWithFailedResponseAndPresetsWithPrebuilds(desiredInstances int32) * } } +func TestPrebuildUpdateLifecycleParams(t *testing.T) { + t.Parallel() + + // Autostart schedule configuration set to weekly at 9:30 AM UTC + autostartSchedule, err := cron.Weekly("CRON_TZ=UTC 30 9 * * 1-5") + require.NoError(t, err) + + // TTL configuration set to 8 hours + ttlMillis := ptr.Ref((8 * time.Hour).Milliseconds()) + + // Deadline configuration set to January 1st, 2024 at 10:00 AM UTC + deadline := time.Date(2024, 1, 1, 10, 0, 0, 0, time.UTC) + + cases := []struct { + name string + endpoint func(*testing.T, context.Context, *codersdk.Client, uuid.UUID) error + apiErrorMsg string + assertUpdate func(*testing.T, *quartz.Mock, *codersdk.Client, uuid.UUID) + }{ + { + name: "AutostartUpdatePrebuildAfterClaim", + endpoint: func(t *testing.T, ctx context.Context, client *codersdk.Client, workspaceID uuid.UUID) error { + err = client.UpdateWorkspaceAutostart(ctx, workspaceID, codersdk.UpdateWorkspaceAutostartRequest{ + Schedule: ptr.Ref(autostartSchedule.String()), + }) + return err + }, + apiErrorMsg: "Autostart is not supported for prebuilt workspaces", + assertUpdate: func(t *testing.T, clock *quartz.Mock, client *codersdk.Client, workspaceID uuid.UUID) { + // The workspace's autostart schedule should be updated to the given schedule, + // and its next start time should be set to 2024-01-01 09:30 AM UTC + updatedWorkspace := coderdtest.MustWorkspace(t, client, workspaceID) + require.Equal(t, autostartSchedule.String(), *updatedWorkspace.AutostartSchedule) + require.Equal(t, autostartSchedule.Next(clock.Now()), updatedWorkspace.NextStartAt.UTC()) + expectedNext := time.Date(2024, 1, 1, 9, 30, 0, 0, time.UTC) + require.Equal(t, expectedNext, updatedWorkspace.NextStartAt.UTC()) + }, + }, + { + name: "TTLUpdatePrebuildAfterClaim", + endpoint: func(t *testing.T, ctx context.Context, client *codersdk.Client, workspaceID uuid.UUID) error { + err := client.UpdateWorkspaceTTL(ctx, workspaceID, codersdk.UpdateWorkspaceTTLRequest{ + TTLMillis: ttlMillis, + }) + return err + }, + apiErrorMsg: "TTL updates are not supported for prebuilt workspaces", + assertUpdate: func(t *testing.T, clock *quartz.Mock, client *codersdk.Client, workspaceID uuid.UUID) { + // The workspace's TTL should be updated accordingly + updatedWorkspace := coderdtest.MustWorkspace(t, client, workspaceID) + require.Equal(t, ttlMillis, updatedWorkspace.TTLMillis) + }, + }, + { + name: "DormantUpdatePrebuildAfterClaim", + endpoint: func(t *testing.T, ctx context.Context, client *codersdk.Client, workspaceID uuid.UUID) error { + err := client.UpdateWorkspaceDormancy(ctx, workspaceID, codersdk.UpdateWorkspaceDormancy{ + Dormant: true, + }) + return err + }, + apiErrorMsg: "Dormancy updates are not supported for prebuilt workspaces", + assertUpdate: func(t *testing.T, clock *quartz.Mock, client *codersdk.Client, workspaceID uuid.UUID) { + // The workspace's dormantAt should be updated accordingly + updatedWorkspace := coderdtest.MustWorkspace(t, client, workspaceID) + require.Equal(t, clock.Now(), updatedWorkspace.DormantAt.UTC()) + }, + }, + { + name: "DeadlineUpdatePrebuildAfterClaim", + endpoint: func(t *testing.T, ctx context.Context, client *codersdk.Client, workspaceID uuid.UUID) error { + err := client.PutExtendWorkspace(ctx, workspaceID, codersdk.PutExtendWorkspaceRequest{ + Deadline: deadline, + }) + return err + }, + apiErrorMsg: "Deadline extension is not supported for prebuilt workspaces", + assertUpdate: func(t *testing.T, clock *quartz.Mock, client *codersdk.Client, workspaceID uuid.UUID) { + // The workspace build's deadline should be updated accordingly + updatedWorkspace := coderdtest.MustWorkspace(t, client, workspaceID) + require.Equal(t, deadline, updatedWorkspace.LatestBuild.Deadline.Time.UTC()) + }, + }, + } + + for _, tc := range cases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + // Set the clock to Monday, January 1st, 2024 at 8:00 AM UTC to keep the test deterministic + clock := quartz.NewMock(t) + clock.Set(time.Date(2024, 1, 1, 8, 0, 0, 0, time.UTC)) + + // Setup + client, db, owner := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + IncludeProvisionerDaemon: true, + Clock: clock, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspacePrebuilds: 1, + }, + }, + }) + + // Given: a template and a template version with preset and a prebuilt workspace + presetID := uuid.New() + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, nil) + _ = coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID) + dbgen.Preset(t, db, database.InsertPresetParams{ + ID: presetID, + TemplateVersionID: version.ID, + DesiredInstances: sql.NullInt32{Int32: 1, Valid: true}, + }) + workspaceBuild := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OwnerID: database.PrebuildsSystemUserID, + TemplateID: template.ID, + }).Seed(database.WorkspaceBuild{ + TemplateVersionID: version.ID, + TemplateVersionPresetID: uuid.NullUUID{ + UUID: presetID, + Valid: true, + }, + }).WithAgent(func(agent []*proto.Agent) []*proto.Agent { + return agent + }).Do() + + // Mark the prebuilt workspace's agent as ready so the prebuild can be claimed + ctx := dbauthz.AsSystemRestricted(testutil.Context(t, testutil.WaitLong)) + agent, err := db.GetWorkspaceAgentAndLatestBuildByAuthToken(ctx, uuid.MustParse(workspaceBuild.AgentToken)) + require.NoError(t, err) + err = db.UpdateWorkspaceAgentLifecycleStateByID(ctx, database.UpdateWorkspaceAgentLifecycleStateByIDParams{ + ID: agent.WorkspaceAgent.ID, + LifecycleState: database.WorkspaceAgentLifecycleStateReady, + }) + require.NoError(t, err) + + // Given: a prebuilt workspace + prebuild := coderdtest.MustWorkspace(t, client, workspaceBuild.Workspace.ID) + + // When: the lifecycle-update endpoint is called for the prebuilt workspace + err = tc.endpoint(t, ctx, client, prebuild.ID) + + // Then: a 409 Conflict should be returned, with an error message specific to the lifecycle parameter + var apiErr *codersdk.Error + require.ErrorAs(t, err, &apiErr) + require.Equal(t, http.StatusConflict, apiErr.StatusCode()) + require.Equal(t, tc.apiErrorMsg, apiErr.Response.Message) + + // Given: the prebuilt workspace is claimed by a user + user, err := client.User(ctx, "testUser") + require.NoError(t, err) + claimedWorkspace, err := client.CreateUserWorkspace(ctx, user.ID.String(), codersdk.CreateWorkspaceRequest{ + TemplateVersionID: version.ID, + TemplateVersionPresetID: presetID, + Name: coderdtest.RandomUsername(t), + // The 'extend' endpoint requires the workspace to have an existing deadline. + // To ensure this, we set the workspace's TTL to 1 hour. + TTLMillis: ptr.Ref[int64](time.Hour.Milliseconds()), + }) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, claimedWorkspace.LatestBuild.ID) + workspace := coderdtest.MustWorkspace(t, client, claimedWorkspace.ID) + require.Equal(t, prebuild.ID, workspace.ID) + + // When: the same lifecycle-update endpoint is called for the claimed workspace + err = tc.endpoint(t, ctx, client, workspace.ID) + require.NoError(t, err) + + // Then: the workspace's lifecycle parameter should be updated accordingly + tc.assertUpdate(t, clock, client, claimedWorkspace.ID) + }) + } +} + +func TestPrebuildActivityBump(t *testing.T) { + t.Parallel() + + clock := quartz.NewMock(t) + clock.Set(dbtime.Now()) + + // Setup + log := testutil.Logger(t) + client, db, owner := coderdenttest.NewWithDatabase(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + IncludeProvisionerDaemon: true, + Clock: clock, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureWorkspacePrebuilds: 1, + }, + }, + }) + + // Given: a template and a template version with preset and a prebuilt workspace + presetID := uuid.New() + version := coderdtest.CreateTemplateVersion(t, client, owner.OrganizationID, nil) + _ = coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + // Configure activity bump on the template + activityBump := time.Hour + template := coderdtest.CreateTemplate(t, client, owner.OrganizationID, version.ID, func(ctr *codersdk.CreateTemplateRequest) { + ctr.ActivityBumpMillis = ptr.Ref[int64](activityBump.Milliseconds()) + }) + dbgen.Preset(t, db, database.InsertPresetParams{ + ID: presetID, + TemplateVersionID: version.ID, + DesiredInstances: sql.NullInt32{Int32: 1, Valid: true}, + }) + // Given: a prebuild with an expired Deadline + deadline := clock.Now().Add(-30 * time.Minute) + wb := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OwnerID: database.PrebuildsSystemUserID, + TemplateID: template.ID, + }).Seed(database.WorkspaceBuild{ + TemplateVersionID: version.ID, + TemplateVersionPresetID: uuid.NullUUID{ + UUID: presetID, + Valid: true, + }, + Deadline: deadline, + }).WithAgent(func(agent []*proto.Agent) []*proto.Agent { + return agent + }).Do() + + // Mark the prebuilt workspace's agent as ready so the prebuild can be claimed + // nolint:gocritic + ctx := dbauthz.AsSystemRestricted(testutil.Context(t, testutil.WaitLong)) + agent, err := db.GetWorkspaceAgentAndLatestBuildByAuthToken(ctx, uuid.MustParse(wb.AgentToken)) + require.NoError(t, err) + err = db.UpdateWorkspaceAgentLifecycleStateByID(ctx, database.UpdateWorkspaceAgentLifecycleStateByIDParams{ + ID: agent.WorkspaceAgent.ID, + LifecycleState: database.WorkspaceAgentLifecycleStateReady, + }) + require.NoError(t, err) + + // Given: a prebuilt workspace with a Deadline and an empty MaxDeadline + prebuild := coderdtest.MustWorkspace(t, client, wb.Workspace.ID) + require.Equal(t, deadline.UTC(), prebuild.LatestBuild.Deadline.Time.UTC()) + require.Zero(t, prebuild.LatestBuild.MaxDeadline) + + // When: activity bump is applied to an unclaimed prebuild + workspacestats.ActivityBumpWorkspace(ctx, log, db, prebuild.ID, clock.Now().Add(10*time.Hour)) + + // Then: prebuild Deadline/MaxDeadline remain unchanged + prebuild = coderdtest.MustWorkspace(t, client, wb.Workspace.ID) + require.Equal(t, deadline.UTC(), prebuild.LatestBuild.Deadline.Time.UTC()) + require.Zero(t, prebuild.LatestBuild.MaxDeadline) + + // Given: the prebuilt workspace is claimed by a user + user, err := client.User(ctx, "testUser") + require.NoError(t, err) + claimedWorkspace, err := client.CreateUserWorkspace(ctx, user.ID.String(), codersdk.CreateWorkspaceRequest{ + TemplateVersionID: version.ID, + TemplateVersionPresetID: presetID, + Name: coderdtest.RandomUsername(t), + }) + require.NoError(t, err) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, claimedWorkspace.LatestBuild.ID) + workspace := coderdtest.MustWorkspace(t, client, claimedWorkspace.ID) + require.Equal(t, prebuild.ID, workspace.ID) + // Claimed workspaces have an empty Deadline and MaxDeadline + require.Zero(t, workspace.LatestBuild.Deadline) + require.Zero(t, workspace.LatestBuild.MaxDeadline) + + // Given: the claimed workspace has an expired Deadline + err = db.UpdateWorkspaceBuildDeadlineByID(ctx, database.UpdateWorkspaceBuildDeadlineByIDParams{ + ID: workspace.LatestBuild.ID, + Deadline: deadline, + UpdatedAt: clock.Now(), + }) + require.NoError(t, err) + workspace = coderdtest.MustWorkspace(t, client, claimedWorkspace.ID) + + // When: activity bump is applied to a claimed prebuild + workspacestats.ActivityBumpWorkspace(ctx, log, db, workspace.ID, clock.Now().Add(10*time.Hour)) + + // Then: Deadline is extended by the activity bump, MaxDeadline remains unset + workspace = coderdtest.MustWorkspace(t, client, claimedWorkspace.ID) + require.WithinDuration(t, clock.Now().Add(activityBump).UTC(), workspace.LatestBuild.Deadline.Time.UTC(), testutil.WaitMedium) + require.Zero(t, workspace.LatestBuild.MaxDeadline) +} + // TestWorkspaceTemplateParamsChange tests a workspace with a parameter that // validation changes on apply. The params used in create workspace are invalid // according to the static params on import. @@ -3360,7 +3719,6 @@ func TestWorkspaceByOwnerAndName(t *testing.T) { require.Equal(t, ws.LatestBuild.MatchedProvisioners.Available, 0) // Verify that the provisioner daemon is registered in the database - //nolint:gocritic // unit testing daemons, err := db.GetProvisionerDaemons(dbauthz.AsSystemRestricted(ctx)) require.NoError(t, err) require.Equal(t, 1, len(daemons)) @@ -3396,7 +3754,6 @@ func TestWorkspaceByOwnerAndName(t *testing.T) { ctx = testutil.Context(t, testutil.WaitLong) // Reset the context to avoid timeouts. - // nolint:gocritic // unit testing daemons, err := db.GetProvisionerDaemons(dbauthz.AsSystemRestricted(ctx)) require.NoError(t, err) require.Equal(t, len(daemons), 1) @@ -3406,8 +3763,6 @@ func TestWorkspaceByOwnerAndName(t *testing.T) { require.NoError(t, err) // Simulate it's subsequent deletion from the database: - - // nolint:gocritic // unit testing _, err = db.UpsertProvisionerDaemon(dbauthz.AsSystemRestricted(ctx), database.UpsertProvisionerDaemonParams{ Name: daemons[0].Name, OrganizationID: daemons[0].OrganizationID, @@ -3425,7 +3780,6 @@ func TestWorkspaceByOwnerAndName(t *testing.T) { }, }) require.NoError(t, err) - // nolint:gocritic // unit testing err = db.DeleteOldProvisionerDaemons(dbauthz.AsSystemRestricted(ctx)) require.NoError(t, err) @@ -3436,7 +3790,6 @@ func TestWorkspaceByOwnerAndName(t *testing.T) { require.Equal(t, workspace.LatestBuild.MatchedProvisioners.Count, 0) require.Equal(t, workspace.LatestBuild.MatchedProvisioners.Available, 0) - // nolint:gocritic // unit testing _, err = client.WorkspaceByOwnerAndName(dbauthz.As(ctx, userSubject), username, workspace.Name, codersdk.WorkspaceOptions{}) require.NoError(t, err) require.Equal(t, workspace.LatestBuild.Status, codersdk.WorkspaceStatusPending) @@ -3473,7 +3826,6 @@ func TestWorkspaceByOwnerAndName(t *testing.T) { ctx = testutil.Context(t, testutil.WaitLong) // Reset the context to avoid timeouts. - // nolint:gocritic // unit testing daemons, err := db.GetProvisionerDaemons(dbauthz.AsSystemRestricted(ctx)) require.NoError(t, err) require.Equal(t, len(daemons), 1) @@ -3482,7 +3834,6 @@ func TestWorkspaceByOwnerAndName(t *testing.T) { err = closer.Close() require.NoError(t, err) - // nolint:gocritic // unit testing _, err = db.UpsertProvisionerDaemon(dbauthz.AsSystemRestricted(ctx), database.UpsertProvisionerDaemonParams{ Name: daemons[0].Name, OrganizationID: daemons[0].OrganizationID, @@ -3523,3 +3874,84 @@ func must[T any](value T, err error) T { } return value } + +func TestUpdateWorkspaceACL(t *testing.T) { + t.Parallel() + + t.Run("OKWithGroup", func(t *testing.T) { + t.Parallel() + + dv := coderdtest.DeploymentValues(t) + dv.Experiments = []string{string(codersdk.ExperimentWorkspaceSharing)} + adminClient, adminUser := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + IncludeProvisionerDaemon: true, + DeploymentValues: dv, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureTemplateRBAC: 1, + }, + }, + }) + orgID := adminUser.OrganizationID + client, _ := coderdtest.CreateAnotherUser(t, adminClient, orgID) + _, friend := coderdtest.CreateAnotherUser(t, adminClient, orgID) + group := coderdtest.CreateGroup(t, adminClient, orgID, "bloob") + + tv := coderdtest.CreateTemplateVersion(t, adminClient, orgID, nil) + coderdtest.AwaitTemplateVersionJobCompleted(t, adminClient, tv.ID) + template := coderdtest.CreateTemplate(t, adminClient, orgID, tv.ID) + + ws := coderdtest.CreateWorkspace(t, client, template.ID) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + + ctx := testutil.Context(t, testutil.WaitMedium) + err := client.UpdateWorkspaceACL(ctx, ws.ID, codersdk.UpdateWorkspaceACL{ + UserRoles: map[string]codersdk.WorkspaceRole{ + friend.ID.String(): codersdk.WorkspaceRoleAdmin, + }, + GroupRoles: map[string]codersdk.WorkspaceRole{ + group.ID.String(): codersdk.WorkspaceRoleAdmin, + }, + }) + require.NoError(t, err) + }) + + t.Run("UnknownIDs", func(t *testing.T) { + t.Parallel() + + dv := coderdtest.DeploymentValues(t) + dv.Experiments = []string{string(codersdk.ExperimentWorkspaceSharing)} + adminClient := coderdtest.New(t, &coderdtest.Options{ + IncludeProvisionerDaemon: true, + DeploymentValues: dv, + }) + adminUser := coderdtest.CreateFirstUser(t, adminClient) + orgID := adminUser.OrganizationID + client, _ := coderdtest.CreateAnotherUser(t, adminClient, orgID) + + tv := coderdtest.CreateTemplateVersion(t, adminClient, orgID, nil) + coderdtest.AwaitTemplateVersionJobCompleted(t, adminClient, tv.ID) + template := coderdtest.CreateTemplate(t, adminClient, orgID, tv.ID) + + ws := coderdtest.CreateWorkspace(t, client, template.ID) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, ws.LatestBuild.ID) + + ctx := testutil.Context(t, testutil.WaitMedium) + err := client.UpdateWorkspaceACL(ctx, ws.ID, codersdk.UpdateWorkspaceACL{ + UserRoles: map[string]codersdk.WorkspaceRole{ + uuid.NewString(): codersdk.WorkspaceRoleAdmin, + }, + GroupRoles: map[string]codersdk.WorkspaceRole{ + uuid.NewString(): codersdk.WorkspaceRoleAdmin, + }, + }) + require.Error(t, err) + cerr, ok := codersdk.AsError(err) + require.True(t, ok) + require.Len(t, cerr.Validations, 2) + require.Equal(t, cerr.Validations[0].Field, "group_roles") + require.Equal(t, cerr.Validations[1].Field, "user_roles") + }) +} diff --git a/enterprise/replicasync/replicasync.go b/enterprise/replicasync/replicasync.go index 528540a262464..129e652c97de5 100644 --- a/enterprise/replicasync/replicasync.go +++ b/enterprise/replicasync/replicasync.go @@ -23,6 +23,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/database/pubsub" + "github.com/coder/coder/v2/coderd/pproflabel" ) var PubsubEvent = "replica" @@ -104,7 +105,7 @@ func New(ctx context.Context, logger slog.Logger, db database.Store, ps pubsub.P return nil, xerrors.Errorf("subscribe: %w", err) } manager.closeWait.Add(1) - go manager.loop(ctx) + pproflabel.Go(ctx, pproflabel.Service(pproflabel.ServiceReplicaSync), manager.loop) return manager, nil } diff --git a/enterprise/wsproxy/wsproxy.go b/enterprise/wsproxy/wsproxy.go index bce49417fcd35..c2ac1baf2db4e 100644 --- a/enterprise/wsproxy/wsproxy.go +++ b/enterprise/wsproxy/wsproxy.go @@ -333,17 +333,18 @@ func New(ctx context.Context, opts *Options) (*Server, error) { r.Use( // TODO: @emyrk Should we standardize these in some other package? httpmw.Recover(s.Logger), + httpmw.WithProfilingLabels, tracing.StatusWriterMiddleware, tracing.Middleware(s.TracerProvider), httpmw.AttachRequestID, httpmw.ExtractRealIP(s.Options.RealIPConfig), loggermw.Logger(s.Logger), prometheusMW, - corsMW, // HandleSubdomain is a middleware that handles all requests to the // subdomain-based workspace apps. s.AppServer.HandleSubdomain(apiRateLimiter), + corsMW, // Build-Version is helpful for debugging. func(next http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { diff --git a/enterprise/wsproxy/wsproxy_test.go b/enterprise/wsproxy/wsproxy_test.go index b49dfd6c1ceaa..523d429476243 100644 --- a/enterprise/wsproxy/wsproxy_test.go +++ b/enterprise/wsproxy/wsproxy_test.go @@ -94,14 +94,8 @@ func TestDERPOnly(t *testing.T) { func TestDERP(t *testing.T) { t.Parallel() - deploymentValues := coderdtest.DeploymentValues(t) - deploymentValues.Experiments = []string{ - "*", - } - client, closer, api, user := coderdenttest.NewWithAPI(t, &coderdenttest.Options{ Options: &coderdtest.Options{ - DeploymentValues: deploymentValues, AppHostname: "*.primary.test.coder.com", IncludeProvisionerDaemon: true, RealIPConfig: &httpmw.RealIPConfig{ @@ -146,7 +140,7 @@ func TestDERP(t *testing.T) { }) require.NoError(t, err) - // Wait for both running proxies to become healthy. + // Wait for all three running proxies to become healthy. require.Eventually(t, func() bool { err := api.ProxyHealth.ForceUpdate(ctx) if !assert.NoError(t, err) { @@ -207,7 +201,7 @@ resourceLoop: require.NoError(t, err) // There should be three DERP regions in the map: the primary, and each - // of the two running proxies. Also the STUN-only regions. + // of the two DERP-enabled running proxies. Also the STUN-only regions. require.NotNil(t, connInfo.DERPMap) require.Len(t, connInfo.DERPMap.Regions, 3+len(api.DeploymentValues.DERP.Server.STUNAddresses.Value())) @@ -290,6 +284,7 @@ resourceLoop: t.Run(r.RegionName, func(t *testing.T) { t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) derpMap := &tailcfg.DERPMap{ Regions: map[int]*tailcfg.DERPRegion{ diff --git a/enterprise/wsproxy/wsproxysdk/wsproxysdk.go b/enterprise/wsproxy/wsproxysdk/wsproxysdk.go index b0051551a0f3d..72f5a4291c40e 100644 --- a/enterprise/wsproxy/wsproxysdk/wsproxysdk.go +++ b/enterprise/wsproxy/wsproxysdk/wsproxysdk.go @@ -75,7 +75,7 @@ func (c *Client) RequestIgnoreRedirects(ctx context.Context, method, path string // DialWorkspaceAgent calls the underlying codersdk.Client's DialWorkspaceAgent // method. -func (c *Client) DialWorkspaceAgent(ctx context.Context, agentID uuid.UUID, options *workspacesdk.DialAgentOptions) (agentConn *workspacesdk.AgentConn, err error) { +func (c *Client) DialWorkspaceAgent(ctx context.Context, agentID uuid.UUID, options *workspacesdk.DialAgentOptions) (agentConn workspacesdk.AgentConn, err error) { return workspacesdk.New(c.SDKClient).DialAgent(ctx, agentID, options) } diff --git a/flake.nix b/flake.nix index 6fd251111884a..f934d1641bdc7 100644 --- a/flake.nix +++ b/flake.nix @@ -57,7 +57,7 @@ formatter = pkgs.nixfmt-rfc-style; nodejs = pkgs.nodejs_20; - pnpm = pkgs.pnpm_9.override { + pnpm = pkgs.pnpm_10.override { inherit nodejs; # Ensure it points to the above nodejs version }; diff --git a/go.mod b/go.mod index 8e48f67f65885..3f9d92aa54c0e 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/coder/coder/v2 -go 1.24.4 +go 1.24.6 // Required until a v3 of chroma is created to lazily initialize all XML files. // None of our dependencies seem to use the registries anyways, so this @@ -36,7 +36,7 @@ replace github.com/tcnksm/go-httpstat => github.com/coder/go-httpstat v0.0.0-202 // There are a few minor changes we make to Tailscale that we're slowly upstreaming. Compare here: // https://github.com/tailscale/tailscale/compare/main...coder:tailscale:main -replace tailscale.com => github.com/coder/tailscale v1.1.1-0.20250724015444-494197765996 +replace tailscale.com => github.com/coder/tailscale v1.1.1-0.20250729141742-067f1e5d9716 // This is replaced to include // 1. a fix for a data race: c.f. https://github.com/tailscale/wireguard-go/pull/25 @@ -58,7 +58,7 @@ replace github.com/imulab/go-scim/pkg/v2 => github.com/coder/go-scim/pkg/v2 v2.0 // Adds support for a new Listener from a driver.Connector // This lets us use rotating authentication tokens for passwords in connection strings // which we use in the awsiamrds package. -replace github.com/lib/pq => github.com/coder/pq v1.10.5-0.20250630052411-a259f96b6102 +replace github.com/lib/pq => github.com/coder/pq v1.10.5-0.20250807075151-6ad9b0a25151 // Removes an init() function that causes terminal sequences to be printed to the web terminal when // used in conjunction with agent-exec. See https://github.com/coder/coder/pull/15817 @@ -74,7 +74,7 @@ replace github.com/spf13/afero => github.com/aslilac/afero v0.0.0-20250403163713 require ( cdr.dev/slog v1.6.2-0.20250703074222-9df5e0a6c145 - cloud.google.com/go/compute/metadata v0.7.0 + cloud.google.com/go/compute/metadata v0.8.0 github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d github.com/adrg/xdg v0.5.0 github.com/ammario/tlru v0.4.0 @@ -82,7 +82,7 @@ require ( github.com/aquasecurity/trivy-iac v0.8.0 github.com/armon/circbuf v0.0.0-20190214190532-5111143e8da2 github.com/awalterschulze/gographviz v2.0.3+incompatible - github.com/aws/smithy-go v1.22.3 + github.com/aws/smithy-go v1.22.5 github.com/bramvdbogaerde/go-scp v1.5.0 github.com/briandowns/spinner v1.23.0 github.com/cakturk/go-netstat v0.0.0-20200220111822-e5b49efee7a5 @@ -92,8 +92,8 @@ require ( github.com/charmbracelet/bubbletea v1.3.4 github.com/charmbracelet/glamour v0.10.0 github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 - github.com/chromedp/cdproto v0.0.0-20250319231242-a755498943c8 - github.com/chromedp/chromedp v0.13.3 + github.com/chromedp/cdproto v0.0.0-20250724212937-08a3db8b4327 + github.com/chromedp/chromedp v0.14.1 github.com/cli/safeexec v1.0.1 github.com/coder/flog v1.1.0 github.com/coder/guts v1.5.0 @@ -101,10 +101,10 @@ require ( github.com/coder/quartz v0.2.1 github.com/coder/retry v1.5.1 github.com/coder/serpent v0.10.0 - github.com/coder/terraform-provider-coder/v2 v2.9.0 + github.com/coder/terraform-provider-coder/v2 v2.10.0 github.com/coder/websocket v1.8.13 github.com/coder/wgtunnel v0.1.13-0.20240522110300-ade90dfb2da0 - github.com/coreos/go-oidc/v3 v3.14.1 + github.com/coreos/go-oidc/v3 v3.15.0 github.com/coreos/go-systemd v0.0.0-20191104093116-d3cd4ed1dbcf github.com/creack/pty v1.1.21 github.com/dave/dst v0.27.2 @@ -116,7 +116,7 @@ require ( github.com/fatih/color v1.18.0 github.com/fatih/structs v1.1.0 github.com/fatih/structtag v1.2.0 - github.com/fergusstrange/embedded-postgres v1.31.0 + github.com/fergusstrange/embedded-postgres v1.32.0 github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa github.com/gen2brain/beeep v0.11.1 github.com/gliderlabs/ssh v0.3.8 @@ -165,9 +165,9 @@ require ( github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e github.com/pkg/sftp v1.13.7 github.com/prometheus-community/pro-bing v0.7.0 - github.com/prometheus/client_golang v1.22.0 - github.com/prometheus/client_model v0.6.1 - github.com/prometheus/common v0.63.0 + github.com/prometheus/client_golang v1.23.0 + github.com/prometheus/client_model v0.6.2 + github.com/prometheus/common v0.65.0 github.com/quasilyte/go-ruleguard/dsl v0.3.22 github.com/robfig/cron/v3 v3.0.1 github.com/shirou/gopsutil/v4 v4.25.4 @@ -195,19 +195,19 @@ require ( go.uber.org/goleak v1.3.1-0.20240429205332-517bace7cc29 go.uber.org/mock v0.5.0 go4.org/netipx v0.0.0-20230728180743-ad4cb58a6516 - golang.org/x/crypto v0.40.0 + golang.org/x/crypto v0.41.0 golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 - golang.org/x/mod v0.26.0 - golang.org/x/net v0.42.0 + golang.org/x/mod v0.27.0 + golang.org/x/net v0.43.0 golang.org/x/oauth2 v0.30.0 golang.org/x/sync v0.16.0 - golang.org/x/sys v0.34.0 - golang.org/x/term v0.33.0 - golang.org/x/text v0.27.0 - golang.org/x/tools v0.35.0 + golang.org/x/sys v0.35.0 + golang.org/x/term v0.34.0 + golang.org/x/text v0.28.0 + golang.org/x/tools v0.36.0 golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da - google.golang.org/api v0.242.0 - google.golang.org/grpc v1.73.0 + google.golang.org/api v0.246.0 + google.golang.org/grpc v1.74.2 google.golang.org/protobuf v1.36.6 gopkg.in/DataDog/dd-trace-go.v1 v1.74.0 gopkg.in/natefinch/lumberjack.v2 v2.2.1 @@ -219,7 +219,7 @@ require ( ) require ( - cloud.google.com/go/auth v0.16.2 // indirect + cloud.google.com/go/auth v0.16.3 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect cloud.google.com/go/logging v1.13.0 // indirect cloud.google.com/go/longrunning v0.6.7 // indirect @@ -255,20 +255,20 @@ require ( github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/armon/go-radix v1.0.1-0.20221118154546-54df44f2176c // indirect github.com/atotto/clipboard v0.1.4 // indirect - github.com/aws/aws-sdk-go-v2 v1.36.4 - github.com/aws/aws-sdk-go-v2/config v1.29.14 - github.com/aws/aws-sdk-go-v2/credentials v1.17.67 // indirect - github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30 // indirect - github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.5.1 - github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34 // indirect - github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34 // indirect + github.com/aws/aws-sdk-go-v2 v1.37.2 + github.com/aws/aws-sdk-go-v2/config v1.30.2 + github.com/aws/aws-sdk-go-v2/credentials v1.18.2 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.1 // indirect + github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.6.2 + github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.1 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.1 // indirect github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.0 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.1 // indirect github.com/aws/aws-sdk-go-v2/service/ssm v1.52.4 // indirect - github.com/aws/aws-sdk-go-v2/service/sso v1.25.3 // indirect - github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.1 // indirect - github.com/aws/aws-sdk-go-v2/service/sts v1.33.19 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.26.1 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.31.1 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.35.1 // indirect github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect github.com/aymerick/douceur v0.2.0 // indirect github.com/beorn7/perks v1.0.1 // indirect @@ -309,7 +309,7 @@ require ( github.com/go-playground/universal-translator v0.18.1 // indirect github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect github.com/go-test/deep v1.1.0 // indirect - github.com/go-viper/mapstructure/v2 v2.3.0 // indirect + github.com/go-viper/mapstructure/v2 v2.4.0 // indirect github.com/gobwas/glob v0.2.3 // indirect github.com/gobwas/httphead v0.1.0 // indirect github.com/gobwas/pool v0.2.1 // indirect @@ -326,7 +326,7 @@ require ( github.com/google/s2a-go v0.1.9 // indirect github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect - github.com/googleapis/gax-go/v2 v2.14.2 // indirect + github.com/googleapis/gax-go/v2 v2.15.0 // indirect github.com/gorilla/css v1.0.1 // indirect github.com/gorilla/mux v1.8.1 // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.1 // indirect @@ -365,7 +365,7 @@ require ( github.com/mdlayher/netlink v1.7.2 // indirect github.com/mdlayher/sdnotify v1.0.0 // indirect github.com/mdlayher/socket v0.5.0 // indirect - github.com/microcosm-cc/bluemonday v1.0.27 // indirect + github.com/microcosm-cc/bluemonday v1.0.27 github.com/miekg/dns v1.1.57 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect @@ -394,7 +394,7 @@ require ( github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect - github.com/prometheus/procfs v0.15.1 // indirect + github.com/prometheus/procfs v0.16.1 // indirect github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect github.com/riandyrn/otelchi v0.5.1 // indirect github.com/richardartoul/molecule v1.0.1-0.20240531184615-7ca0df43c0b3 // indirect @@ -454,9 +454,9 @@ require ( golang.zx2c4.com/wireguard/wgctrl v0.0.0-20230429144221-925a1e7659e6 // indirect golang.zx2c4.com/wireguard/windows v0.5.3 // indirect google.golang.org/appengine v1.6.8 // indirect - google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect + google.golang.org/genproto v0.0.0-20250603155806-513f23925822 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect howett.net/plist v1.0.0 // indirect @@ -471,22 +471,24 @@ require github.com/SherClockHolmes/webpush-go v1.4.0 require ( github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect github.com/charmbracelet/x/cellbuf v0.0.13 // indirect - github.com/go-json-experiment/json v0.0.0-20250223041408-d3c622f1b874 // indirect + github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2 // indirect github.com/golang-jwt/jwt/v5 v5.2.2 // indirect github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect ) require ( + github.com/anthropics/anthropic-sdk-go v1.4.0 + github.com/brianvoe/gofakeit/v7 v7.3.0 github.com/coder/agentapi-sdk-go v0.0.0-20250505131810-560d1d88d225 github.com/coder/aisdk-go v0.0.9 - github.com/coder/preview v1.0.3-0.20250714153828-a737d4750448 + github.com/coder/preview v1.0.3 github.com/fsnotify/fsnotify v1.9.0 github.com/go-git/go-git/v5 v5.16.2 - github.com/mark3labs/mcp-go v0.34.0 + github.com/mark3labs/mcp-go v0.37.0 ) require ( - cel.dev/expr v0.23.0 // indirect + cel.dev/expr v0.24.0 // indirect cloud.google.com/go v0.120.0 // indirect cloud.google.com/go/iam v1.5.2 // indirect cloud.google.com/go/monitoring v1.24.2 // indirect @@ -499,13 +501,14 @@ require ( github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.50.0 // indirect github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.50.0 // indirect github.com/Masterminds/semver/v3 v3.3.1 // indirect - github.com/anthropics/anthropic-sdk-go v1.4.0 // indirect github.com/aquasecurity/go-version v0.0.1 // indirect github.com/aquasecurity/trivy v0.58.2 // indirect github.com/aws/aws-sdk-go v1.55.7 // indirect + github.com/bahlo/generic-list-go v0.2.0 // indirect github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect + github.com/buger/jsonparser v1.1.1 // indirect github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf // indirect - github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f // indirect + github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 // indirect github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da // indirect github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect @@ -513,8 +516,9 @@ require ( github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect github.com/go-git/go-billy/v5 v5.6.2 // indirect github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 // indirect - github.com/hashicorp/go-getter v1.7.8 // indirect + github.com/hashicorp/go-getter v1.7.9 // indirect github.com/hashicorp/go-safetemp v1.0.0 // indirect + github.com/invopop/jsonschema v0.13.0 // indirect github.com/jackmordaunt/icns/v3 v3.0.1 // indirect github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect github.com/klauspost/cpuid/v2 v2.2.10 // indirect @@ -530,9 +534,10 @@ require ( github.com/tidwall/sjson v1.2.5 // indirect github.com/tmaxmax/go-sse v0.10.0 // indirect github.com/ulikunitz/xz v0.5.12 // indirect + github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect github.com/yosida95/uritemplate/v3 v3.0.2 // indirect github.com/zeebo/xxh3 v1.0.2 // indirect - go.opentelemetry.io/contrib/detectors/gcp v1.35.0 // indirect + go.opentelemetry.io/contrib/detectors/gcp v1.36.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 // indirect go.opentelemetry.io/otel/sdk/metric v1.37.0 // indirect google.golang.org/genai v1.12.0 // indirect diff --git a/go.sum b/go.sum index 7371b07f7f973..4bc0e0336ab06 100644 --- a/go.sum +++ b/go.sum @@ -1,7 +1,7 @@ cdr.dev/slog v1.6.2-0.20250703074222-9df5e0a6c145 h1:Mk4axSLxKw3hjkf3PffBLQYta7nPVIWObuKCPDWgQLc= cdr.dev/slog v1.6.2-0.20250703074222-9df5e0a6c145/go.mod h1:NaoTA7KwopCrnaSb0JXTC0PTp/O/Y83Lndnq0OEV3ZQ= -cel.dev/expr v0.23.0 h1:wUb94w6OYQS4uXraxo9U+wUAs9jT47Xvl4iPgAwM2ss= -cel.dev/expr v0.23.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= +cel.dev/expr v0.24.0 h1:56OvJKSH3hDGL0ml5uSxZmz3/3Pq4tJ+fb1unVLAFcY= +cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= @@ -101,8 +101,8 @@ cloud.google.com/go/assuredworkloads v1.7.0/go.mod h1:z/736/oNmtGAyU47reJgGN+KVo cloud.google.com/go/assuredworkloads v1.8.0/go.mod h1:AsX2cqyNCOvEQC8RMPnoc0yEarXQk6WEKkxYfL6kGIo= cloud.google.com/go/assuredworkloads v1.9.0/go.mod h1:kFuI1P78bplYtT77Tb1hi0FMxM0vVpRC7VVoJC3ZoT0= cloud.google.com/go/assuredworkloads v1.10.0/go.mod h1:kwdUQuXcedVdsIaKgKTp9t0UJkE5+PAVNhdQm4ZVq2E= -cloud.google.com/go/auth v0.16.2 h1:QvBAGFPLrDeoiNjyfVunhQ10HKNYuOwZ5noee0M5df4= -cloud.google.com/go/auth v0.16.2/go.mod h1:sRBas2Y1fB1vZTdurouM0AzuYQBMZinrUYL8EufhtEA= +cloud.google.com/go/auth v0.16.3 h1:kabzoQ9/bobUmnseYnBO6qQG7q4a/CffFRlJSxv2wCc= +cloud.google.com/go/auth v0.16.3/go.mod h1:NucRGjaXfzP1ltpcQ7On/VTZ0H4kWB5Jy+Y9Dnm76fA= cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= cloud.google.com/go/automl v1.5.0/go.mod h1:34EjfoFGMZ5sgJ9EoLsRtdPSNZLcfflJR39VbVNS2M0= @@ -184,8 +184,8 @@ cloud.google.com/go/compute/metadata v0.1.0/go.mod h1:Z1VN+bulIf6bt4P/C37K4DyZYZ cloud.google.com/go/compute/metadata v0.2.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= cloud.google.com/go/compute/metadata v0.2.1/go.mod h1:jgHgmJd2RKBGzXqF5LR2EZMGxBkeanZ9wwa75XHJgOM= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= -cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU= -cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo= +cloud.google.com/go/compute/metadata v0.8.0 h1:HxMRIbao8w17ZX6wBnjhcDkW6lTFpgcaobyVfZWqRLA= +cloud.google.com/go/compute/metadata v0.8.0/go.mod h1:sYOGTp851OV9bOFJ9CH7elVvyzopvWQFNNghtDQ/Biw= cloud.google.com/go/contactcenterinsights v1.3.0/go.mod h1:Eu2oemoePuEFc/xKFPjbTuPSj0fYJcPls9TFlPNnHHY= cloud.google.com/go/contactcenterinsights v1.4.0/go.mod h1:L2YzkGbPsv+vMQMCADxJoT9YiTTnSEd6fEvCeHTYVck= cloud.google.com/go/contactcenterinsights v1.6.0/go.mod h1:IIDlT6CLcDoyv79kDv8iWxMSTZhLxSCofVV5W6YFM/w= @@ -754,42 +754,44 @@ github.com/awalterschulze/gographviz v2.0.3+incompatible/go.mod h1:GEV5wmg4YquNw github.com/aws/aws-sdk-go v1.44.122/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= github.com/aws/aws-sdk-go v1.55.7 h1:UJrkFq7es5CShfBwlWAC8DA077vp8PyVbQd3lqLiztE= github.com/aws/aws-sdk-go v1.55.7/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU= -github.com/aws/aws-sdk-go-v2 v1.36.4 h1:GySzjhVvx0ERP6eyfAbAuAXLtAda5TEy19E5q5W8I9E= -github.com/aws/aws-sdk-go-v2 v1.36.4/go.mod h1:LLXuLpgzEbD766Z5ECcRmi8AzSwfZItDtmABVkRLGzg= -github.com/aws/aws-sdk-go-v2/config v1.29.14 h1:f+eEi/2cKCg9pqKBoAIwRGzVb70MRKqWX4dg1BDcSJM= -github.com/aws/aws-sdk-go-v2/config v1.29.14/go.mod h1:wVPHWcIFv3WO89w0rE10gzf17ZYy+UVS1Geq8Iei34g= -github.com/aws/aws-sdk-go-v2/credentials v1.17.67 h1:9KxtdcIA/5xPNQyZRgUSpYOE6j9Bc4+D7nZua0KGYOM= -github.com/aws/aws-sdk-go-v2/credentials v1.17.67/go.mod h1:p3C44m+cfnbv763s52gCqrjaqyPikj9Sg47kUVaNZQQ= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30 h1:x793wxmUWVDhshP8WW2mlnXuFrO4cOd3HLBroh1paFw= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30/go.mod h1:Jpne2tDnYiFascUEs2AWHJL9Yp7A5ZVy3TNyxaAjD6M= -github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.5.1 h1:yg6nrV33ljY6CppoRnnsKLqIZ5ExNdQOGRBGNfc56Yw= -github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.5.1/go.mod h1:hGdIV5nndhIclFFvI1apVfQWn9ZKqedykZ1CtLZd03E= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34 h1:ZK5jHhnrioRkUNOc+hOgQKlUL5JeC3S6JgLxtQ+Rm0Q= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34/go.mod h1:p4VfIceZokChbA9FzMbRGz5OV+lekcVtHlPKEO0gSZY= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34 h1:SZwFm17ZUNNg5Np0ioo/gq8Mn6u9w19Mri8DnJ15Jf0= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.34/go.mod h1:dFZsC0BLo346mvKQLWmoJxT+Sjp+qcVR1tRVHQGOH9Q= +github.com/aws/aws-sdk-go-v2 v1.37.2 h1:xkW1iMYawzcmYFYEV0UCMxc8gSsjCGEhBXQkdQywVbo= +github.com/aws/aws-sdk-go-v2 v1.37.2/go.mod h1:9Q0OoGQoboYIAJyslFyF1f5K1Ryddop8gqMhWx/n4Wg= +github.com/aws/aws-sdk-go-v2/config v1.30.2 h1:YE1BmSc4fFYqFgN1mN8uzrtc7R9x+7oSWeX8ckoltAw= +github.com/aws/aws-sdk-go-v2/config v1.30.2/go.mod h1:UNrLGZ6jfAVjgVJpkIxjLufRJqTXCVYOpkeVf83kwBo= +github.com/aws/aws-sdk-go-v2/credentials v1.18.2 h1:mfm0GKY/PHLhs7KO0sUaOtFnIQ15Qqxt+wXbO/5fIfs= +github.com/aws/aws-sdk-go-v2/credentials v1.18.2/go.mod h1:v0SdJX6ayPeZFQxgXUKw5RhLpAoZUuynxWDfh8+Eknc= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.1 h1:owmNBboeA0kHKDcdF8KiSXmrIuXZustfMGGytv6OMkM= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.1/go.mod h1:Bg1miN59SGxrZqlP8vJZSmXW+1N8Y1MjQDq1OfuNod8= +github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.6.2 h1:QbFjOdplTkOgviHNKyTW/TZpvIYhD6lqEc3tkIvqMoQ= +github.com/aws/aws-sdk-go-v2/feature/rds/auth v1.6.2/go.mod h1:d0pTYUeTv5/tPSlbPZZQSqssM158jZBs02jx2LDslM8= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.1 h1:ksZXBYv80EFTcgc8OJO48aQ8XDWXIQL7gGasPeCoTzI= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.1/go.mod h1:HSksQyyJETVZS7uM54cir0IgxttTD+8aEoJMPGepHBI= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.1 h1:+dn/xF/05utS7tUhjIcndbuaPjfll2LhbH1cCDGLYUQ= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.1/go.mod h1:hyAGz30LHdm5KBZDI58MXx5lDVZ5CUfvfTZvMu4HCZo= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 h1:bIqFDwgGXXN1Kpp99pDOdKMTTb5d2KyU5X/BZxjOkRo= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3/go.mod h1:H5O/EsxDWyU+LP/V8i5sm8cxoZgc2fdNR9bxlOFrQTo= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3 h1:eAh2A4b5IzM/lum78bZ590jy36+d/aFLgKF/4Vd1xPE= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3/go.mod h1:0yKJC/kb8sAnmlYa6Zs3QVYqaC8ug2AbnNChv5Ox3uA= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15 h1:dM9/92u2F1JbDaGooxTq18wmmFzbJRfXfVfy96/1CXM= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15/go.mod h1:SwFBy2vjtA0vZbjjaFtfN045boopadnoVPhu4Fv66vY= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.0 h1:6+lZi2JeGKtCraAj1rpoZfKqnQ9SptseRZioejfUOLM= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.0/go.mod h1:eb3gfbVIxIoGgJsi9pGne19dhCBpK6opTYpQqAmdy44= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.1 h1:ky79ysLMxhwk5rxJtS+ILd3Mc8kC5fhsLBrP27r6h4I= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.1/go.mod h1:+2MmkvFvPYM1vsozBWduoLJUi5maxFk5B7KJFECujhY= github.com/aws/aws-sdk-go-v2/service/ssm v1.52.4 h1:hgSBvRT7JEWx2+vEGI9/Ld5rZtl7M5lu8PqdvOmbRHw= github.com/aws/aws-sdk-go-v2/service/ssm v1.52.4/go.mod h1:v7NIzEFIHBiicOMaMTuEmbnzGnqW0d+6ulNALul6fYE= -github.com/aws/aws-sdk-go-v2/service/sso v1.25.3 h1:1Gw+9ajCV1jogloEv1RRnvfRFia2cL6c9cuKV2Ps+G8= -github.com/aws/aws-sdk-go-v2/service/sso v1.25.3/go.mod h1:qs4a9T5EMLl/Cajiw2TcbNt2UNo/Hqlyp+GiuG4CFDI= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.1 h1:hXmVKytPfTy5axZ+fYbR5d0cFmC3JvwLm5kM83luako= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.1/go.mod h1:MlYRNmYu/fGPoxBQVvBYr9nyr948aY/WLUvwBMBJubs= -github.com/aws/aws-sdk-go-v2/service/sts v1.33.19 h1:1XuUZ8mYJw9B6lzAkXhqHlJd/XvaX32evhproijJEZY= -github.com/aws/aws-sdk-go-v2/service/sts v1.33.19/go.mod h1:cQnB8CUnxbMU82JvlqjKR2HBOm3fe9pWorWBza6MBJ4= -github.com/aws/smithy-go v1.22.3 h1:Z//5NuZCSW6R4PhQ93hShNbyBbn8BWCmCVCt+Q8Io5k= -github.com/aws/smithy-go v1.22.3/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI= +github.com/aws/aws-sdk-go-v2/service/sso v1.26.1 h1:uWaz3DoNK9MNhm7i6UGxqufwu3BEuJZm72WlpGwyVtY= +github.com/aws/aws-sdk-go-v2/service/sso v1.26.1/go.mod h1:ILpVNjL0BO+Z3Mm0SbEeUoYS9e0eJWV1BxNppp0fcb8= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.31.1 h1:XdG6/o1/ZDmn3wJU5SRAejHaWgKS4zHv0jBamuKuS2k= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.31.1/go.mod h1:oiotGTKadCOCl3vg/tYh4k45JlDF81Ka8rdumNhEnIQ= +github.com/aws/aws-sdk-go-v2/service/sts v1.35.1 h1:iF4Xxkc0H9c/K2dS0zZw3SCkj0Z7n6AMnUiiyoJND+I= +github.com/aws/aws-sdk-go-v2/service/sts v1.35.1/go.mod h1:0bxIatfN0aLq4mjoLDeBpOjOke68OsFlXPDFJ7V0MYw= +github.com/aws/smithy-go v1.22.5 h1:P9ATCXPMb2mPjYBgueqJNCA5S9UfktsW0tTxi+a7eqw= +github.com/aws/smithy-go v1.22.5/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI= github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= github.com/aymanbagabas/go-udiff v0.2.0 h1:TK0fH4MteXUDspT88n8CKzvK0X9O2xu9yQjWpi6yML8= github.com/aymanbagabas/go-udiff v0.2.0/go.mod h1:RE4Ex0qsGkTAJoQdQQCA0uG+nAzJO/pI/QwceO5fgrA= github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= +github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk= +github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bep/clocks v0.5.0 h1:hhvKVGLPQWRVsBP/UB7ErrHYIO42gINVbvqxvYTPVps= @@ -828,6 +830,10 @@ github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8= github.com/bramvdbogaerde/go-scp v1.5.0 h1:a9BinAjTfQh273eh7vd3qUgmBC+bx+3TRDtkZWmIpzM= github.com/bramvdbogaerde/go-scp v1.5.0/go.mod h1:on2aH5AxaFb2G0N5Vsdy6B0Ml7k9HuHSwfo1y0QzAbQ= +github.com/brianvoe/gofakeit/v7 v7.3.0 h1:TWStf7/lLpAjKw+bqwzeORo9jvrxToWEwp9b1J2vApQ= +github.com/brianvoe/gofakeit/v7 v7.3.0/go.mod h1:QXuPeBw164PJCzCUZVmgpgHJ3Llj49jSLVkKPMtxtxA= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/bytecodealliance/wasmtime-go/v3 v3.0.2 h1:3uZCA/BLTIu+DqCfguByNMJa2HVHpXvjfy0Dy7g6fuA= github.com/bytecodealliance/wasmtime-go/v3 v3.0.2/go.mod h1:RnUjnIXxEJcL6BgCvNyzCCRzZcxCgsZCi+RNlvYor5Q= github.com/cakturk/go-netstat v0.0.0-20200220111822-e5b49efee7a5 h1:BjkPE3785EwPhhyuFkbINB+2a1xATwk8SNDWnJiD41g= @@ -861,10 +867,10 @@ github.com/charmbracelet/x/exp/slice v0.0.0-20250327172914-2fdc97757edf/go.mod h github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ= github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg= github.com/cheggaaa/pb v1.0.27/go.mod h1:pQciLPpbU0oxA0h+VJYYLxO+XeDQb5pZijXscXHm81s= -github.com/chromedp/cdproto v0.0.0-20250319231242-a755498943c8 h1:AqW2bDQf67Zbq6Tpop/+yJSIknxhiQecO2B8jNYTAPs= -github.com/chromedp/cdproto v0.0.0-20250319231242-a755498943c8/go.mod h1:NItd7aLkcfOA/dcMXvl8p1u+lQqioRMq/SqDp71Pb/k= -github.com/chromedp/chromedp v0.13.3 h1:c6nTn97XQBykzcXiGYL5LLebw3h3CEyrCihm4HquYh0= -github.com/chromedp/chromedp v0.13.3/go.mod h1:khsDP9OP20GrowpJfZ7N05iGCwcAYxk7qf9AZBzR3Qw= +github.com/chromedp/cdproto v0.0.0-20250724212937-08a3db8b4327 h1:UQ4AU+BGti3Sy/aLU8KVseYKNALcX9UXY6DfpwQ6J8E= +github.com/chromedp/cdproto v0.0.0-20250724212937-08a3db8b4327/go.mod h1:NItd7aLkcfOA/dcMXvl8p1u+lQqioRMq/SqDp71Pb/k= +github.com/chromedp/chromedp v0.14.1 h1:0uAbnxewy/Q+Bg7oafVePE/6EXEho9hnaC38f+TTENg= +github.com/chromedp/chromedp v0.14.1/go.mod h1:rHzAv60xDE7VNy/MYtTUrYreSc0ujt2O1/C3bzctYBo= github.com/chromedp/sysutil v1.1.0 h1:PUFNv5EcprjqXZD9nJb9b/c9ibAbxiYo4exNWZyipwM= github.com/chromedp/sysutil v1.1.0/go.mod h1:WiThHUdltqCNKGc4gaU50XgYjwjYIhKWoHGPTUfWTJ8= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= @@ -893,8 +899,8 @@ github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20220314180256-7f1daf1720fc/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20230105202645-06c439db220b/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f h1:C5bqEmzEPLsHm9Mv73lSE9e9bKV23aB1vxOsmZrkl3k= -github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= +github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv1aFbZMiM9vblcSArJRf2Irls= +github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/coder/agentapi-sdk-go v0.0.0-20250505131810-560d1d88d225 h1:tRIViZ5JRmzdOEo5wUWngaGEFBG8OaE1o2GIHN5ujJ8= github.com/coder/agentapi-sdk-go v0.0.0-20250505131810-560d1d88d225/go.mod h1:rNLVpYgEVeu1Zk29K64z6Od8RBP9DwqCu9OfCzh8MR4= github.com/coder/aisdk-go v0.0.9 h1:Vzo/k2qwVGLTR10ESDeP2Ecek1SdPfZlEjtTfMveiVo= @@ -912,12 +918,12 @@ github.com/coder/go-scim/pkg/v2 v2.0.0-20230221055123-1d63c1222136 h1:0RgB61LcNs github.com/coder/go-scim/pkg/v2 v2.0.0-20230221055123-1d63c1222136/go.mod h1:VkD1P761nykiq75dz+4iFqIQIZka189tx1BQLOp0Skc= github.com/coder/guts v1.5.0 h1:a94apf7xMf5jDdg1bIHzncbRiTn3+BvBZgrFSDbUnyI= github.com/coder/guts v1.5.0/go.mod h1:0Sbv5Kp83u1Nl7MIQiV2zmacJ3o02I341bkWkjWXSUQ= -github.com/coder/pq v1.10.5-0.20250630052411-a259f96b6102 h1:ahTJlTRmTogsubgRVGOUj40dg62WvqPQkzTQP7pyepI= -github.com/coder/pq v1.10.5-0.20250630052411-a259f96b6102/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/coder/pq v1.10.5-0.20250807075151-6ad9b0a25151 h1:YAxwg3lraGNRwoQ18H7R7n+wsCqNve7Brdvj0F1rDnU= +github.com/coder/pq v1.10.5-0.20250807075151-6ad9b0a25151/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0 h1:3A0ES21Ke+FxEM8CXx9n47SZOKOpgSE1bbJzlE4qPVs= github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0/go.mod h1:5UuS2Ts+nTToAMeOjNlnHFkPahrtDkmpydBen/3wgZc= -github.com/coder/preview v1.0.3-0.20250714153828-a737d4750448 h1:S86sFp4Dr4dUn++fXOMOTu6ClnEZ/NrGCYv7bxZjYYc= -github.com/coder/preview v1.0.3-0.20250714153828-a737d4750448/go.mod h1:hQtBEqOFMJ3SHl9Q9pVvDA9CpeCEXBwbONNK29+3MLk= +github.com/coder/preview v1.0.3 h1:et0/frnLB68PPwsGaa1KAZQdBKBxNSqzMplYKsBpcNA= +github.com/coder/preview v1.0.3/go.mod h1:hQtBEqOFMJ3SHl9Q9pVvDA9CpeCEXBwbONNK29+3MLk= github.com/coder/quartz v0.2.1 h1:QgQ2Vc1+mvzewg2uD/nj8MJ9p9gE+QhGJm+Z+NGnrSE= github.com/coder/quartz v0.2.1/go.mod h1:vsiCc+AHViMKH2CQpGIpFgdHIEQsxwm8yCscqKmzbRA= github.com/coder/retry v1.5.1 h1:iWu8YnD8YqHs3XwqrqsjoBTAVqT9ml6z9ViJ2wlMiqc= @@ -926,12 +932,12 @@ github.com/coder/serpent v0.10.0 h1:ofVk9FJXSek+SmL3yVE3GoArP83M+1tX+H7S4t8BSuM= github.com/coder/serpent v0.10.0/go.mod h1:cZFW6/fP+kE9nd/oRkEHJpG6sXCtQ+AX7WMMEHv0Y3Q= github.com/coder/ssh v0.0.0-20231128192721-70855dedb788 h1:YoUSJ19E8AtuUFVYBpXuOD6a/zVP3rcxezNsoDseTUw= github.com/coder/ssh v0.0.0-20231128192721-70855dedb788/go.mod h1:aGQbuCLyhRLMzZF067xc84Lh7JDs1FKwCmF1Crl9dxQ= -github.com/coder/tailscale v1.1.1-0.20250724015444-494197765996 h1:9x+ouDw9BKW1tdGzuQOWGMT2XkWLs+QQjeCrxYuU1lo= -github.com/coder/tailscale v1.1.1-0.20250724015444-494197765996/go.mod h1:l7ml5uu7lFh5hY28lGYM4b/oFSmuPHYX6uk4RAu23Lc= +github.com/coder/tailscale v1.1.1-0.20250729141742-067f1e5d9716 h1:hi7o0sA+RPBq8Rvvz+hNrC/OTL2897OKREMIRIuQeTs= +github.com/coder/tailscale v1.1.1-0.20250729141742-067f1e5d9716/go.mod h1:l7ml5uu7lFh5hY28lGYM4b/oFSmuPHYX6uk4RAu23Lc= github.com/coder/terraform-config-inspect v0.0.0-20250107175719-6d06d90c630e h1:JNLPDi2P73laR1oAclY6jWzAbucf70ASAvf5mh2cME0= github.com/coder/terraform-config-inspect v0.0.0-20250107175719-6d06d90c630e/go.mod h1:Gz/z9Hbn+4KSp8A2FBtNszfLSdT2Tn/uAKGuVqqWmDI= -github.com/coder/terraform-provider-coder/v2 v2.9.0 h1:nd9d1/qHTdx5foBLZoy0SWCc0W13GQUbPTzeGsuLlU0= -github.com/coder/terraform-provider-coder/v2 v2.9.0/go.mod h1:f8xPh0riDTRwqoPWkjas5VgIBaiRiWH+STb0TZw2fgY= +github.com/coder/terraform-provider-coder/v2 v2.10.0 h1:cGPMfARGHKb80kZsbDX/t/YKwMOwI5zkIyVCQziHR2M= +github.com/coder/terraform-provider-coder/v2 v2.10.0/go.mod h1:f8xPh0riDTRwqoPWkjas5VgIBaiRiWH+STb0TZw2fgY= github.com/coder/trivy v0.0.0-20250527170238-9416a59d7019 h1:MHkv/W7l9eRAN9gOG0qZ1TLRGWIIfNi92273vPAQ8Fs= github.com/coder/trivy v0.0.0-20250527170238-9416a59d7019/go.mod h1:eqk+w9RLBmbd/cB5XfPZFuVn77cf/A6fB7qmEVeSmXk= github.com/coder/websocket v1.8.13 h1:f3QZdXy7uGVz+4uCJy2nTZyM0yTBj8yANEHhqlXZ9FE= @@ -948,8 +954,8 @@ github.com/containerd/platforms v1.0.0-rc.1 h1:83KIq4yy1erSRgOVHNk1HYdPvzdJ5CnsW github.com/containerd/platforms v1.0.0-rc.1/go.mod h1:J71L7B+aiM5SdIEqmd9wp6THLVRzJGXfNuWCZCllLA4= github.com/coreos/go-iptables v0.6.0 h1:is9qnZMPYjLd8LYqmm/qlE+wwEgJIkTYdhV3rfZo4jk= github.com/coreos/go-iptables v0.6.0/go.mod h1:Qe8Bv2Xik5FyTXwgIbLAnv2sWSBmvWdFETJConOQ//Q= -github.com/coreos/go-oidc/v3 v3.14.1 h1:9ePWwfdwC4QKRlCXsJGou56adA/owXczOzwKdOumLqk= -github.com/coreos/go-oidc/v3 v3.14.1/go.mod h1:HaZ3szPaZ0e4r6ebqvsLWlk2Tn+aejfmrfah6hnSYEU= +github.com/coreos/go-oidc/v3 v3.15.0 h1:R6Oz8Z4bqWR7VFQ+sPSvZPQv4x8M+sJkDO5ojgwlyAg= +github.com/coreos/go-oidc/v3 v3.15.0/go.mod h1:HaZ3szPaZ0e4r6ebqvsLWlk2Tn+aejfmrfah6hnSYEU= github.com/coreos/go-systemd v0.0.0-20191104093116-d3cd4ed1dbcf h1:iW4rZ826su+pqaw19uhpSCzhj44qo35pNgKFGqzDKkU= github.com/coreos/go-systemd v0.0.0-20191104093116-d3cd4ed1dbcf/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA= @@ -1054,8 +1060,8 @@ github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4 github.com/felixge/httpsnoop v1.0.2/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/fergusstrange/embedded-postgres v1.31.0 h1:JmRxw2BcPRcU141nOEuGXbIU6jsh437cBB40rmftZSk= -github.com/fergusstrange/embedded-postgres v1.31.0/go.mod h1:w0YvnCgf19o6tskInrOOACtnqfVlOvluz3hlNLY7tRk= +github.com/fergusstrange/embedded-postgres v1.32.0 h1:kh2ozEvAx2A0LoIJZEGNwHmoFTEQD243KrHjifcYGMo= +github.com/fergusstrange/embedded-postgres v1.32.0/go.mod h1:w0YvnCgf19o6tskInrOOACtnqfVlOvluz3hlNLY7tRk= github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= @@ -1109,8 +1115,8 @@ github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A= github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= github.com/go-jose/go-jose/v4 v4.1.0 h1:cYSYxd3pw5zd2FSXk2vGdn9igQU2PS8MuxrCOCl0FdY= github.com/go-jose/go-jose/v4 v4.1.0/go.mod h1:GG/vqmYm3Von2nYiB2vGTXzdoNKE5tix5tuc6iAd+sw= -github.com/go-json-experiment/json v0.0.0-20250223041408-d3c622f1b874 h1:F8d1AJ6M9UQCavhwmO6ZsrYLfG8zVFWfEfMS2MXPkSY= -github.com/go-json-experiment/json v0.0.0-20250223041408-d3c622f1b874/go.mod h1:TiCD2a1pcmjd7YnhGH0f/zKNcCD06B029pHhzV23c2M= +github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2 h1:iizUGZ9pEquQS5jTGkh4AqeeHCMbfbjeb0zMt0aEFzs= +github.com/go-json-experiment/json v0.0.0-20250725192818-e39067aee2d2/go.mod h1:TiCD2a1pcmjd7YnhGH0f/zKNcCD06B029pHhzV23c2M= github.com/go-latex/latex v0.0.0-20210118124228-b3d85cf34e07/go.mod h1:CO1AlKB2CSIqUrmQPqA0gdRIlnLEY0gK5JGjh37zN5U= github.com/go-latex/latex v0.0.0-20210823091927-c0d11ff05a81/go.mod h1:SX0U8uGpxhq9o2S/CELCSUxEWWAuoCUcVCQWv7G2OCk= github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= @@ -1148,8 +1154,8 @@ github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpv github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= github.com/go-test/deep v1.1.0 h1:WOcxcdHcvdgThNXjw0t76K42FXTU7HpNQWHpA2HHNlg= github.com/go-test/deep v1.1.0/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= -github.com/go-viper/mapstructure/v2 v2.3.0 h1:27XbWsHIqhbdR5TIC911OfYvgSaW93HM+dX7970Q7jk= -github.com/go-viper/mapstructure/v2 v2.3.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= +github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/gobuffalo/flect v1.0.3 h1:xeWBM2nui+qnVvNM4S3foBhCAL2XgPU+a7FdpelbTq4= github.com/gobuffalo/flect v1.0.3/go.mod h1:A5msMlrHtLqh9umBSnvabjsMrCcCpAyzglnDvkbYKHs= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= @@ -1322,8 +1328,8 @@ github.com/googleapis/gax-go/v2 v2.5.1/go.mod h1:h6B0KMMFNtI2ddbGJn3T3ZbwkeT6yqE github.com/googleapis/gax-go/v2 v2.6.0/go.mod h1:1mjbznJAPHFpesgE5ucqfYEscaz5kMdcIDwU/6+DDoY= github.com/googleapis/gax-go/v2 v2.7.0/go.mod h1:TEop28CZZQ2y+c0VxMUmu1lV+fQx57QpBWsYpwqHJx8= github.com/googleapis/gax-go/v2 v2.7.1/go.mod h1:4orTrqY6hXxxaUL4LHIPl6lGo8vAE38/qKbhSAKP6QI= -github.com/googleapis/gax-go/v2 v2.14.2 h1:eBLnkZ9635krYIPD+ag1USrOAI0Nr0QYF3+/3GqO0k0= -github.com/googleapis/gax-go/v2 v2.14.2/go.mod h1:ON64QhlJkhVtSqp4v1uaK92VyZ2gmvDQsweuyLV+8+w= +github.com/googleapis/gax-go/v2 v2.15.0 h1:SyjDc1mGgZU5LncH8gimWo9lW1DtIfPibOG81vgd/bo= +github.com/googleapis/gax-go/v2 v2.15.0/go.mod h1:zVVkkxAQHa1RQpg9z2AUCMnKhi0Qld9rcmyfL1OZhoc= github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8= github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0= @@ -1347,8 +1353,8 @@ github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9n github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= github.com/hashicorp/go-cty v1.5.0 h1:EkQ/v+dDNUqnuVpmS5fPqyY71NXVgT5gf32+57xY8g0= github.com/hashicorp/go-cty v1.5.0/go.mod h1:lFUCG5kd8exDobgSfyj4ONE/dc822kiYMguVKdHGMLM= -github.com/hashicorp/go-getter v1.7.8 h1:mshVHx1Fto0/MydBekWan5zUipGq7jO0novchgMmSiY= -github.com/hashicorp/go-getter v1.7.8/go.mod h1:2c6CboOEb9jG6YvmC9xdD+tyAFsrUaJPedwXDGr0TM4= +github.com/hashicorp/go-getter v1.7.9 h1:G9gcjrDixz7glqJ+ll5IWvggSBR+R0B54DSRt4qfdC4= +github.com/hashicorp/go-getter v1.7.9/go.mod h1:dyFCmT1AQkDfOIt9NH8pw9XBDqNrIKJT5ylbpi7zPNE= github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= @@ -1413,6 +1419,8 @@ github.com/illarion/gonotify v1.0.1 h1:F1d+0Fgbq/sDWjj/r66ekjDG+IDeecQKUFH4wNwso github.com/illarion/gonotify v1.0.1/go.mod h1:zt5pmDofZpU1f8aqlK0+95eQhoEAn/d4G4B/FjVW4jE= github.com/insomniacslk/dhcp v0.0.0-20231206064809-8c70d406f6d2 h1:9K06NfxkBh25x56yVhWWlKFE8YpicaSfHwoV8SFbueA= github.com/insomniacslk/dhcp v0.0.0-20231206064809-8c70d406f6d2/go.mod h1:3A9PQ1cunSDF/1rbTq99Ts4pVnycWg+vlPkfeD2NLFI= +github.com/invopop/jsonschema v0.13.0 h1:KvpoAJWEjR3uD9Kbm2HWJmqsEaHt8lBUpd0qHcIi21E= +github.com/invopop/jsonschema v0.13.0/go.mod h1:ffZ5Km5SWWRAIN6wbDXItl95euhFz2uON45H2qjYt+0= github.com/jackmordaunt/icns/v3 v3.0.1 h1:xxot6aNuGrU+lNgxz5I5H0qSeCjNKp8uTXB1j8D4S3o= github.com/jackmordaunt/icns/v3 v3.0.1/go.mod h1:5sHL59nqTd2ynTnowxB/MDQFhKNqkK8X687uKNygaSQ= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= @@ -1503,8 +1511,8 @@ github.com/makeworld-the-better-one/dither/v2 v2.4.0 h1:Az/dYXiTcwcRSe59Hzw4RI1r github.com/makeworld-the-better-one/dither/v2 v2.4.0/go.mod h1:VBtN8DXO7SNtyGmLiGA7IsFeKrBkQPze1/iAeM95arc= github.com/marekm4/color-extractor v1.2.1 h1:3Zb2tQsn6bITZ8MBVhc33Qn1k5/SEuZ18mrXGUqIwn0= github.com/marekm4/color-extractor v1.2.1/go.mod h1:90VjmiHI6M8ez9eYUaXLdcKnS+BAOp7w+NpwBdkJmpA= -github.com/mark3labs/mcp-go v0.34.0 h1:eWy7WBGvhk6EyAAyVzivTCprE52iXJwNtvHV6Cv3bR0= -github.com/mark3labs/mcp-go v0.34.0/go.mod h1:rXqOudj/djTORU/ThxYx8fqEVj/5pvTuuebQ2RC7uk4= +github.com/mark3labs/mcp-go v0.37.0 h1:BywvZLPRT6Zx6mMG/MJfxLSZQkTGIcJSEGKsvr4DsoQ= +github.com/mark3labs/mcp-go v0.37.0/go.mod h1:T7tUa2jO6MavG+3P25Oy/jR7iCeJPHImCZHRymCn39g= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= @@ -1675,17 +1683,17 @@ github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 h1:o4JXh1EVt github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= github.com/prometheus-community/pro-bing v0.7.0 h1:KFYFbxC2f2Fp6c+TyxbCOEarf7rbnzr9Gw8eIb0RfZA= github.com/prometheus-community/pro-bing v0.7.0/go.mod h1:Moob9dvlY50Bfq6i88xIwfyw7xLFHH69LUgx9n5zqCE= -github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q= -github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0= +github.com/prometheus/client_golang v1.23.0 h1:ust4zpdl9r4trLY/gSjlm07PuiBq2ynaXXlptpfy8Uc= +github.com/prometheus/client_golang v1.23.0/go.mod h1:i/o0R9ByOnHX0McrTMTyhYvKE4haaf2mW08I+jGAjEE= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= -github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E= -github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY= -github.com/prometheus/common v0.63.0 h1:YR/EIY1o3mEFP/kZCD7iDMnLPlGyuU2Gb3HIcXnA98k= -github.com/prometheus/common v0.63.0/go.mod h1:VVFF/fBIoToEnWRVkYoXEkq3R3paCoxG9PXP74SnV18= -github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc= -github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= +github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= +github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= +github.com/prometheus/common v0.65.0 h1:QDwzd+G1twt//Kwj/Ww6E9FQq1iVMmODnILtW1t2VzE= +github.com/prometheus/common v0.65.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8= +github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg= +github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is= github.com/puzpuzpuz/xsync/v3 v3.5.1 h1:GJYJZwO6IdxN/IKbneznS6yPkVC+c3zyY/j19c++5Fg= github.com/puzpuzpuz/xsync/v3 v3.5.1/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA= github.com/quasilyte/go-ruleguard/dsl v0.3.22 h1:wd8zkOhSNr+I+8Qeciml08ivDt1pSXe60+5DqOpCjPE= @@ -1852,6 +1860,8 @@ github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAh github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= github.com/wagslane/go-password-validator v0.3.0 h1:vfxOPzGHkz5S146HDpavl0cw1DSVP061Ry2PX0/ON6I= github.com/wagslane/go-password-validator v0.3.0/go.mod h1:TI1XJ6T5fRdRnHqHt14pvy1tNVnrwe7m3/f1f2fDphQ= +github.com/wk8/go-ordered-map/v2 v2.1.8 h1:5h/BUHu93oj4gIdvHHHGsScSTMijfx5PeYkE/fJgbpc= +github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw= github.com/wlynxg/anet v0.0.3/go.mod h1:eay5PRQr7fIVAMbTbchTnO9gG65Hg/uYGdc7mguHxoA= github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= @@ -1940,8 +1950,8 @@ go.opentelemetry.io/collector/semconv v0.123.0/go.mod h1:te6VQ4zZJO5Lp8dM2XIhDxD go.opentelemetry.io/contrib v1.0.0/go.mod h1:EH4yDYeNoaTqn/8yCWQmfNB78VHfGX2Jt2bvnvzBlGM= go.opentelemetry.io/contrib v1.19.0 h1:rnYI7OEPMWFeM4QCqWQ3InMJ0arWMR1i0Cx9A5hcjYM= go.opentelemetry.io/contrib v1.19.0/go.mod h1:gIzjwWFoGazJmtCaDgViqOSJPde2mCWzv60o0bWPcZs= -go.opentelemetry.io/contrib/detectors/gcp v1.35.0 h1:bGvFt68+KTiAKFlacHW6AhA56GF2rS0bdD3aJYEnmzA= -go.opentelemetry.io/contrib/detectors/gcp v1.35.0/go.mod h1:qGWP8/+ILwMRIUf9uIVLloR1uo5ZYAslM4O6OqUi1DA= +go.opentelemetry.io/contrib/detectors/gcp v1.36.0 h1:F7q2tNlCaHY9nMKHR6XH9/qkp8FktLnIcy6jJNyOCQw= +go.opentelemetry.io/contrib/detectors/gcp v1.36.0/go.mod h1:IbBN8uAIIx734PTonTPxAxnjc2pQTxWNkwfstZ+6H2k= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 h1:q4XOmH/0opmeuJtPsbFNivyl7bCt7yRBbeEm2sC/XtQ= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0/go.mod h1:snMWehoOh2wsEwnvvwtDyFCxVeDAODenXHtn5vzrKjo= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus= @@ -2004,8 +2014,8 @@ golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDf golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= -golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM= -golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= +golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4= +golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -2070,8 +2080,8 @@ golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.26.0 h1:EGMPT//Ezu+ylkCijjPc+f4Aih7sZvaAr+O3EHBxvZg= -golang.org/x/mod v0.26.0/go.mod h1:/j6NAhSk8iQ723BGAUyoAcn7SlD7s15Dp9Nd/SfeaFQ= +golang.org/x/mod v0.27.0 h1:kb+q2PyFnEADO2IEF935ehFUXlWiNjJWtRNgBLSfbxQ= +golang.org/x/mod v0.27.0/go.mod h1:rWI627Fq0DEoudcK+MBkNkCe0EetEaDSwJJkCcjpazc= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -2134,8 +2144,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= -golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs= -golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8= +golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE= +golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -2286,8 +2296,8 @@ golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA= -golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= +golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -2306,8 +2316,8 @@ golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= -golang.org/x/term v0.33.0 h1:NuFncQrRcaRvVmgRkvM3j/F00gWIAlcmlB8ACEKmGIg= -golang.org/x/term v0.33.0/go.mod h1:s18+ql9tYWp1IfpV9DmCtQDDSRBUjKaw9M1eAv5UeF0= +golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4= +golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -2330,8 +2340,8 @@ golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= -golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4= -golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU= +golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= +golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -2404,8 +2414,8 @@ golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= -golang.org/x/tools v0.35.0 h1:mBffYraMEf7aa0sB+NuKnuCy8qI/9Bughn8dC2Gu5r0= -golang.org/x/tools v0.35.0/go.mod h1:NKdj5HkL/73byiZSJjqJgKn3ep7KjFkBOkR/Hps3VPw= +golang.org/x/tools v0.36.0 h1:kWS0uv/zsvHEle1LbV5LE8QujrxB3wfQyxHfhOk0Qkg= +golang.org/x/tools v0.36.0/go.mod h1:WBDiHKJK8YgLHlcQPYQzNCkUxUypCaa5ZegCVutKm+s= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -2487,8 +2497,8 @@ google.golang.org/api v0.108.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/ google.golang.org/api v0.110.0/go.mod h1:7FC4Vvx1Mooxh8C5HWjzZHcavuS2f6pmJpZx60ca7iI= google.golang.org/api v0.111.0/go.mod h1:qtFHvU9mhgTJegR31csQ+rwxyUTHOKFqCKWp1J0fdw0= google.golang.org/api v0.114.0/go.mod h1:ifYI2ZsFK6/uGddGfAD5BMxlnkBqCmqHSDUVi45N5Yg= -google.golang.org/api v0.242.0 h1:7Lnb1nfnpvbkCiZek6IXKdJ0MFuAZNAJKQfA1ws62xg= -google.golang.org/api v0.242.0/go.mod h1:cOVEm2TpdAGHL2z+UwyS+kmlGr3bVWQQ6sYEqkKje50= +google.golang.org/api v0.246.0 h1:H0ODDs5PnMZVZAEtdLMn2Ul2eQi7QNjqM2DIFp8TlTM= +google.golang.org/api v0.246.0/go.mod h1:dMVhVcylamkirHdzEBAIQWUCgqY885ivNeZYd7VAVr8= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -2629,12 +2639,12 @@ google.golang.org/genproto v0.0.0-20230323212658-478b75c54725/go.mod h1:UUQDJDOl google.golang.org/genproto v0.0.0-20230330154414-c0448cd141ea/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak= google.golang.org/genproto v0.0.0-20230331144136-dcfb400f0633/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak= google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU= -google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 h1:1tXaIXCracvtsRxSBsYDiSBN0cuJvM7QYW+MrpIRY78= -google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:49MsLSx0oWMOZqcpB3uL8ZOkAh1+TndpJ8ONoCBWiZk= -google.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2 h1:vPV0tzlsK6EzEDHNNH5sa7Hs9bd7iXR7B1tSiPepkV0= -google.golang.org/genproto/googleapis/api v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:pKLAc5OolXC3ViWGI62vvC0n10CpwAtRcTNCFwTKBEw= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 h1:fc6jSaCT0vBduLYZHYrBBNY4dsWuvgyff9noRNDdBeE= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/genproto v0.0.0-20250603155806-513f23925822 h1:rHWScKit0gvAPuOnu87KpaYtjK5zBMLcULh7gxkCXu4= +google.golang.org/genproto v0.0.0-20250603155806-513f23925822/go.mod h1:HubltRL7rMh0LfnQPkMH4NPDFEWp0jw3vixw7jEM53s= +google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 h1:oWVWY3NzT7KJppx2UKhKmzPq4SRe0LdCijVRwvGeikY= +google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822/go.mod h1:h3c4v36UTKzUiuaOKQ6gr3S+0hovBtUrXzTG/i3+XEc= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0 h1:MAKi5q709QWfnkkpNQ0M12hYJ1+e8qYVDyowc4U1XZM= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -2676,8 +2686,8 @@ google.golang.org/grpc v1.52.3/go.mod h1:pu6fVzoFb+NBYNAvQL08ic+lvB2IojljRYuun5v google.golang.org/grpc v1.53.0/go.mod h1:OnIrk0ipVdj4N5d9IUoFUx72/VlD7+jUsHwZgwSMQpw= google.golang.org/grpc v1.54.0/go.mod h1:PUSEXI6iWghWaB6lXM4knEgpJNu2qUcKfDtNci3EC2g= google.golang.org/grpc v1.56.3/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= -google.golang.org/grpc v1.73.0 h1:VIWSmpI2MegBtTuFt5/JWy2oXxtjJ/e89Z70ImfD2ok= -google.golang.org/grpc v1.73.0/go.mod h1:50sbHOUqWoCQGI8V2HQLJM0B+LMlIUjNSZmow7EVBQc= +google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4= +google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= diff --git a/helm/coder/templates/_coder.tpl b/helm/coder/templates/_coder.tpl index 3964fd1e3f66d..2efa530c34a47 100644 --- a/helm/coder/templates/_coder.tpl +++ b/helm/coder/templates/_coder.tpl @@ -41,6 +41,8 @@ env: value: "0.0.0.0:8080" - name: CODER_PROMETHEUS_ADDRESS value: "0.0.0.0:2112" +- name: CODER_PPROF_ADDRESS + value: "0.0.0.0:6060" {{- if .Values.provisionerDaemon.pskSecretName }} - name: CODER_PROVISIONER_DAEMON_PSK valueFrom: diff --git a/helm/coder/tests/chart_test.go b/helm/coder/tests/chart_test.go index a11d631a2f247..17678a85e0dec 100644 --- a/helm/coder/tests/chart_test.go +++ b/helm/coder/tests/chart_test.go @@ -125,6 +125,10 @@ var testCases = []testCase{ name: "partial_resources", expectedError: "", }, + { + name: "pod_securitycontext", + expectedError: "", + }, } type testCase struct { diff --git a/helm/coder/tests/testdata/auto_access_url_1.golden b/helm/coder/tests/testdata/auto_access_url_1.golden index a8455dd53357f..82b78f878e0a9 100644 --- a/helm/coder/tests/testdata/auto_access_url_1.golden +++ b/helm/coder/tests/testdata/auto_access_url_1.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: KUBE_POD_IP valueFrom: fieldRef: diff --git a/helm/coder/tests/testdata/auto_access_url_1_coder.golden b/helm/coder/tests/testdata/auto_access_url_1_coder.golden index 5862de46fa900..849553b8ab023 100644 --- a/helm/coder/tests/testdata/auto_access_url_1_coder.golden +++ b/helm/coder/tests/testdata/auto_access_url_1_coder.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: KUBE_POD_IP valueFrom: fieldRef: diff --git a/helm/coder/tests/testdata/auto_access_url_2.golden b/helm/coder/tests/testdata/auto_access_url_2.golden index d5c7ce4dd17ac..666341a133394 100644 --- a/helm/coder/tests/testdata/auto_access_url_2.golden +++ b/helm/coder/tests/testdata/auto_access_url_2.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/auto_access_url_2_coder.golden b/helm/coder/tests/testdata/auto_access_url_2_coder.golden index 94341b196c2b3..4a2c6074b058e 100644 --- a/helm/coder/tests/testdata/auto_access_url_2_coder.golden +++ b/helm/coder/tests/testdata/auto_access_url_2_coder.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/auto_access_url_3.golden b/helm/coder/tests/testdata/auto_access_url_3.golden index 5ce3303dcdca3..a0b24ff212346 100644 --- a/helm/coder/tests/testdata/auto_access_url_3.golden +++ b/helm/coder/tests/testdata/auto_access_url_3.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: KUBE_POD_IP valueFrom: fieldRef: diff --git a/helm/coder/tests/testdata/auto_access_url_3_coder.golden b/helm/coder/tests/testdata/auto_access_url_3_coder.golden index 9298e7411bc74..2e62cb18b60ab 100644 --- a/helm/coder/tests/testdata/auto_access_url_3_coder.golden +++ b/helm/coder/tests/testdata/auto_access_url_3_coder.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: KUBE_POD_IP valueFrom: fieldRef: diff --git a/helm/coder/tests/testdata/command.golden b/helm/coder/tests/testdata/command.golden index 9ef66d7ad3a07..a11cb7564e392 100644 --- a/helm/coder/tests/testdata/command.golden +++ b/helm/coder/tests/testdata/command.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/command_args.golden b/helm/coder/tests/testdata/command_args.golden index d5633ce361966..d296c1a8b58d9 100644 --- a/helm/coder/tests/testdata/command_args.golden +++ b/helm/coder/tests/testdata/command_args.golden @@ -154,6 +154,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/command_args_coder.golden b/helm/coder/tests/testdata/command_args_coder.golden index 8fafa90a7f080..c606627a02e67 100644 --- a/helm/coder/tests/testdata/command_args_coder.golden +++ b/helm/coder/tests/testdata/command_args_coder.golden @@ -154,6 +154,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/command_coder.golden b/helm/coder/tests/testdata/command_coder.golden index 055cec2380d59..a7027d4eed4da 100644 --- a/helm/coder/tests/testdata/command_coder.golden +++ b/helm/coder/tests/testdata/command_coder.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/custom_resources.golden b/helm/coder/tests/testdata/custom_resources.golden index ca5391e3ac5d9..e9889d36dee51 100644 --- a/helm/coder/tests/testdata/custom_resources.golden +++ b/helm/coder/tests/testdata/custom_resources.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/custom_resources_coder.golden b/helm/coder/tests/testdata/custom_resources_coder.golden index f783a4f7e53e5..3e45a160f1c58 100644 --- a/helm/coder/tests/testdata/custom_resources_coder.golden +++ b/helm/coder/tests/testdata/custom_resources_coder.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/default_values.golden b/helm/coder/tests/testdata/default_values.golden index c48dffefd12f1..bbaa590568e46 100644 --- a/helm/coder/tests/testdata/default_values.golden +++ b/helm/coder/tests/testdata/default_values.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/default_values_coder.golden b/helm/coder/tests/testdata/default_values_coder.golden index bb8157ea46153..d63411508ed66 100644 --- a/helm/coder/tests/testdata/default_values_coder.golden +++ b/helm/coder/tests/testdata/default_values_coder.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/env_from.golden b/helm/coder/tests/testdata/env_from.golden index eb43115a79187..aca0cb45b3825 100644 --- a/helm/coder/tests/testdata/env_from.golden +++ b/helm/coder/tests/testdata/env_from.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/env_from_coder.golden b/helm/coder/tests/testdata/env_from_coder.golden index a539842ce9187..b4c074225011b 100644 --- a/helm/coder/tests/testdata/env_from_coder.golden +++ b/helm/coder/tests/testdata/env_from_coder.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/extra_templates.golden b/helm/coder/tests/testdata/extra_templates.golden index 2b0d5117c855f..77f06833e3c27 100644 --- a/helm/coder/tests/testdata/extra_templates.golden +++ b/helm/coder/tests/testdata/extra_templates.golden @@ -162,6 +162,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/extra_templates_coder.golden b/helm/coder/tests/testdata/extra_templates_coder.golden index bca6beee0c1ea..ec5d34eec870d 100644 --- a/helm/coder/tests/testdata/extra_templates_coder.golden +++ b/helm/coder/tests/testdata/extra_templates_coder.golden @@ -162,6 +162,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/labels_annotations.golden b/helm/coder/tests/testdata/labels_annotations.golden index 6a83ee5ec1684..0acc2521ba045 100644 --- a/helm/coder/tests/testdata/labels_annotations.golden +++ b/helm/coder/tests/testdata/labels_annotations.golden @@ -161,6 +161,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/labels_annotations_coder.golden b/helm/coder/tests/testdata/labels_annotations_coder.golden index f4454b575ba93..bef5c25d68525 100644 --- a/helm/coder/tests/testdata/labels_annotations_coder.golden +++ b/helm/coder/tests/testdata/labels_annotations_coder.golden @@ -161,6 +161,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/partial_resources.golden b/helm/coder/tests/testdata/partial_resources.golden index 9eade81274a44..2f5fd5f3c7cad 100644 --- a/helm/coder/tests/testdata/partial_resources.golden +++ b/helm/coder/tests/testdata/partial_resources.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/partial_resources_coder.golden b/helm/coder/tests/testdata/partial_resources_coder.golden index 3edfa2a2fcbb3..14c47eab84c8e 100644 --- a/helm/coder/tests/testdata/partial_resources_coder.golden +++ b/helm/coder/tests/testdata/partial_resources_coder.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/pod_securitycontext.golden b/helm/coder/tests/testdata/pod_securitycontext.golden new file mode 100644 index 0000000000000..e0b02c62ed91c --- /dev/null +++ b/helm/coder/tests/testdata/pod_securitycontext.golden @@ -0,0 +1,210 @@ +--- +# Source: coder/templates/coder.yaml +apiVersion: v1 +kind: ServiceAccount +metadata: + annotations: {} + labels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/name: coder + app.kubernetes.io/part-of: coder + app.kubernetes.io/version: 0.1.0 + helm.sh/chart: coder-0.1.0 + name: coder + namespace: default +--- +# Source: coder/templates/rbac.yaml +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: coder-workspace-perms + namespace: default +rules: + - apiGroups: [""] + resources: ["pods"] + verbs: + - create + - delete + - deletecollection + - get + - list + - patch + - update + - watch + - apiGroups: [""] + resources: ["persistentvolumeclaims"] + verbs: + - create + - delete + - deletecollection + - get + - list + - patch + - update + - watch + - apiGroups: + - apps + resources: + - deployments + verbs: + - create + - delete + - deletecollection + - get + - list + - patch + - update + - watch +--- +# Source: coder/templates/rbac.yaml +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: "coder" + namespace: default +subjects: + - kind: ServiceAccount + name: "coder" +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: coder-workspace-perms +--- +# Source: coder/templates/service.yaml +apiVersion: v1 +kind: Service +metadata: + name: coder + namespace: default + labels: + helm.sh/chart: coder-0.1.0 + app.kubernetes.io/name: coder + app.kubernetes.io/instance: release-name + app.kubernetes.io/part-of: coder + app.kubernetes.io/version: "0.1.0" + app.kubernetes.io/managed-by: Helm + annotations: + {} +spec: + type: LoadBalancer + sessionAffinity: None + ports: + - name: "http" + port: 80 + targetPort: "http" + protocol: TCP + nodePort: + externalTrafficPolicy: "Cluster" + selector: + app.kubernetes.io/name: coder + app.kubernetes.io/instance: release-name +--- +# Source: coder/templates/coder.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + annotations: {} + labels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/name: coder + app.kubernetes.io/part-of: coder + app.kubernetes.io/version: 0.1.0 + helm.sh/chart: coder-0.1.0 + name: coder + namespace: default +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/name: coder + template: + metadata: + annotations: {} + labels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/name: coder + app.kubernetes.io/part-of: coder + app.kubernetes.io/version: 0.1.0 + helm.sh/chart: coder-0.1.0 + spec: + affinity: + podAntiAffinity: + preferredDuringSchedulingIgnoredDuringExecution: + - podAffinityTerm: + labelSelector: + matchExpressions: + - key: app.kubernetes.io/instance + operator: In + values: + - coder + topologyKey: kubernetes.io/hostname + weight: 1 + containers: + - args: + - server + command: + - /opt/coder + env: + - name: CODER_HTTP_ADDRESS + value: 0.0.0.0:8080 + - name: CODER_PROMETHEUS_ADDRESS + value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 + - name: CODER_ACCESS_URL + value: http://coder.default.svc.cluster.local + - name: KUBE_POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + - name: CODER_DERP_SERVER_RELAY_URL + value: http://$(KUBE_POD_IP):8080 + image: ghcr.io/coder/coder:latest + imagePullPolicy: IfNotPresent + lifecycle: {} + livenessProbe: + httpGet: + path: /healthz + port: http + scheme: HTTP + initialDelaySeconds: 0 + name: coder + ports: + - containerPort: 8080 + name: http + protocol: TCP + readinessProbe: + httpGet: + path: /healthz + port: http + scheme: HTTP + initialDelaySeconds: 0 + resources: + limits: + cpu: 2000m + memory: 4096Mi + requests: + cpu: 2000m + memory: 4096Mi + securityContext: + allowPrivilegeEscalation: false + readOnlyRootFilesystem: null + runAsGroup: 1000 + runAsNonRoot: true + runAsUser: 1000 + seccompProfile: + type: RuntimeDefault + volumeMounts: [] + restartPolicy: Always + securityContext: + fsgroup: 1000 + runAsGroup: 1000 + runAsNonRoot: true + runAsUser: 1000 + serviceAccountName: coder + terminationGracePeriodSeconds: 60 + volumes: [] diff --git a/helm/coder/tests/testdata/pod_securitycontext.yaml b/helm/coder/tests/testdata/pod_securitycontext.yaml new file mode 100644 index 0000000000000..ba0a2ba37f952 --- /dev/null +++ b/helm/coder/tests/testdata/pod_securitycontext.yaml @@ -0,0 +1,8 @@ +coder: + image: + tag: latest + podSecurityContext: + fsgroup: 1000 + runAsUser: 1000 + runAsGroup: 1000 + runAsNonRoot: true diff --git a/helm/coder/tests/testdata/pod_securitycontext_coder.golden b/helm/coder/tests/testdata/pod_securitycontext_coder.golden new file mode 100644 index 0000000000000..9133b085074f6 --- /dev/null +++ b/helm/coder/tests/testdata/pod_securitycontext_coder.golden @@ -0,0 +1,210 @@ +--- +# Source: coder/templates/coder.yaml +apiVersion: v1 +kind: ServiceAccount +metadata: + annotations: {} + labels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/name: coder + app.kubernetes.io/part-of: coder + app.kubernetes.io/version: 0.1.0 + helm.sh/chart: coder-0.1.0 + name: coder + namespace: coder +--- +# Source: coder/templates/rbac.yaml +apiVersion: rbac.authorization.k8s.io/v1 +kind: Role +metadata: + name: coder-workspace-perms + namespace: coder +rules: + - apiGroups: [""] + resources: ["pods"] + verbs: + - create + - delete + - deletecollection + - get + - list + - patch + - update + - watch + - apiGroups: [""] + resources: ["persistentvolumeclaims"] + verbs: + - create + - delete + - deletecollection + - get + - list + - patch + - update + - watch + - apiGroups: + - apps + resources: + - deployments + verbs: + - create + - delete + - deletecollection + - get + - list + - patch + - update + - watch +--- +# Source: coder/templates/rbac.yaml +apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: "coder" + namespace: coder +subjects: + - kind: ServiceAccount + name: "coder" +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: coder-workspace-perms +--- +# Source: coder/templates/service.yaml +apiVersion: v1 +kind: Service +metadata: + name: coder + namespace: coder + labels: + helm.sh/chart: coder-0.1.0 + app.kubernetes.io/name: coder + app.kubernetes.io/instance: release-name + app.kubernetes.io/part-of: coder + app.kubernetes.io/version: "0.1.0" + app.kubernetes.io/managed-by: Helm + annotations: + {} +spec: + type: LoadBalancer + sessionAffinity: None + ports: + - name: "http" + port: 80 + targetPort: "http" + protocol: TCP + nodePort: + externalTrafficPolicy: "Cluster" + selector: + app.kubernetes.io/name: coder + app.kubernetes.io/instance: release-name +--- +# Source: coder/templates/coder.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + annotations: {} + labels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/name: coder + app.kubernetes.io/part-of: coder + app.kubernetes.io/version: 0.1.0 + helm.sh/chart: coder-0.1.0 + name: coder + namespace: coder +spec: + replicas: 1 + selector: + matchLabels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/name: coder + template: + metadata: + annotations: {} + labels: + app.kubernetes.io/instance: release-name + app.kubernetes.io/managed-by: Helm + app.kubernetes.io/name: coder + app.kubernetes.io/part-of: coder + app.kubernetes.io/version: 0.1.0 + helm.sh/chart: coder-0.1.0 + spec: + affinity: + podAntiAffinity: + preferredDuringSchedulingIgnoredDuringExecution: + - podAffinityTerm: + labelSelector: + matchExpressions: + - key: app.kubernetes.io/instance + operator: In + values: + - coder + topologyKey: kubernetes.io/hostname + weight: 1 + containers: + - args: + - server + command: + - /opt/coder + env: + - name: CODER_HTTP_ADDRESS + value: 0.0.0.0:8080 + - name: CODER_PROMETHEUS_ADDRESS + value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 + - name: CODER_ACCESS_URL + value: http://coder.coder.svc.cluster.local + - name: KUBE_POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + - name: CODER_DERP_SERVER_RELAY_URL + value: http://$(KUBE_POD_IP):8080 + image: ghcr.io/coder/coder:latest + imagePullPolicy: IfNotPresent + lifecycle: {} + livenessProbe: + httpGet: + path: /healthz + port: http + scheme: HTTP + initialDelaySeconds: 0 + name: coder + ports: + - containerPort: 8080 + name: http + protocol: TCP + readinessProbe: + httpGet: + path: /healthz + port: http + scheme: HTTP + initialDelaySeconds: 0 + resources: + limits: + cpu: 2000m + memory: 4096Mi + requests: + cpu: 2000m + memory: 4096Mi + securityContext: + allowPrivilegeEscalation: false + readOnlyRootFilesystem: null + runAsGroup: 1000 + runAsNonRoot: true + runAsUser: 1000 + seccompProfile: + type: RuntimeDefault + volumeMounts: [] + restartPolicy: Always + securityContext: + fsgroup: 1000 + runAsGroup: 1000 + runAsNonRoot: true + runAsUser: 1000 + serviceAccountName: coder + terminationGracePeriodSeconds: 60 + volumes: [] diff --git a/helm/coder/tests/testdata/prometheus.golden b/helm/coder/tests/testdata/prometheus.golden index 0caa782975e8f..2e6b185a6c326 100644 --- a/helm/coder/tests/testdata/prometheus.golden +++ b/helm/coder/tests/testdata/prometheus.golden @@ -152,6 +152,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/prometheus_coder.golden b/helm/coder/tests/testdata/prometheus_coder.golden index 6985f714612c1..e335d22523709 100644 --- a/helm/coder/tests/testdata/prometheus_coder.golden +++ b/helm/coder/tests/testdata/prometheus_coder.golden @@ -152,6 +152,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/provisionerd_psk.golden b/helm/coder/tests/testdata/provisionerd_psk.golden index 8efac9058c2fc..72cfdd976b5e9 100644 --- a/helm/coder/tests/testdata/provisionerd_psk.golden +++ b/helm/coder/tests/testdata/provisionerd_psk.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_PROVISIONER_DAEMON_PSK valueFrom: secretKeyRef: diff --git a/helm/coder/tests/testdata/provisionerd_psk_coder.golden b/helm/coder/tests/testdata/provisionerd_psk_coder.golden index cb9908874c686..a34e294f992dc 100644 --- a/helm/coder/tests/testdata/provisionerd_psk_coder.golden +++ b/helm/coder/tests/testdata/provisionerd_psk_coder.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_PROVISIONER_DAEMON_PSK valueFrom: secretKeyRef: diff --git a/helm/coder/tests/testdata/sa.golden b/helm/coder/tests/testdata/sa.golden index f57293b211df6..ff423c318baa5 100644 --- a/helm/coder/tests/testdata/sa.golden +++ b/helm/coder/tests/testdata/sa.golden @@ -154,6 +154,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/sa_coder.golden b/helm/coder/tests/testdata/sa_coder.golden index ae3ce59e35a24..8725a6724e6a8 100644 --- a/helm/coder/tests/testdata/sa_coder.golden +++ b/helm/coder/tests/testdata/sa_coder.golden @@ -154,6 +154,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/sa_disabled.golden b/helm/coder/tests/testdata/sa_disabled.golden index 387a05f79536f..122c297571a44 100644 --- a/helm/coder/tests/testdata/sa_disabled.golden +++ b/helm/coder/tests/testdata/sa_disabled.golden @@ -139,6 +139,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/sa_disabled_coder.golden b/helm/coder/tests/testdata/sa_disabled_coder.golden index 77f9b0fc58ae9..da091e00279a2 100644 --- a/helm/coder/tests/testdata/sa_disabled_coder.golden +++ b/helm/coder/tests/testdata/sa_disabled_coder.golden @@ -139,6 +139,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/sa_extra_rules.golden b/helm/coder/tests/testdata/sa_extra_rules.golden index 8d74df5001d34..0a01a6411e33a 100644 --- a/helm/coder/tests/testdata/sa_extra_rules.golden +++ b/helm/coder/tests/testdata/sa_extra_rules.golden @@ -167,6 +167,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/sa_extra_rules_coder.golden b/helm/coder/tests/testdata/sa_extra_rules_coder.golden index 50849b76e89f2..91133dd9803bf 100644 --- a/helm/coder/tests/testdata/sa_extra_rules_coder.golden +++ b/helm/coder/tests/testdata/sa_extra_rules_coder.golden @@ -167,6 +167,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/securitycontext.golden b/helm/coder/tests/testdata/securitycontext.golden index ee1a3e3a795fd..486447d93a4aa 100644 --- a/helm/coder/tests/testdata/securitycontext.golden +++ b/helm/coder/tests/testdata/securitycontext.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/securitycontext_coder.golden b/helm/coder/tests/testdata/securitycontext_coder.golden index fd3d70482df5b..7d5b409b8eed3 100644 --- a/helm/coder/tests/testdata/securitycontext_coder.golden +++ b/helm/coder/tests/testdata/securitycontext_coder.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/svc_loadbalancer.golden b/helm/coder/tests/testdata/svc_loadbalancer.golden index dd55f8d530087..71310077bb6c0 100644 --- a/helm/coder/tests/testdata/svc_loadbalancer.golden +++ b/helm/coder/tests/testdata/svc_loadbalancer.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/svc_loadbalancer_class.golden b/helm/coder/tests/testdata/svc_loadbalancer_class.golden index 92969226da94b..548c360f1c089 100644 --- a/helm/coder/tests/testdata/svc_loadbalancer_class.golden +++ b/helm/coder/tests/testdata/svc_loadbalancer_class.golden @@ -154,6 +154,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/svc_loadbalancer_class_coder.golden b/helm/coder/tests/testdata/svc_loadbalancer_class_coder.golden index aa8a19a234b3d..aad0731549777 100644 --- a/helm/coder/tests/testdata/svc_loadbalancer_class_coder.golden +++ b/helm/coder/tests/testdata/svc_loadbalancer_class_coder.golden @@ -154,6 +154,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/svc_loadbalancer_coder.golden b/helm/coder/tests/testdata/svc_loadbalancer_coder.golden index a7d389fb048df..667f4f84cd7f8 100644 --- a/helm/coder/tests/testdata/svc_loadbalancer_coder.golden +++ b/helm/coder/tests/testdata/svc_loadbalancer_coder.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/svc_nodeport.golden b/helm/coder/tests/testdata/svc_nodeport.golden index 9a271628728f7..d2f1c5c9767ef 100644 --- a/helm/coder/tests/testdata/svc_nodeport.golden +++ b/helm/coder/tests/testdata/svc_nodeport.golden @@ -152,6 +152,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/svc_nodeport_coder.golden b/helm/coder/tests/testdata/svc_nodeport_coder.golden index 0a8805f84ba8b..5d258cfb10d8c 100644 --- a/helm/coder/tests/testdata/svc_nodeport_coder.golden +++ b/helm/coder/tests/testdata/svc_nodeport_coder.golden @@ -152,6 +152,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/tls.golden b/helm/coder/tests/testdata/tls.golden index 1cd0fb75bc6c6..66e1dd69915df 100644 --- a/helm/coder/tests/testdata/tls.golden +++ b/helm/coder/tests/testdata/tls.golden @@ -158,6 +158,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: https://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/tls_coder.golden b/helm/coder/tests/testdata/tls_coder.golden index 95bec4a8c510e..ddad245300a6f 100644 --- a/helm/coder/tests/testdata/tls_coder.golden +++ b/helm/coder/tests/testdata/tls_coder.golden @@ -158,6 +158,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: https://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/topology.golden b/helm/coder/tests/testdata/topology.golden index 4d8af24ce3c7f..2a061efaf2b8d 100644 --- a/helm/coder/tests/testdata/topology.golden +++ b/helm/coder/tests/testdata/topology.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/topology_coder.golden b/helm/coder/tests/testdata/topology_coder.golden index 3b81214417262..0256522c4dcc7 100644 --- a/helm/coder/tests/testdata/topology_coder.golden +++ b/helm/coder/tests/testdata/topology_coder.golden @@ -153,6 +153,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/workspace_proxy.golden b/helm/coder/tests/testdata/workspace_proxy.golden index d096bfe94feea..3a7386af35d25 100644 --- a/helm/coder/tests/testdata/workspace_proxy.golden +++ b/helm/coder/tests/testdata/workspace_proxy.golden @@ -154,6 +154,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.default.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/tests/testdata/workspace_proxy_coder.golden b/helm/coder/tests/testdata/workspace_proxy_coder.golden index 2ed59d5591261..3cafe9855474e 100644 --- a/helm/coder/tests/testdata/workspace_proxy_coder.golden +++ b/helm/coder/tests/testdata/workspace_proxy_coder.golden @@ -154,6 +154,8 @@ spec: value: 0.0.0.0:8080 - name: CODER_PROMETHEUS_ADDRESS value: 0.0.0.0:2112 + - name: CODER_PPROF_ADDRESS + value: 0.0.0.0:6060 - name: CODER_ACCESS_URL value: http://coder.coder.svc.cluster.local - name: KUBE_POD_IP diff --git a/helm/coder/values.yaml b/helm/coder/values.yaml index fa6cb2c3622f8..72708c88495ad 100644 --- a/helm/coder/values.yaml +++ b/helm/coder/values.yaml @@ -12,6 +12,8 @@ coder: # - CODER_TLS_KEY_FILE: set if tls.secretName is not empty. # - CODER_PROMETHEUS_ADDRESS: set to 0.0.0.0:2112 and cannot be changed. # Prometheus must still be enabled by setting CODER_PROMETHEUS_ENABLE. + # - CODER_PPROF_ADDRESS: set to 0.0.0.0:6060 and cannot be changed. + # Profiling must still be enabled by setting CODER_PPROF_ENABLE. # - KUBE_POD_IP # - CODER_DERP_SERVER_RELAY_URL # @@ -142,6 +144,38 @@ coder: # root. It is recommended to leave this setting disabled in production. allowPrivilegeEscalation: false + # coder.podSecurityContext -- Pod-level security context settings that apply + # to all containers in the pod. This is useful for setting volume ownership + # (fsGroup) when mounting secrets like TLS certificates. These settings are + # applied at the pod level, while coder.securityContext applies at the + # container level. Container-level settings take precedence over pod-level + # settings for overlapping fields. This is opt-in and not set by default. + # Common use case: Set fsGroup to ensure mounted secret volumes have correct + # group ownership for the coder user to read certificate files. + podSecurityContext: {} + # Example configuration for certificate mounting: + # podSecurityContext: + # # Sets group ownership of mounted volumes (e.g., for certificate secrets) + # fsGroup: 1000 + # # Additional pod-level security settings (optional) + # runAsUser: 1000 + # runAsGroup: 1000 + # runAsNonRoot: true + # supplementalGroups: [4000] + # seccompProfile: + # type: RuntimeDefault + # # Note: Avoid conflicts with container-level securityContext settings + # # Container-level settings take precedence over pod-level settings + # + # IMPORTANT: OpenShift Compatibility + # On OpenShift, Security Context Constraints (SCCs) may restrict or override + # these values. If you encounter pod creation failures: + # 1. Check your namespace's assigned SCC with: oc describe scc + # 2. Ensure runAsUser/fsGroup values are within allowed UID/GID ranges + # 3. Consider using 'anyuid' SCC for more flexibility, or + # 4. Omit runAsUser/runAsGroup and only set fsGroup for volume ownership + # 5. OpenShift may automatically assign compatible values if left unset + # coder.volumes -- A list of extra volumes to add to the Coder pod. volumes: [] # - name: "my-volume" @@ -159,6 +193,10 @@ coder: # Helm deployment and should be of type "kubernetes.io/tls". The secrets # will be automatically mounted into the pod if specified, and the correct # "CODER_TLS_*" environment variables will be set for you. + + # Note: If you encounter permission issues reading mounted certificates, + # consider setting coder.podSecurityContext.fsGroup to match your container + # user (typically 1000) to ensure proper file ownership. secretNames: [] # coder.replicaCount -- The number of Kubernetes deployment replicas. This diff --git a/helm/libcoder/templates/_coder.yaml b/helm/libcoder/templates/_coder.yaml index b836bdf1df77f..6001df90d6580 100644 --- a/helm/libcoder/templates/_coder.yaml +++ b/helm/libcoder/templates/_coder.yaml @@ -26,6 +26,10 @@ spec: {{- toYaml .Values.coder.podAnnotations | nindent 8 }} spec: serviceAccountName: {{ .Values.coder.serviceAccount.name | quote }} + {{- with .Values.coder.podSecurityContext }} + securityContext: + {{- toYaml . | nindent 8 }} + {{- end }} restartPolicy: Always {{- with .Values.coder.image.pullSecrets }} imagePullSecrets: diff --git a/package.json b/package.json index ee5cba7ecf538..f8ab3fa89170b 100644 --- a/package.json +++ b/package.json @@ -2,13 +2,14 @@ "_comment": "This version doesn't matter, it's just to allow importing from other repos.", "name": "coder", "version": "0.0.0", - "packageManager": "pnpm@9.14.4", + "packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748", "scripts": { "format-docs": "markdown-table-formatter $(find docs -name '*.md') *.md", "lint-docs": "markdownlint-cli2 --fix $(find docs -name '*.md') *.md", "storybook": "pnpm run -C site/ storybook" }, "devDependencies": { + "@biomejs/biome": "2.2.0", "markdown-table-formatter": "^1.6.1", "markdownlint-cli2": "^0.16.0", "quicktype": "^23.0.0" diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index c136ad0acdcbf..4e6996283b064 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -8,6 +8,9 @@ importers: .: devDependencies: + '@biomejs/biome': + specifier: 2.2.0 + version: 2.2.0 markdown-table-formatter: specifier: ^1.6.1 version: 1.6.1 @@ -20,6 +23,59 @@ importers: packages: + '@biomejs/biome@2.2.0': + resolution: {integrity: sha512-3On3RSYLsX+n9KnoSgfoYlckYBoU6VRM22cw1gB4Y0OuUVSYd/O/2saOJMrA4HFfA1Ff0eacOvMN1yAAvHtzIw==} + engines: {node: '>=14.21.3'} + hasBin: true + + '@biomejs/cli-darwin-arm64@2.2.0': + resolution: {integrity: sha512-zKbwUUh+9uFmWfS8IFxmVD6XwqFcENjZvEyfOxHs1epjdH3wyyMQG80FGDsmauPwS2r5kXdEM0v/+dTIA9FXAg==} + engines: {node: '>=14.21.3'} + cpu: [arm64] + os: [darwin] + + '@biomejs/cli-darwin-x64@2.2.0': + resolution: {integrity: sha512-+OmT4dsX2eTfhD5crUOPw3RPhaR+SKVspvGVmSdZ9y9O/AgL8pla6T4hOn1q+VAFBHuHhsdxDRJgFCSC7RaMOw==} + engines: {node: '>=14.21.3'} + cpu: [x64] + os: [darwin] + + '@biomejs/cli-linux-arm64-musl@2.2.0': + resolution: {integrity: sha512-egKpOa+4FL9YO+SMUMLUvf543cprjevNc3CAgDNFLcjknuNMcZ0GLJYa3EGTCR2xIkIUJDVneBV3O9OcIlCEZQ==} + engines: {node: '>=14.21.3'} + cpu: [arm64] + os: [linux] + + '@biomejs/cli-linux-arm64@2.2.0': + resolution: {integrity: sha512-6eoRdF2yW5FnW9Lpeivh7Mayhq0KDdaDMYOJnH9aT02KuSIX5V1HmWJCQQPwIQbhDh68Zrcpl8inRlTEan0SXw==} + engines: {node: '>=14.21.3'} + cpu: [arm64] + os: [linux] + + '@biomejs/cli-linux-x64-musl@2.2.0': + resolution: {integrity: sha512-I5J85yWwUWpgJyC1CcytNSGusu2p9HjDnOPAFG4Y515hwRD0jpR9sT9/T1cKHtuCvEQ/sBvx+6zhz9l9wEJGAg==} + engines: {node: '>=14.21.3'} + cpu: [x64] + os: [linux] + + '@biomejs/cli-linux-x64@2.2.0': + resolution: {integrity: sha512-5UmQx/OZAfJfi25zAnAGHUMuOd+LOsliIt119x2soA2gLggQYrVPA+2kMUxR6Mw5M1deUF/AWWP2qpxgH7Nyfw==} + engines: {node: '>=14.21.3'} + cpu: [x64] + os: [linux] + + '@biomejs/cli-win32-arm64@2.2.0': + resolution: {integrity: sha512-n9a1/f2CwIDmNMNkFs+JI0ZjFnMO0jdOyGNtihgUNFnlmd84yIYY2KMTBmMV58ZlVHjgmY5Y6E1hVTnSRieggA==} + engines: {node: '>=14.21.3'} + cpu: [arm64] + os: [win32] + + '@biomejs/cli-win32-x64@2.2.0': + resolution: {integrity: sha512-Nawu5nHjP/zPKTIryh2AavzTc/KEg4um/MxWdXW0A6P/RZOyIpa7+QSjeXwAwX/utJGaCoXRPWtF3m5U/bB3Ww==} + engines: {node: '>=14.21.3'} + cpu: [x64] + os: [win32] + '@cspotcode/source-map-support@0.8.1': resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} engines: {node: '>=12'} @@ -722,6 +778,41 @@ packages: snapshots: + '@biomejs/biome@2.2.0': + optionalDependencies: + '@biomejs/cli-darwin-arm64': 2.2.0 + '@biomejs/cli-darwin-x64': 2.2.0 + '@biomejs/cli-linux-arm64': 2.2.0 + '@biomejs/cli-linux-arm64-musl': 2.2.0 + '@biomejs/cli-linux-x64': 2.2.0 + '@biomejs/cli-linux-x64-musl': 2.2.0 + '@biomejs/cli-win32-arm64': 2.2.0 + '@biomejs/cli-win32-x64': 2.2.0 + + '@biomejs/cli-darwin-arm64@2.2.0': + optional: true + + '@biomejs/cli-darwin-x64@2.2.0': + optional: true + + '@biomejs/cli-linux-arm64-musl@2.2.0': + optional: true + + '@biomejs/cli-linux-arm64@2.2.0': + optional: true + + '@biomejs/cli-linux-x64-musl@2.2.0': + optional: true + + '@biomejs/cli-linux-x64@2.2.0': + optional: true + + '@biomejs/cli-win32-arm64@2.2.0': + optional: true + + '@biomejs/cli-win32-x64@2.2.0': + optional: true + '@cspotcode/source-map-support@0.8.1': dependencies: '@jridgewell/trace-mapping': 0.3.9 diff --git a/provisioner/echo/serve.go b/provisioner/echo/serve.go index 031af97317aca..4bb2a1dd6b78b 100644 --- a/provisioner/echo/serve.go +++ b/provisioner/echo/serve.go @@ -8,6 +8,7 @@ import ( "os" "path/filepath" "strings" + "text/template" "github.com/google/uuid" "golang.org/x/xerrors" @@ -377,6 +378,45 @@ func TarWithOptions(ctx context.Context, logger slog.Logger, responses *Response logger.Debug(context.Background(), "extra file written", slog.F("name", name), slog.F("bytes_written", n)) } + + // Write a main.tf with the appropriate parameters. This is to write terraform + // that matches the parameters defined in the responses. Dynamic parameters + // parsed these, even in the echo provisioner. + var mainTF bytes.Buffer + for _, respPlan := range responses.ProvisionPlan { + plan := respPlan.GetPlan() + if plan == nil { + continue + } + + for _, param := range plan.Parameters { + paramTF, err := ParameterTerraform(param) + if err != nil { + return nil, xerrors.Errorf("parameter terraform: %w", err) + } + _, _ = mainTF.WriteString(paramTF) + } + } + + if mainTF.Len() > 0 { + mainTFData := ` +terraform { + required_providers { + coder = { + source = "coder/coder" + } + } +} +` + mainTF.String() + + _ = writer.WriteHeader(&tar.Header{ + Name: `main.tf`, + Size: int64(len(mainTFData)), + Mode: 0o644, + }) + _, _ = writer.Write([]byte(mainTFData)) + } + // `writer.Close()` function flushes the writer buffer, and adds extra padding to create a legal tarball. err := writer.Close() if err != nil { @@ -385,6 +425,69 @@ func TarWithOptions(ctx context.Context, logger slog.Logger, responses *Response return buffer.Bytes(), nil } +// ParameterTerraform will create a Terraform data block for the provided parameter. +func ParameterTerraform(param *proto.RichParameter) (string, error) { + tmpl := template.Must(template.New("parameter").Funcs(map[string]any{ + "showValidation": func(v *proto.RichParameter) bool { + return v != nil && (v.ValidationMax != nil || v.ValidationMin != nil || + v.ValidationError != "" || v.ValidationRegex != "" || + v.ValidationMonotonic != "") + }, + "formType": func(v *proto.RichParameter) string { + s, _ := proto.ProviderFormType(v.FormType) + return string(s) + }, + }).Parse(` +data "coder_parameter" "{{ .Name }}" { + name = "{{ .Name }}" + display_name = "{{ .DisplayName }}" + description = "{{ .Description }}" + icon = "{{ .Icon }}" + mutable = {{ .Mutable }} + ephemeral = {{ .Ephemeral }} + order = {{ .Order }} +{{- if .DefaultValue }} + default = {{ .DefaultValue }} +{{- end }} +{{- if .Type }} + type = "{{ .Type }}" +{{- end }} +{{- if .FormType }} + form_type = "{{ formType . }}" +{{- end }} +{{- range .Options }} + option { + name = "{{ .Name }}" + value = "{{ .Value }}" + } +{{- end }} +{{- if showValidation .}} + validation { + {{- if .ValidationRegex }} + regex = "{{ .ValidationRegex }}" + {{- end }} + {{- if .ValidationError }} + error = "{{ .ValidationError }}" + {{- end }} + {{- if .ValidationMin }} + min = {{ .ValidationMin }} + {{- end }} + {{- if .ValidationMax }} + max = {{ .ValidationMax }} + {{- end }} + {{- if .ValidationMonotonic }} + monotonic = "{{ .ValidationMonotonic }}" + {{- end }} + } +{{- end }} +} +`)) + + var buf bytes.Buffer + err := tmpl.Execute(&buf, param) + return buf.String(), err +} + func WithResources(resources []*proto.Resource) *Responses { return &Responses{ Parse: ParseComplete, diff --git a/provisioner/terraform/executor.go b/provisioner/terraform/executor.go index ea63f8c59877e..8940a1708bf19 100644 --- a/provisioner/terraform/executor.go +++ b/provisioner/terraform/executor.go @@ -363,6 +363,7 @@ func (e *executor) plan(ctx, killCtx context.Context, env, vars []string, logr l ModuleFiles: moduleFiles, HasAiTasks: state.HasAITasks, AiTasks: state.AITasks, + HasExternalAgents: state.HasExternalAgents, } return msg, nil diff --git a/provisioner/terraform/provision_test.go b/provisioner/terraform/provision_test.go index d067965997308..90a34e6d03a8c 100644 --- a/provisioner/terraform/provision_test.go +++ b/provisioner/terraform/provision_test.go @@ -1135,6 +1135,31 @@ func TestProvision(t *testing.T) { HasAiTasks: true, }, }, + { + Name: "external-agent", + Files: map[string]string{ + "main.tf": `terraform { + required_providers { + coder = { + source = "coder/coder" + version = ">= 2.7.0" + } + } + } + resource "coder_external_agent" "example" { + agent_id = "123" + } + `, + }, + Response: &proto.PlanComplete{ + Resources: []*proto.Resource{{ + Name: "example", + Type: "coder_external_agent", + }}, + HasExternalAgents: true, + }, + SkipCacheProviders: true, + }, } // Remove unused cache dirs before running tests. @@ -1237,6 +1262,7 @@ func TestProvision(t *testing.T) { require.Equal(t, string(modulesWant), string(modulesGot)) require.Equal(t, planComplete.HasAiTasks, testCase.Response.HasAiTasks) + require.Equal(t, planComplete.HasExternalAgents, testCase.Response.HasExternalAgents) } if testCase.Apply { diff --git a/provisioner/terraform/resources.go b/provisioner/terraform/resources.go index 84174c90b435d..3dcead074c22a 100644 --- a/provisioner/terraform/resources.go +++ b/provisioner/terraform/resources.go @@ -165,6 +165,7 @@ type State struct { ExternalAuthProviders []*proto.ExternalAuthProviderResource AITasks []*proto.AITask HasAITasks bool + HasExternalAgents bool } var ErrInvalidTerraformAddr = xerrors.New("invalid terraform address") @@ -188,6 +189,20 @@ func hasAITaskResources(graph *gographviz.Graph) bool { return false } +func hasExternalAgentResources(graph *gographviz.Graph) bool { + for _, node := range graph.Nodes.Lookup { + if label, exists := node.Attrs["label"]; exists { + labelValue := strings.Trim(label, `"`) + // The first condition is for the case where the resource is in the root module. + // The second condition is for the case where the resource is in a child module. + if strings.HasPrefix(labelValue, "coder_external_agent.") || strings.Contains(labelValue, ".coder_external_agent.") { + return true + } + } + } + return false +} + // ConvertState consumes Terraform state and a GraphViz representation // produced by `terraform graph` to produce resources consumable by Coder. // nolint:gocognit // This function makes more sense being large for now, until refactored. @@ -965,7 +980,9 @@ func ConvertState(ctx context.Context, modules []*tfjson.StateModule, rawGraph s ExpirationPolicy: expirationPolicy, Scheduling: scheduling, }, - Default: preset.Default, + Default: preset.Default, + Description: preset.Description, + Icon: preset.Icon, } if slice.Contains(duplicatedPresetNames, preset.Name) { @@ -1063,6 +1080,7 @@ func ConvertState(ctx context.Context, modules []*tfjson.StateModule, rawGraph s ExternalAuthProviders: externalAuthProviders, HasAITasks: hasAITasks, AITasks: aiTasks, + HasExternalAgents: hasExternalAgentResources(graph), }, nil } @@ -1250,7 +1268,8 @@ func findResourcesInGraph(graph *gographviz.Graph, tfResourcesByLabel map[string continue } // Don't associate Coder resources with other Coder resources! - if strings.HasPrefix(resource.Type, "coder_") { + // Except for coder_external_agent, which is a special case. + if strings.HasPrefix(resource.Type, "coder_") && resource.Type != "coder_external_agent" { continue } graphResources = append(graphResources, &graphResource{ diff --git a/provisioner/terraform/resources_test.go b/provisioner/terraform/resources_test.go index 1575c6c9c159e..715055c00cad9 100644 --- a/provisioner/terraform/resources_test.go +++ b/provisioner/terraform/resources_test.go @@ -1573,6 +1573,35 @@ func TestAITasks(t *testing.T) { }) } +func TestExternalAgents(t *testing.T) { + t.Parallel() + ctx, logger := ctxAndLogger(t) + + t.Run("External agents can be defined", func(t *testing.T) { + t.Parallel() + + // nolint:dogsled + _, filename, _, _ := runtime.Caller(0) + + dir := filepath.Join(filepath.Dir(filename), "testdata", "resources", "external-agents") + tfPlanRaw, err := os.ReadFile(filepath.Join(dir, "external-agents.tfplan.json")) + require.NoError(t, err) + var tfPlan tfjson.Plan + err = json.Unmarshal(tfPlanRaw, &tfPlan) + require.NoError(t, err) + tfPlanGraph, err := os.ReadFile(filepath.Join(dir, "external-agents.tfplan.dot")) + require.NoError(t, err) + + state, err := terraform.ConvertState(ctx, []*tfjson.StateModule{tfPlan.PlannedValues.RootModule, tfPlan.PriorState.Values.RootModule}, string(tfPlanGraph), logger) + require.NotNil(t, state) + require.NoError(t, err) + require.True(t, state.HasExternalAgents) + require.Len(t, state.Resources, 1) + require.Len(t, state.Resources[0].Agents, 1) + require.Equal(t, "dev1", state.Resources[0].Agents[0].Name) + }) +} + // sortResource ensures resources appear in a consistent ordering // to prevent tests from flaking. func sortResources(resources []*proto.Resource) { diff --git a/provisioner/terraform/testdata/resources/external-agents/external-agents.tfplan.dot b/provisioner/terraform/testdata/resources/external-agents/external-agents.tfplan.dot new file mode 100644 index 0000000000000..d2db86a89e488 --- /dev/null +++ b/provisioner/terraform/testdata/resources/external-agents/external-agents.tfplan.dot @@ -0,0 +1,22 @@ +digraph { + compound = "true" + newrank = "true" + subgraph "root" { + "[root] coder_agent.dev1 (expand)" [label = "coder_agent.dev1", shape = "box"] + "[root] coder_external_agent.dev1 (expand)" [label = "coder_external_agent.dev1", shape = "box"] + "[root] data.coder_provisioner.me (expand)" [label = "data.coder_provisioner.me", shape = "box"] + "[root] data.coder_workspace.me (expand)" [label = "data.coder_workspace.me", shape = "box"] + "[root] data.coder_workspace_owner.me (expand)" [label = "data.coder_workspace_owner.me", shape = "box"] + "[root] provider[\"registry.terraform.io/coder/coder\"]" [label = "provider[\"registry.terraform.io/coder/coder\"]", shape = "diamond"] + "[root] coder_agent.dev1 (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] coder_external_agent.dev1 (expand)" -> "[root] coder_agent.dev1 (expand)" + "[root] data.coder_provisioner.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] data.coder_workspace.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] data.coder_workspace_owner.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] coder_external_agent.dev1 (expand)" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_provisioner.me (expand)" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_workspace.me (expand)" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_workspace_owner.me (expand)" + "[root] root" -> "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" + } +} diff --git a/provisioner/terraform/testdata/resources/external-agents/external-agents.tfplan.json b/provisioner/terraform/testdata/resources/external-agents/external-agents.tfplan.json new file mode 100644 index 0000000000000..3d085a535b2bf --- /dev/null +++ b/provisioner/terraform/testdata/resources/external-agents/external-agents.tfplan.json @@ -0,0 +1,277 @@ +{ + "format_version": "1.2", + "terraform_version": "1.12.2", + "planned_values": { + "root_module": { + "resources": [ + { + "address": "coder_agent.dev1", + "mode": "managed", + "type": "coder_agent", + "name": "dev1", + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 1, + "values": { + "api_key_scope": "all", + "arch": "amd64", + "auth": "token", + "connection_timeout": 120, + "dir": null, + "env": null, + "metadata": [], + "motd_file": null, + "order": null, + "os": "linux", + "resources_monitoring": [], + "shutdown_script": null, + "startup_script": null, + "startup_script_behavior": "non-blocking", + "troubleshooting_url": null + }, + "sensitive_values": { + "display_apps": [], + "metadata": [], + "resources_monitoring": [], + "token": true + } + }, + { + "address": "coder_external_agent.dev1", + "mode": "managed", + "type": "coder_external_agent", + "name": "dev1", + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 1, + "sensitive_values": { + "agent_id": true + } + } + ] + } + }, + "resource_changes": [ + { + "address": "coder_agent.dev1", + "mode": "managed", + "type": "coder_agent", + "name": "dev1", + "provider_name": "registry.terraform.io/coder/coder", + "change": { + "actions": [ + "create" + ], + "before": null, + "after": { + "api_key_scope": "all", + "arch": "amd64", + "auth": "token", + "connection_timeout": 120, + "dir": null, + "env": null, + "metadata": [], + "motd_file": null, + "order": null, + "os": "linux", + "resources_monitoring": [], + "shutdown_script": null, + "startup_script": null, + "startup_script_behavior": "non-blocking", + "troubleshooting_url": null + }, + "after_unknown": { + "display_apps": true, + "id": true, + "init_script": true, + "metadata": [], + "resources_monitoring": [], + "token": true + }, + "before_sensitive": false, + "after_sensitive": { + "display_apps": [], + "metadata": [], + "resources_monitoring": [], + "token": true + } + } + }, + { + "address": "coder_external_agent.dev1", + "mode": "managed", + "type": "coder_external_agent", + "name": "dev1", + "provider_name": "registry.terraform.io/coder/coder", + "change": { + "actions": [ + "create" + ], + "before": null, + "after": {}, + "after_unknown": { + "agent_id": true, + "id": true + }, + "before_sensitive": false, + "after_sensitive": { + "agent_id": true + } + } + } + ], + "prior_state": { + "format_version": "1.0", + "terraform_version": "1.12.2", + "values": { + "root_module": { + "resources": [ + { + "address": "data.coder_provisioner.me", + "mode": "data", + "type": "coder_provisioner", + "name": "me", + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 1, + "values": { + "arch": "amd64", + "id": "d607be41-7697-475f-8257-2f6e24adbede", + "os": "linux" + }, + "sensitive_values": {} + }, + { + "address": "data.coder_workspace.me", + "mode": "data", + "type": "coder_workspace", + "name": "me", + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 1, + "values": { + "access_port": 443, + "access_url": "https://dev.coder.com/", + "id": "0b7fc772-5e27-4096-b8a3-9e6a8b914ebe", + "is_prebuild": false, + "is_prebuild_claim": false, + "name": "kacper", + "prebuild_count": 0, + "start_count": 1, + "template_id": "", + "template_name": "", + "template_version": "", + "transition": "start" + }, + "sensitive_values": {} + }, + { + "address": "data.coder_workspace_owner.me", + "mode": "data", + "type": "coder_workspace_owner", + "name": "me", + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 0, + "values": { + "email": "default@example.com", + "full_name": "kacpersaw", + "groups": [], + "id": "1ebd1795-7cf2-47c5-8024-5d56e68f1681", + "login_type": null, + "name": "default", + "oidc_access_token": "", + "rbac_roles": [], + "session_token": "", + "ssh_private_key": "", + "ssh_public_key": "" + }, + "sensitive_values": { + "groups": [], + "oidc_access_token": true, + "rbac_roles": [], + "session_token": true, + "ssh_private_key": true + } + } + ] + } + } + }, + "configuration": { + "provider_config": { + "coder": { + "name": "coder", + "full_name": "registry.terraform.io/coder/coder", + "version_constraint": ">= 2.0.0" + } + }, + "root_module": { + "resources": [ + { + "address": "coder_agent.dev1", + "mode": "managed", + "type": "coder_agent", + "name": "dev1", + "provider_config_key": "coder", + "expressions": { + "arch": { + "constant_value": "amd64" + }, + "os": { + "constant_value": "linux" + } + }, + "schema_version": 1 + }, + { + "address": "coder_external_agent.dev1", + "mode": "managed", + "type": "coder_external_agent", + "name": "dev1", + "provider_config_key": "coder", + "expressions": { + "agent_id": { + "references": [ + "coder_agent.dev1.token", + "coder_agent.dev1" + ] + } + }, + "schema_version": 1 + }, + { + "address": "data.coder_provisioner.me", + "mode": "data", + "type": "coder_provisioner", + "name": "me", + "provider_config_key": "coder", + "schema_version": 1 + }, + { + "address": "data.coder_workspace.me", + "mode": "data", + "type": "coder_workspace", + "name": "me", + "provider_config_key": "coder", + "schema_version": 1 + }, + { + "address": "data.coder_workspace_owner.me", + "mode": "data", + "type": "coder_workspace_owner", + "name": "me", + "provider_config_key": "coder", + "schema_version": 0 + } + ] + } + }, + "relevant_attributes": [ + { + "resource": "coder_agent.dev1", + "attribute": [ + "token" + ] + } + ], + "timestamp": "2025-07-31T11:08:54Z", + "applyable": true, + "complete": true, + "errored": false +} diff --git a/provisioner/terraform/testdata/resources/external-agents/external-agents.tfstate.dot b/provisioner/terraform/testdata/resources/external-agents/external-agents.tfstate.dot new file mode 100644 index 0000000000000..d2db86a89e488 --- /dev/null +++ b/provisioner/terraform/testdata/resources/external-agents/external-agents.tfstate.dot @@ -0,0 +1,22 @@ +digraph { + compound = "true" + newrank = "true" + subgraph "root" { + "[root] coder_agent.dev1 (expand)" [label = "coder_agent.dev1", shape = "box"] + "[root] coder_external_agent.dev1 (expand)" [label = "coder_external_agent.dev1", shape = "box"] + "[root] data.coder_provisioner.me (expand)" [label = "data.coder_provisioner.me", shape = "box"] + "[root] data.coder_workspace.me (expand)" [label = "data.coder_workspace.me", shape = "box"] + "[root] data.coder_workspace_owner.me (expand)" [label = "data.coder_workspace_owner.me", shape = "box"] + "[root] provider[\"registry.terraform.io/coder/coder\"]" [label = "provider[\"registry.terraform.io/coder/coder\"]", shape = "diamond"] + "[root] coder_agent.dev1 (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] coder_external_agent.dev1 (expand)" -> "[root] coder_agent.dev1 (expand)" + "[root] data.coder_provisioner.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] data.coder_workspace.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] data.coder_workspace_owner.me (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] coder_external_agent.dev1 (expand)" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_provisioner.me (expand)" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_workspace.me (expand)" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] data.coder_workspace_owner.me (expand)" + "[root] root" -> "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" + } +} diff --git a/provisioner/terraform/testdata/resources/external-agents/external-agents.tfstate.json b/provisioner/terraform/testdata/resources/external-agents/external-agents.tfstate.json new file mode 100644 index 0000000000000..af884a315ec9d --- /dev/null +++ b/provisioner/terraform/testdata/resources/external-agents/external-agents.tfstate.json @@ -0,0 +1,138 @@ +{ + "format_version": "1.0", + "terraform_version": "1.12.2", + "values": { + "root_module": { + "resources": [ + { + "address": "data.coder_provisioner.me", + "mode": "data", + "type": "coder_provisioner", + "name": "me", + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 1, + "values": { + "arch": "amd64", + "id": "0ce4713c-28d6-4999-9381-52b8a603b672", + "os": "linux" + }, + "sensitive_values": {} + }, + { + "address": "data.coder_workspace.me", + "mode": "data", + "type": "coder_workspace", + "name": "me", + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 1, + "values": { + "access_port": 443, + "access_url": "https://dev.coder.com/", + "id": "dfa1dbe8-ad31-410b-b201-a4ed4d884938", + "is_prebuild": false, + "is_prebuild_claim": false, + "name": "kacper", + "prebuild_count": 0, + "start_count": 1, + "template_id": "", + "template_name": "", + "template_version": "", + "transition": "start" + }, + "sensitive_values": {} + }, + { + "address": "data.coder_workspace_owner.me", + "mode": "data", + "type": "coder_workspace_owner", + "name": "me", + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 0, + "values": { + "email": "default@example.com", + "full_name": "kacpersaw", + "groups": [], + "id": "f5e82b90-ea22-4288-8286-9cf7af651143", + "login_type": null, + "name": "default", + "oidc_access_token": "", + "rbac_roles": [], + "session_token": "", + "ssh_private_key": "", + "ssh_public_key": "" + }, + "sensitive_values": { + "groups": [], + "oidc_access_token": true, + "rbac_roles": [], + "session_token": true, + "ssh_private_key": true + } + }, + { + "address": "coder_agent.dev1", + "mode": "managed", + "type": "coder_agent", + "name": "dev1", + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 1, + "values": { + "api_key_scope": "all", + "arch": "amd64", + "auth": "token", + "connection_timeout": 120, + "dir": null, + "display_apps": [ + { + "port_forwarding_helper": true, + "ssh_helper": true, + "vscode": true, + "vscode_insiders": false, + "web_terminal": true + } + ], + "env": null, + "id": "15a35370-3b2e-4ee7-8b28-81cef0152d8b", + "init_script": "", + "metadata": [], + "motd_file": null, + "order": null, + "os": "linux", + "resources_monitoring": [], + "shutdown_script": null, + "startup_script": null, + "startup_script_behavior": "non-blocking", + "token": "d054c66b-cc5c-41ae-aa0c-2098a1075272", + "troubleshooting_url": null + }, + "sensitive_values": { + "display_apps": [ + {} + ], + "metadata": [], + "resources_monitoring": [], + "token": true + } + }, + { + "address": "coder_external_agent.dev1", + "mode": "managed", + "type": "coder_external_agent", + "name": "dev1", + "provider_name": "registry.terraform.io/coder/coder", + "schema_version": 1, + "values": { + "agent_id": "d054c66b-cc5c-41ae-aa0c-2098a1075272", + "id": "4d87dd70-879c-4347-b0c1-b8f3587d1021" + }, + "sensitive_values": { + "agent_id": true + }, + "depends_on": [ + "coder_agent.dev1" + ] + } + ] + } + } +} diff --git a/provisioner/terraform/testdata/resources/external-agents/main.tf b/provisioner/terraform/testdata/resources/external-agents/main.tf new file mode 100644 index 0000000000000..282b77e1474a9 --- /dev/null +++ b/provisioner/terraform/testdata/resources/external-agents/main.tf @@ -0,0 +1,21 @@ +terraform { + required_providers { + coder = { + source = "coder/coder" + version = ">=2.0.0" + } + } +} + +data "coder_provisioner" "me" {} +data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} + +resource "coder_agent" "dev1" { + os = "linux" + arch = "amd64" +} + +resource "coder_external_agent" "dev1" { + agent_id = coder_agent.dev1.token +} diff --git a/provisioner/terraform/testdata/resources/version.txt b/provisioner/terraform/testdata/resources/version.txt index 3d0e62313ced1..6b89d58f861a7 100644 --- a/provisioner/terraform/testdata/resources/version.txt +++ b/provisioner/terraform/testdata/resources/version.txt @@ -1 +1 @@ -1.11.4 +1.12.2 diff --git a/provisionerd/proto/provisionerd.pb.go b/provisionerd/proto/provisionerd.pb.go index 9960105c78962..818719f1b3995 100644 --- a/provisionerd/proto/provisionerd.pb.go +++ b/provisionerd/proto/provisionerd.pb.go @@ -1403,6 +1403,7 @@ type CompletedJob_TemplateImport struct { ModuleFiles []byte `protobuf:"bytes,10,opt,name=module_files,json=moduleFiles,proto3" json:"module_files,omitempty"` ModuleFilesHash []byte `protobuf:"bytes,11,opt,name=module_files_hash,json=moduleFilesHash,proto3" json:"module_files_hash,omitempty"` HasAiTasks bool `protobuf:"varint,12,opt,name=has_ai_tasks,json=hasAiTasks,proto3" json:"has_ai_tasks,omitempty"` + HasExternalAgents bool `protobuf:"varint,13,opt,name=has_external_agents,json=hasExternalAgents,proto3" json:"has_external_agents,omitempty"` } func (x *CompletedJob_TemplateImport) Reset() { @@ -1521,6 +1522,13 @@ func (x *CompletedJob_TemplateImport) GetHasAiTasks() bool { return false } +func (x *CompletedJob_TemplateImport) GetHasExternalAgents() bool { + if x != nil { + return x.HasExternalAgents + } + return false +} + type CompletedJob_TemplateDryRun struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -1710,7 +1718,7 @@ var file_provisionerd_proto_provisionerd_proto_rawDesc = []byte{ 0x6d, 0x69, 0x6e, 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x1a, 0x10, 0x0a, 0x0e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x1a, 0x10, 0x0a, 0x0e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x44, 0x72, 0x79, 0x52, 0x75, - 0x6e, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x8b, 0x0b, 0x0a, 0x0c, 0x43, 0x6f, + 0x6e, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xbb, 0x0b, 0x0a, 0x0c, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x54, 0x0a, 0x0f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x62, @@ -1749,7 +1757,7 @@ var file_provisionerd_proto_provisionerd_proto_rawDesc = []byte{ 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x61, 0x69, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x49, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x07, - 0x61, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x1a, 0x9f, 0x05, 0x0a, 0x0e, 0x54, 0x65, 0x6d, 0x70, + 0x61, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x1a, 0xcf, 0x05, 0x0a, 0x0e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x12, 0x3e, 0x0a, 0x0f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, @@ -1791,7 +1799,10 @@ var file_provisionerd_proto_provisionerd_proto_rawDesc = []byte{ 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x48, 0x61, 0x73, 0x68, 0x12, 0x20, 0x0a, 0x0c, 0x68, 0x61, 0x73, 0x5f, 0x61, 0x69, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x68, - 0x61, 0x73, 0x41, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x1a, 0x74, 0x0a, 0x0e, 0x54, 0x65, 0x6d, + 0x61, 0x73, 0x41, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x2e, 0x0a, 0x13, 0x68, 0x61, 0x73, + 0x5f, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, + 0x18, 0x0d, 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, 0x68, 0x61, 0x73, 0x45, 0x78, 0x74, 0x65, 0x72, + 0x6e, 0x61, 0x6c, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0x74, 0x0a, 0x0e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x44, 0x72, 0x79, 0x52, 0x75, 0x6e, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, diff --git a/provisionerd/proto/provisionerd.proto b/provisionerd/proto/provisionerd.proto index eeeb5f02da0fb..b008da33ea87e 100644 --- a/provisionerd/proto/provisionerd.proto +++ b/provisionerd/proto/provisionerd.proto @@ -95,6 +95,7 @@ message CompletedJob { bytes module_files = 10; bytes module_files_hash = 11; bool has_ai_tasks = 12; + bool has_external_agents = 13; } message TemplateDryRun { repeated provisioner.Resource resources = 1; diff --git a/provisionerd/proto/version.go b/provisionerd/proto/version.go index e61aac0c5abd8..3ae1bbae04454 100644 --- a/provisionerd/proto/version.go +++ b/provisionerd/proto/version.go @@ -44,9 +44,15 @@ import "github.com/coder/coder/v2/apiversion" // -> `has_ai_tasks` in `CompleteJob.TemplateImport` // -> `has_ai_tasks` and `ai_tasks` in `PlanComplete` // -> new message types `AITaskSidebarApp` and `AITask` +// +// API v1.8: +// - Add new fields `description` and `icon` to `Preset`. +// +// API v1.9: +// - Added new field named 'has_external_agent' in 'CompleteJob.TemplateImport' const ( CurrentMajor = 1 - CurrentMinor = 7 + CurrentMinor = 9 ) // CurrentVersion is the current provisionerd API version. diff --git a/provisionerd/runner/runner.go b/provisionerd/runner/runner.go index b80cf9060b358..924f0628820ce 100644 --- a/provisionerd/runner/runner.go +++ b/provisionerd/runner/runner.go @@ -600,8 +600,9 @@ func (r *Runner) runTemplateImport(ctx context.Context) (*proto.CompletedJob, *p // ModuleFiles are not on the stopProvision. So grab from the startProvision. ModuleFiles: startProvision.ModuleFiles, // ModuleFileHash will be populated if the file is uploaded async - ModuleFilesHash: []byte{}, - HasAiTasks: startProvision.HasAITasks, + ModuleFilesHash: []byte{}, + HasAiTasks: startProvision.HasAITasks, + HasExternalAgents: startProvision.HasExternalAgents, }, }, }, nil @@ -666,6 +667,7 @@ type templateImportProvision struct { Plan json.RawMessage ModuleFiles []byte HasAITasks bool + HasExternalAgents bool } // Performs a dry-run provision when importing a template. @@ -807,6 +809,7 @@ func (r *Runner) runTemplateImportProvisionWithRichParameters( Plan: c.Plan, ModuleFiles: moduleFilesData, HasAITasks: c.HasAiTasks, + HasExternalAgents: c.HasExternalAgents, }, nil default: return nil, xerrors.Errorf("invalid message type %q received from provisioner", diff --git a/provisionersdk/proto/provisioner.pb.go b/provisionersdk/proto/provisioner.pb.go index 7412cb6155610..c96878fba5fea 100644 --- a/provisionersdk/proto/provisioner.pb.go +++ b/provisionersdk/proto/provisioner.pb.go @@ -1107,10 +1107,12 @@ type Preset struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Parameters []*PresetParameter `protobuf:"bytes,2,rep,name=parameters,proto3" json:"parameters,omitempty"` - Prebuild *Prebuild `protobuf:"bytes,3,opt,name=prebuild,proto3" json:"prebuild,omitempty"` - Default bool `protobuf:"varint,4,opt,name=default,proto3" json:"default,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Parameters []*PresetParameter `protobuf:"bytes,2,rep,name=parameters,proto3" json:"parameters,omitempty"` + Prebuild *Prebuild `protobuf:"bytes,3,opt,name=prebuild,proto3" json:"prebuild,omitempty"` + Default bool `protobuf:"varint,4,opt,name=default,proto3" json:"default,omitempty"` + Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"` + Icon string `protobuf:"bytes,6,opt,name=icon,proto3" json:"icon,omitempty"` } func (x *Preset) Reset() { @@ -1173,6 +1175,20 @@ func (x *Preset) GetDefault() bool { return false } +func (x *Preset) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *Preset) GetIcon() string { + if x != nil { + return x.Icon + } + return "" +} + type PresetParameter struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -3385,8 +3401,9 @@ type PlanComplete struct { // still need to know that such resources are defined. // // See `hasAITaskResources` in provisioner/terraform/resources.go for more details. - HasAiTasks bool `protobuf:"varint,13,opt,name=has_ai_tasks,json=hasAiTasks,proto3" json:"has_ai_tasks,omitempty"` - AiTasks []*AITask `protobuf:"bytes,14,rep,name=ai_tasks,json=aiTasks,proto3" json:"ai_tasks,omitempty"` + HasAiTasks bool `protobuf:"varint,13,opt,name=has_ai_tasks,json=hasAiTasks,proto3" json:"has_ai_tasks,omitempty"` + AiTasks []*AITask `protobuf:"bytes,14,rep,name=ai_tasks,json=aiTasks,proto3" json:"ai_tasks,omitempty"` + HasExternalAgents bool `protobuf:"varint,15,opt,name=has_external_agents,json=hasExternalAgents,proto3" json:"has_external_agents,omitempty"` } func (x *PlanComplete) Reset() { @@ -3512,6 +3529,13 @@ func (x *PlanComplete) GetAiTasks() []*AITask { return nil } +func (x *PlanComplete) GetHasExternalAgents() bool { + if x != nil { + return x.HasExternalAgents + } + return false +} + // ApplyRequest asks the provisioner to apply the changes. Apply MUST be preceded by a successful plan request/response // in the same Session. The plan data is not transmitted over the wire and is cached by the provisioner in the Session. type ApplyRequest struct { @@ -4449,7 +4473,7 @@ var file_provisionersdk_proto_provisioner_proto_rawDesc = []byte{ 0x68, 0x65, 0x64, 0x75, 0x6c, 0x69, 0x6e, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, 0x69, 0x6e, 0x67, 0x52, 0x0a, 0x73, 0x63, 0x68, 0x65, 0x64, 0x75, 0x6c, - 0x69, 0x6e, 0x67, 0x22, 0xa7, 0x01, 0x0a, 0x06, 0x50, 0x72, 0x65, 0x73, 0x65, 0x74, 0x12, 0x12, + 0x69, 0x6e, 0x67, 0x22, 0xdd, 0x01, 0x0a, 0x06, 0x50, 0x72, 0x65, 0x73, 0x65, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x3c, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, @@ -4459,569 +4483,575 @@ var file_provisionersdk_proto_provisioner_proto_rawDesc = []byte{ 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x52, 0x08, 0x70, 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x18, 0x04, - 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x22, 0x3b, 0x0a, - 0x0f, 0x50, 0x72, 0x65, 0x73, 0x65, 0x74, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x47, 0x0a, 0x13, 0x52, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, - 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x14, 0x0a, - 0x05, 0x70, 0x61, 0x74, 0x68, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x70, 0x61, - 0x74, 0x68, 0x73, 0x22, 0x57, 0x0a, 0x0d, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1c, - 0x0a, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x08, 0x52, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x22, 0x4a, 0x0a, 0x03, - 0x4c, 0x6f, 0x67, 0x12, 0x2b, 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, - 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x22, 0x37, 0x0a, 0x14, 0x49, 0x6e, 0x73, 0x74, - 0x61, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x41, 0x75, 0x74, 0x68, - 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x49, - 0x64, 0x22, 0x4a, 0x0a, 0x1c, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, - 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, - 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x08, 0x52, 0x08, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x22, 0x49, 0x0a, - 0x14, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, - 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x21, 0x0a, 0x0c, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, - 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x61, 0x63, 0x63, - 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0xda, 0x08, 0x0a, 0x05, 0x41, 0x67, 0x65, - 0x6e, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, - 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x2d, 0x0a, 0x03, 0x65, 0x6e, 0x76, 0x18, 0x03, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x45, 0x6e, 0x76, 0x45, 0x6e, 0x74, 0x72, 0x79, - 0x52, 0x03, 0x65, 0x6e, 0x76, 0x12, 0x29, 0x0a, 0x10, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, - 0x6e, 0x67, 0x5f, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0f, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, - 0x12, 0x22, 0x0a, 0x0c, 0x61, 0x72, 0x63, 0x68, 0x69, 0x74, 0x65, 0x63, 0x74, 0x75, 0x72, 0x65, - 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x61, 0x72, 0x63, 0x68, 0x69, 0x74, 0x65, 0x63, - 0x74, 0x75, 0x72, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, - 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x79, 0x12, 0x24, 0x0a, 0x04, 0x61, 0x70, 0x70, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, - 0x70, 0x70, 0x52, 0x04, 0x61, 0x70, 0x70, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x74, 0x6f, 0x6b, 0x65, - 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, - 0x12, 0x21, 0x0a, 0x0b, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, - 0x0a, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0a, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, - 0x65, 0x49, 0x64, 0x12, 0x3c, 0x0a, 0x1a, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, - 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x05, 0x52, 0x18, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x53, 0x65, 0x63, 0x6f, 0x6e, 0x64, - 0x73, 0x12, 0x2f, 0x0a, 0x13, 0x74, 0x72, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x73, 0x68, 0x6f, 0x6f, - 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, - 0x74, 0x72, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x73, 0x68, 0x6f, 0x6f, 0x74, 0x69, 0x6e, 0x67, 0x55, - 0x72, 0x6c, 0x12, 0x1b, 0x0a, 0x09, 0x6d, 0x6f, 0x74, 0x64, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x18, - 0x0d, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6d, 0x6f, 0x74, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x12, - 0x37, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x12, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, - 0x41, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x3b, 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x70, - 0x6c, 0x61, 0x79, 0x5f, 0x61, 0x70, 0x70, 0x73, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, - 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x69, 0x73, - 0x70, 0x6c, 0x61, 0x79, 0x41, 0x70, 0x70, 0x73, 0x52, 0x0b, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, - 0x79, 0x41, 0x70, 0x70, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x73, - 0x18, 0x15, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x53, 0x63, 0x72, 0x69, 0x70, 0x74, 0x52, 0x07, 0x73, 0x63, 0x72, - 0x69, 0x70, 0x74, 0x73, 0x12, 0x2f, 0x0a, 0x0a, 0x65, 0x78, 0x74, 0x72, 0x61, 0x5f, 0x65, 0x6e, - 0x76, 0x73, 0x18, 0x16, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x6e, 0x76, 0x52, 0x09, 0x65, 0x78, 0x74, 0x72, - 0x61, 0x45, 0x6e, 0x76, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x18, 0x17, - 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x53, 0x0a, 0x14, 0x72, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x5f, 0x6d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, - 0x69, 0x6e, 0x67, 0x18, 0x18, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x73, 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x52, 0x13, 0x72, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, - 0x12, 0x3f, 0x0a, 0x0d, 0x64, 0x65, 0x76, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, - 0x73, 0x18, 0x19, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x65, 0x76, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, - 0x65, 0x72, 0x52, 0x0d, 0x64, 0x65, 0x76, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, - 0x73, 0x12, 0x22, 0x0a, 0x0d, 0x61, 0x70, 0x69, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x73, 0x63, 0x6f, - 0x70, 0x65, 0x18, 0x1a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x61, 0x70, 0x69, 0x4b, 0x65, 0x79, - 0x53, 0x63, 0x6f, 0x70, 0x65, 0x1a, 0xa3, 0x01, 0x0a, 0x08, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x6b, 0x65, 0x79, 0x12, 0x21, 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, - 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x69, 0x73, 0x70, - 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x12, - 0x1a, 0x0a, 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x03, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x12, 0x18, 0x0a, 0x07, 0x74, - 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x74, 0x69, - 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x18, 0x06, - 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x1a, 0x36, 0x0a, 0x08, 0x45, - 0x6e, 0x76, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, - 0x02, 0x38, 0x01, 0x42, 0x06, 0x0a, 0x04, 0x61, 0x75, 0x74, 0x68, 0x4a, 0x04, 0x08, 0x0e, 0x10, - 0x0f, 0x52, 0x12, 0x6c, 0x6f, 0x67, 0x69, 0x6e, 0x5f, 0x62, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x5f, - 0x72, 0x65, 0x61, 0x64, 0x79, 0x22, 0x8f, 0x01, 0x0a, 0x13, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x73, 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x3a, 0x0a, - 0x06, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x6d, 0x6f, - 0x72, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, - 0x72, 0x52, 0x06, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x12, 0x3c, 0x0a, 0x07, 0x76, 0x6f, 0x6c, - 0x75, 0x6d, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x52, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x52, 0x07, - 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x73, 0x22, 0x4f, 0x0a, 0x15, 0x4d, 0x65, 0x6d, 0x6f, 0x72, - 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, - 0x12, 0x18, 0x0a, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x08, 0x52, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x68, - 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, - 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x22, 0x63, 0x0a, 0x15, 0x56, 0x6f, 0x6c, 0x75, - 0x6d, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, - 0x72, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x12, - 0x1c, 0x0a, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x05, 0x52, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x22, 0xc6, 0x01, - 0x0a, 0x0b, 0x44, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x41, 0x70, 0x70, 0x73, 0x12, 0x16, 0x0a, - 0x06, 0x76, 0x73, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x76, - 0x73, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x76, 0x73, 0x63, 0x6f, 0x64, 0x65, 0x5f, - 0x69, 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, - 0x76, 0x73, 0x63, 0x6f, 0x64, 0x65, 0x49, 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72, 0x73, 0x12, 0x21, - 0x0a, 0x0c, 0x77, 0x65, 0x62, 0x5f, 0x74, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x6c, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x77, 0x65, 0x62, 0x54, 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, - 0x6c, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x73, 0x68, 0x5f, 0x68, 0x65, 0x6c, 0x70, 0x65, 0x72, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x73, 0x73, 0x68, 0x48, 0x65, 0x6c, 0x70, 0x65, 0x72, - 0x12, 0x34, 0x0a, 0x16, 0x70, 0x6f, 0x72, 0x74, 0x5f, 0x66, 0x6f, 0x72, 0x77, 0x61, 0x72, 0x64, - 0x69, 0x6e, 0x67, 0x5f, 0x68, 0x65, 0x6c, 0x70, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, - 0x52, 0x14, 0x70, 0x6f, 0x72, 0x74, 0x46, 0x6f, 0x72, 0x77, 0x61, 0x72, 0x64, 0x69, 0x6e, 0x67, - 0x48, 0x65, 0x6c, 0x70, 0x65, 0x72, 0x22, 0x2f, 0x0a, 0x03, 0x45, 0x6e, 0x76, 0x12, 0x12, 0x0a, - 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, - 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x9f, 0x02, 0x0a, 0x06, 0x53, 0x63, 0x72, 0x69, - 0x70, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x6e, 0x61, - 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, - 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x69, 0x63, 0x6f, 0x6e, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x69, 0x63, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x63, 0x72, - 0x69, 0x70, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x72, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x63, 0x72, 0x6f, 0x6e, 0x12, 0x2c, 0x0a, 0x12, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x62, - 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x5f, 0x6c, 0x6f, 0x67, 0x69, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x08, 0x52, 0x10, 0x73, 0x74, 0x61, 0x72, 0x74, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x4c, 0x6f, - 0x67, 0x69, 0x6e, 0x12, 0x20, 0x0a, 0x0c, 0x72, 0x75, 0x6e, 0x5f, 0x6f, 0x6e, 0x5f, 0x73, 0x74, - 0x61, 0x72, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x72, 0x75, 0x6e, 0x4f, 0x6e, - 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x1e, 0x0a, 0x0b, 0x72, 0x75, 0x6e, 0x5f, 0x6f, 0x6e, 0x5f, - 0x73, 0x74, 0x6f, 0x70, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x72, 0x75, 0x6e, 0x4f, - 0x6e, 0x53, 0x74, 0x6f, 0x70, 0x12, 0x27, 0x0a, 0x0f, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, - 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0e, - 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x53, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x12, 0x19, - 0x0a, 0x08, 0x6c, 0x6f, 0x67, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x07, 0x6c, 0x6f, 0x67, 0x50, 0x61, 0x74, 0x68, 0x22, 0x6e, 0x0a, 0x0c, 0x44, 0x65, 0x76, - 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x29, 0x0a, 0x10, 0x77, 0x6f, 0x72, - 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x66, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x46, 0x6f, - 0x6c, 0x64, 0x65, 0x72, 0x12, 0x1f, 0x0a, 0x0b, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x5f, 0x70, - 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x50, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0xba, 0x03, 0x0a, 0x03, 0x41, 0x70, - 0x70, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x6c, 0x75, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x73, 0x6c, 0x75, 0x67, 0x12, 0x21, 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, - 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x69, 0x73, - 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, - 0x61, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, - 0x6e, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x75, 0x72, 0x6c, 0x12, 0x12, 0x0a, 0x04, 0x69, 0x63, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x04, 0x69, 0x63, 0x6f, 0x6e, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x75, 0x62, 0x64, - 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x73, 0x75, 0x62, - 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x3a, 0x0a, 0x0b, 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, - 0x63, 0x68, 0x65, 0x63, 0x6b, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, - 0x63, 0x68, 0x65, 0x63, 0x6b, 0x52, 0x0b, 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x63, 0x68, 0x65, - 0x63, 0x6b, 0x12, 0x41, 0x0a, 0x0d, 0x73, 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x6c, 0x65, - 0x76, 0x65, 0x6c, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x53, 0x68, 0x61, 0x72, 0x69, - 0x6e, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x0c, 0x73, 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, - 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, - 0x6c, 0x18, 0x09, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, - 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x03, - 0x52, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x68, 0x69, 0x64, 0x64, 0x65, - 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x68, 0x69, 0x64, 0x64, 0x65, 0x6e, 0x12, - 0x2f, 0x0a, 0x07, 0x6f, 0x70, 0x65, 0x6e, 0x5f, 0x69, 0x6e, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0e, - 0x32, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, - 0x70, 0x70, 0x4f, 0x70, 0x65, 0x6e, 0x49, 0x6e, 0x52, 0x06, 0x6f, 0x70, 0x65, 0x6e, 0x49, 0x6e, - 0x12, 0x14, 0x0a, 0x05, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x0e, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x59, 0x0a, 0x0b, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, - 0x63, 0x68, 0x65, 0x63, 0x6b, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x1a, 0x0a, 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72, - 0x76, 0x61, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72, - 0x76, 0x61, 0x6c, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, - 0x64, 0x22, 0x92, 0x03, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12, - 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x2a, 0x0a, 0x06, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, - 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x52, 0x06, 0x61, 0x67, 0x65, 0x6e, - 0x74, 0x73, 0x12, 0x3a, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x4d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x12, - 0x0a, 0x04, 0x68, 0x69, 0x64, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x04, 0x68, 0x69, - 0x64, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x69, 0x63, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x04, 0x69, 0x63, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, - 0x63, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x69, - 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x64, - 0x61, 0x69, 0x6c, 0x79, 0x5f, 0x63, 0x6f, 0x73, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x05, 0x52, - 0x09, 0x64, 0x61, 0x69, 0x6c, 0x79, 0x43, 0x6f, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x6f, - 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0a, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x1a, 0x69, 0x0a, 0x08, 0x4d, - 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, - 0x1c, 0x0a, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x08, 0x52, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x12, 0x17, 0x0a, - 0x07, 0x69, 0x73, 0x5f, 0x6e, 0x75, 0x6c, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, - 0x69, 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x22, 0x5e, 0x0a, 0x06, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, - 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, - 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, - 0x6f, 0x6e, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x6b, 0x65, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x64, 0x69, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x03, 0x64, 0x69, 0x72, 0x22, 0x31, 0x0a, 0x04, 0x52, 0x6f, 0x6c, 0x65, 0x12, 0x12, - 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x12, 0x15, 0x0a, 0x06, 0x6f, 0x72, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x05, 0x6f, 0x72, 0x67, 0x49, 0x64, 0x22, 0x48, 0x0a, 0x15, 0x52, 0x75, 0x6e, - 0x6e, 0x69, 0x6e, 0x67, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x41, 0x75, 0x74, 0x68, 0x54, 0x6f, 0x6b, - 0x65, 0x6e, 0x12, 0x19, 0x0a, 0x08, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x14, 0x0a, - 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, - 0x6b, 0x65, 0x6e, 0x22, 0x22, 0x0a, 0x10, 0x41, 0x49, 0x54, 0x61, 0x73, 0x6b, 0x53, 0x69, 0x64, - 0x65, 0x62, 0x61, 0x72, 0x41, 0x70, 0x70, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x58, 0x0a, 0x06, 0x41, 0x49, 0x54, 0x61, 0x73, - 0x6b, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, - 0x64, 0x12, 0x3e, 0x0a, 0x0b, 0x73, 0x69, 0x64, 0x65, 0x62, 0x61, 0x72, 0x5f, 0x61, 0x70, 0x70, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x49, 0x54, 0x61, 0x73, 0x6b, 0x53, 0x69, 0x64, 0x65, 0x62, - 0x61, 0x72, 0x41, 0x70, 0x70, 0x52, 0x0a, 0x73, 0x69, 0x64, 0x65, 0x62, 0x61, 0x72, 0x41, 0x70, - 0x70, 0x22, 0xca, 0x09, 0x0a, 0x08, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1b, - 0x0a, 0x09, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x08, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x55, 0x72, 0x6c, 0x12, 0x53, 0x0a, 0x14, 0x77, - 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x13, 0x77, 0x6f, 0x72, - 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, - 0x12, 0x25, 0x0a, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6e, 0x61, - 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, - 0x61, 0x63, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x77, 0x6f, 0x72, 0x6b, 0x73, - 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, - 0x12, 0x21, 0x0a, 0x0c, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x69, 0x64, - 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x49, 0x64, 0x12, 0x2c, 0x0a, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x10, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x49, - 0x64, 0x12, 0x32, 0x0a, 0x15, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, - 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, - 0x45, 0x6d, 0x61, 0x69, 0x6c, 0x12, 0x23, 0x0a, 0x0d, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x29, 0x0a, 0x10, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x09, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x48, 0x0a, 0x21, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x6f, 0x69, 0x64, 0x63, 0x5f, 0x61, 0x63, - 0x63, 0x65, 0x73, 0x73, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x1d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, - 0x4f, 0x69, 0x64, 0x63, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, - 0x41, 0x0a, 0x1d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, - 0x65, 0x72, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, - 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x54, 0x6f, 0x6b, - 0x65, 0x6e, 0x12, 0x1f, 0x0a, 0x0b, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x69, - 0x64, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, - 0x65, 0x49, 0x64, 0x12, 0x30, 0x0a, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x0d, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, - 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x34, 0x0a, 0x16, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x18, - 0x0e, 0x20, 0x03, 0x28, 0x09, 0x52, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x12, 0x42, 0x0a, 0x1e, 0x77, - 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x73, - 0x73, 0x68, 0x5f, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x0f, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, - 0x6e, 0x65, 0x72, 0x53, 0x73, 0x68, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, - 0x44, 0x0a, 0x1f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, - 0x65, 0x72, 0x5f, 0x73, 0x73, 0x68, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x5f, 0x6b, - 0x65, 0x79, 0x18, 0x10, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1b, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, - 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x73, 0x68, 0x50, 0x72, 0x69, 0x76, 0x61, - 0x74, 0x65, 0x4b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x11, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x10, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, - 0x64, 0x49, 0x64, 0x12, 0x3b, 0x0a, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x67, 0x69, 0x6e, 0x5f, 0x74, 0x79, 0x70, - 0x65, 0x18, 0x12, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x69, 0x6e, 0x54, 0x79, 0x70, 0x65, - 0x12, 0x4e, 0x0a, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, - 0x6e, 0x65, 0x72, 0x5f, 0x72, 0x62, 0x61, 0x63, 0x5f, 0x72, 0x6f, 0x6c, 0x65, 0x73, 0x18, 0x13, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x52, 0x6f, 0x6c, 0x65, 0x52, 0x17, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x52, 0x62, 0x61, 0x63, 0x52, 0x6f, 0x6c, 0x65, 0x73, - 0x12, 0x6d, 0x0a, 0x1e, 0x70, 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x74, 0x5f, 0x77, 0x6f, 0x72, - 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x73, 0x74, 0x61, - 0x67, 0x65, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x28, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x74, 0x57, - 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x53, 0x74, 0x61, - 0x67, 0x65, 0x52, 0x1b, 0x70, 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x74, 0x57, 0x6f, 0x72, 0x6b, - 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x53, 0x74, 0x61, 0x67, 0x65, 0x12, - 0x5d, 0x0a, 0x19, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x5f, 0x61, 0x67, 0x65, 0x6e, 0x74, - 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x18, 0x15, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x52, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x41, 0x75, 0x74, - 0x68, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x52, 0x16, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x41, - 0x67, 0x65, 0x6e, 0x74, 0x41, 0x75, 0x74, 0x68, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x22, 0x8a, - 0x01, 0x0a, 0x06, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x36, 0x0a, 0x17, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x61, 0x72, 0x63, - 0x68, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x15, 0x74, 0x65, 0x6d, 0x70, - 0x6c, 0x61, 0x74, 0x65, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, - 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, - 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x32, 0x0a, 0x15, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x67, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x22, 0x0e, 0x0a, 0x0c, 0x50, - 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0xa3, 0x02, 0x0a, 0x0d, - 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, - 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x12, 0x4c, 0x0a, 0x12, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, - 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x11, - 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, - 0x73, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x12, 0x54, 0x0a, 0x0e, 0x77, 0x6f, 0x72, - 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x61, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x2d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, - 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x2e, 0x57, 0x6f, - 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, - 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x1a, - 0x40, 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, - 0x01, 0x22, 0xbe, 0x03, 0x0a, 0x0b, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x12, 0x53, 0x0a, 0x15, 0x72, 0x69, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x52, 0x13, 0x72, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x43, 0x0a, 0x0f, 0x76, 0x61, 0x72, - 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0e, - 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x59, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, 0x20, 0x0a, + 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, + 0x12, 0x0a, 0x04, 0x69, 0x63, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x69, + 0x63, 0x6f, 0x6e, 0x22, 0x3b, 0x0a, 0x0f, 0x50, 0x72, 0x65, 0x73, 0x65, 0x74, 0x50, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x22, 0x47, 0x0a, 0x13, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x70, 0x6c, + 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x70, 0x61, 0x74, 0x68, 0x73, 0x18, 0x02, 0x20, 0x03, + 0x28, 0x09, 0x52, 0x05, 0x70, 0x61, 0x74, 0x68, 0x73, 0x22, 0x57, 0x0a, 0x0d, 0x56, 0x61, 0x72, + 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, + 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, + 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, + 0x76, 0x65, 0x22, 0x4a, 0x0a, 0x03, 0x4c, 0x6f, 0x67, 0x12, 0x2b, 0x0a, 0x05, 0x6c, 0x65, 0x76, + 0x65, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, + 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x22, 0x37, + 0x0a, 0x14, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, + 0x74, 0x79, 0x41, 0x75, 0x74, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, + 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x69, 0x6e, 0x73, + 0x74, 0x61, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x22, 0x4a, 0x0a, 0x1c, 0x45, 0x78, 0x74, 0x65, 0x72, + 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x6f, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x61, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x6f, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x61, 0x6c, 0x22, 0x49, 0x0a, 0x14, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, + 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x0e, 0x0a, 0x02, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x21, 0x0a, 0x0c, 0x61, + 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0b, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0xda, + 0x08, 0x0a, 0x05, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x2d, 0x0a, 0x03, + 0x65, 0x6e, 0x76, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x45, 0x6e, + 0x76, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x03, 0x65, 0x6e, 0x76, 0x12, 0x29, 0x0a, 0x10, 0x6f, + 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x6f, 0x70, 0x65, 0x72, 0x61, 0x74, 0x69, 0x6e, 0x67, + 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x12, 0x22, 0x0a, 0x0c, 0x61, 0x72, 0x63, 0x68, 0x69, 0x74, + 0x65, 0x63, 0x74, 0x75, 0x72, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x61, 0x72, + 0x63, 0x68, 0x69, 0x74, 0x65, 0x63, 0x74, 0x75, 0x72, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, + 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, + 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x24, 0x0a, 0x04, 0x61, 0x70, 0x70, 0x73, + 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x52, 0x04, 0x61, 0x70, 0x70, 0x73, 0x12, 0x16, + 0x0a, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, + 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x21, 0x0a, 0x0b, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, + 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0a, 0x69, + 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x49, 0x64, 0x12, 0x3c, 0x0a, 0x1a, 0x63, 0x6f, 0x6e, + 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x5f, + 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x05, 0x52, 0x18, 0x63, + 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, + 0x53, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x12, 0x2f, 0x0a, 0x13, 0x74, 0x72, 0x6f, 0x75, 0x62, + 0x6c, 0x65, 0x73, 0x68, 0x6f, 0x6f, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x0c, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x74, 0x72, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x73, 0x68, 0x6f, + 0x6f, 0x74, 0x69, 0x6e, 0x67, 0x55, 0x72, 0x6c, 0x12, 0x1b, 0x0a, 0x09, 0x6d, 0x6f, 0x74, 0x64, + 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6d, 0x6f, 0x74, + 0x64, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x37, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x18, 0x12, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x3b, + 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x61, 0x70, 0x70, 0x73, 0x18, 0x14, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x44, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x41, 0x70, 0x70, 0x73, 0x52, 0x0b, + 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x41, 0x70, 0x70, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x73, 0x18, 0x15, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x53, 0x63, 0x72, 0x69, 0x70, + 0x74, 0x52, 0x07, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x73, 0x12, 0x2f, 0x0a, 0x0a, 0x65, 0x78, + 0x74, 0x72, 0x61, 0x5f, 0x65, 0x6e, 0x76, 0x73, 0x18, 0x16, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x6e, 0x76, + 0x52, 0x09, 0x65, 0x78, 0x74, 0x72, 0x61, 0x45, 0x6e, 0x76, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x6f, + 0x72, 0x64, 0x65, 0x72, 0x18, 0x17, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6f, 0x72, 0x64, 0x65, + 0x72, 0x12, 0x53, 0x0a, 0x14, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x5f, 0x6d, + 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x18, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, + 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x69, 0x6e, + 0x67, 0x52, 0x13, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x4d, 0x6f, 0x6e, 0x69, + 0x74, 0x6f, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x3f, 0x0a, 0x0d, 0x64, 0x65, 0x76, 0x63, 0x6f, 0x6e, + 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x73, 0x18, 0x19, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x65, 0x76, 0x63, + 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x52, 0x0d, 0x64, 0x65, 0x76, 0x63, 0x6f, 0x6e, + 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x73, 0x12, 0x22, 0x0a, 0x0d, 0x61, 0x70, 0x69, 0x5f, 0x6b, + 0x65, 0x79, 0x5f, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x18, 0x1a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, + 0x61, 0x70, 0x69, 0x4b, 0x65, 0x79, 0x53, 0x63, 0x6f, 0x70, 0x65, 0x1a, 0xa3, 0x01, 0x0a, 0x08, + 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x21, 0x0a, 0x0c, 0x64, 0x69, + 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x0b, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, + 0x06, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, + 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, + 0x6c, 0x12, 0x18, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x03, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6f, + 0x72, 0x64, 0x65, 0x72, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6f, 0x72, 0x64, 0x65, + 0x72, 0x1a, 0x36, 0x0a, 0x08, 0x45, 0x6e, 0x76, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, + 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, + 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x42, 0x06, 0x0a, 0x04, 0x61, 0x75, 0x74, + 0x68, 0x4a, 0x04, 0x08, 0x0e, 0x10, 0x0f, 0x52, 0x12, 0x6c, 0x6f, 0x67, 0x69, 0x6e, 0x5f, 0x62, + 0x65, 0x66, 0x6f, 0x72, 0x65, 0x5f, 0x72, 0x65, 0x61, 0x64, 0x79, 0x22, 0x8f, 0x01, 0x0a, 0x13, + 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, + 0x69, 0x6e, 0x67, 0x12, 0x3a, 0x0a, 0x06, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x52, 0x06, 0x6d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x12, + 0x3c, 0x0a, 0x07, 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x22, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56, + 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x6f, 0x6e, + 0x69, 0x74, 0x6f, 0x72, 0x52, 0x07, 0x76, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x73, 0x22, 0x4f, 0x0a, + 0x15, 0x4d, 0x65, 0x6d, 0x6f, 0x72, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, + 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, + 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x22, 0x63, + 0x0a, 0x15, 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x65, + 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, 0x6e, + 0x61, 0x62, 0x6c, 0x65, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, + 0x6c, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, + 0x6f, 0x6c, 0x64, 0x22, 0xc6, 0x01, 0x0a, 0x0b, 0x44, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x41, + 0x70, 0x70, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x76, 0x73, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x08, 0x52, 0x06, 0x76, 0x73, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x76, + 0x73, 0x63, 0x6f, 0x64, 0x65, 0x5f, 0x69, 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, 0x76, 0x73, 0x63, 0x6f, 0x64, 0x65, 0x49, 0x6e, 0x73, 0x69, + 0x64, 0x65, 0x72, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x77, 0x65, 0x62, 0x5f, 0x74, 0x65, 0x72, 0x6d, + 0x69, 0x6e, 0x61, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x77, 0x65, 0x62, 0x54, + 0x65, 0x72, 0x6d, 0x69, 0x6e, 0x61, 0x6c, 0x12, 0x1d, 0x0a, 0x0a, 0x73, 0x73, 0x68, 0x5f, 0x68, + 0x65, 0x6c, 0x70, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x73, 0x73, 0x68, + 0x48, 0x65, 0x6c, 0x70, 0x65, 0x72, 0x12, 0x34, 0x0a, 0x16, 0x70, 0x6f, 0x72, 0x74, 0x5f, 0x66, + 0x6f, 0x72, 0x77, 0x61, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x68, 0x65, 0x6c, 0x70, 0x65, 0x72, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x14, 0x70, 0x6f, 0x72, 0x74, 0x46, 0x6f, 0x72, 0x77, + 0x61, 0x72, 0x64, 0x69, 0x6e, 0x67, 0x48, 0x65, 0x6c, 0x70, 0x65, 0x72, 0x22, 0x2f, 0x0a, 0x03, + 0x45, 0x6e, 0x76, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x9f, 0x02, + 0x0a, 0x06, 0x53, 0x63, 0x72, 0x69, 0x70, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x64, 0x69, 0x73, 0x70, + 0x6c, 0x61, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, + 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x69, + 0x63, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x69, 0x63, 0x6f, 0x6e, 0x12, + 0x16, 0x0a, 0x06, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x06, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x72, 0x6f, 0x6e, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x63, 0x72, 0x6f, 0x6e, 0x12, 0x2c, 0x0a, 0x12, 0x73, + 0x74, 0x61, 0x72, 0x74, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x5f, 0x6c, 0x6f, 0x67, 0x69, + 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x73, 0x74, 0x61, 0x72, 0x74, 0x42, 0x6c, + 0x6f, 0x63, 0x6b, 0x73, 0x4c, 0x6f, 0x67, 0x69, 0x6e, 0x12, 0x20, 0x0a, 0x0c, 0x72, 0x75, 0x6e, + 0x5f, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x0a, 0x72, 0x75, 0x6e, 0x4f, 0x6e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x12, 0x1e, 0x0a, 0x0b, 0x72, + 0x75, 0x6e, 0x5f, 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x6f, 0x70, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x09, 0x72, 0x75, 0x6e, 0x4f, 0x6e, 0x53, 0x74, 0x6f, 0x70, 0x12, 0x27, 0x0a, 0x0f, 0x74, + 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x5f, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x08, + 0x20, 0x01, 0x28, 0x05, 0x52, 0x0e, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x53, 0x65, 0x63, + 0x6f, 0x6e, 0x64, 0x73, 0x12, 0x19, 0x0a, 0x08, 0x6c, 0x6f, 0x67, 0x5f, 0x70, 0x61, 0x74, 0x68, + 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6c, 0x6f, 0x67, 0x50, 0x61, 0x74, 0x68, 0x22, + 0x6e, 0x0a, 0x0c, 0x44, 0x65, 0x76, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, + 0x29, 0x0a, 0x10, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x66, 0x6f, 0x6c, + 0x64, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x77, 0x6f, 0x72, 0x6b, 0x73, + 0x70, 0x61, 0x63, 0x65, 0x46, 0x6f, 0x6c, 0x64, 0x65, 0x72, 0x12, 0x1f, 0x0a, 0x0b, 0x63, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x50, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, + 0xba, 0x03, 0x0a, 0x03, 0x41, 0x70, 0x70, 0x12, 0x12, 0x0a, 0x04, 0x73, 0x6c, 0x75, 0x67, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x73, 0x6c, 0x75, 0x67, 0x12, 0x21, 0x0a, 0x0c, 0x64, + 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0b, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x18, + 0x0a, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x12, 0x0a, 0x04, 0x69, 0x63, + 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x69, 0x63, 0x6f, 0x6e, 0x12, 0x1c, + 0x0a, 0x09, 0x73, 0x75, 0x62, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, + 0x08, 0x52, 0x09, 0x73, 0x75, 0x62, 0x64, 0x6f, 0x6d, 0x61, 0x69, 0x6e, 0x12, 0x3a, 0x0a, 0x0b, + 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x18, 0x07, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, + 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x52, 0x0b, 0x68, 0x65, 0x61, + 0x6c, 0x74, 0x68, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x12, 0x41, 0x0a, 0x0d, 0x73, 0x68, 0x61, 0x72, + 0x69, 0x6e, 0x67, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, + 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, + 0x70, 0x53, 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x0c, 0x73, + 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x1a, 0x0a, 0x08, 0x65, + 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x18, 0x09, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x65, + 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, + 0x18, 0x0a, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x6f, 0x72, 0x64, 0x65, 0x72, 0x12, 0x16, 0x0a, + 0x06, 0x68, 0x69, 0x64, 0x64, 0x65, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x68, + 0x69, 0x64, 0x64, 0x65, 0x6e, 0x12, 0x2f, 0x0a, 0x07, 0x6f, 0x70, 0x65, 0x6e, 0x5f, 0x69, 0x6e, + 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x4f, 0x70, 0x65, 0x6e, 0x49, 0x6e, 0x52, 0x06, + 0x6f, 0x70, 0x65, 0x6e, 0x49, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x18, + 0x0d, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x12, 0x0e, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x59, 0x0a, 0x0b, + 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x12, 0x10, 0x0a, 0x03, 0x75, + 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x1a, 0x0a, + 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, + 0x08, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x76, 0x61, 0x6c, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x68, 0x72, + 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x68, + 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x22, 0x92, 0x03, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x2a, 0x0a, 0x06, + 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x67, 0x65, 0x6e, 0x74, + 0x52, 0x06, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x3a, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x69, 0x64, 0x65, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x04, 0x68, 0x69, 0x64, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x69, 0x63, 0x6f, 0x6e, + 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x69, 0x63, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, + 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x07, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0c, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x54, 0x79, 0x70, + 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x64, 0x61, 0x69, 0x6c, 0x79, 0x5f, 0x63, 0x6f, 0x73, 0x74, 0x18, + 0x08, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x64, 0x61, 0x69, 0x6c, 0x79, 0x43, 0x6f, 0x73, 0x74, + 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, + 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x50, 0x61, 0x74, + 0x68, 0x1a, 0x69, 0x0a, 0x08, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x10, 0x0a, + 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, + 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, + 0x76, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, + 0x69, 0x76, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x69, 0x73, 0x5f, 0x6e, 0x75, 0x6c, 0x6c, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x69, 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x22, 0x5e, 0x0a, 0x06, + 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x18, + 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x64, 0x69, + 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x64, 0x69, 0x72, 0x22, 0x31, 0x0a, 0x04, + 0x52, 0x6f, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x15, 0x0a, 0x06, 0x6f, 0x72, 0x67, 0x5f, + 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6f, 0x72, 0x67, 0x49, 0x64, 0x22, + 0x48, 0x0a, 0x15, 0x52, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x41, + 0x75, 0x74, 0x68, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x19, 0x0a, 0x08, 0x61, 0x67, 0x65, 0x6e, + 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x61, 0x67, 0x65, 0x6e, + 0x74, 0x49, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x22, 0x0a, 0x10, 0x41, 0x49, 0x54, + 0x61, 0x73, 0x6b, 0x53, 0x69, 0x64, 0x65, 0x62, 0x61, 0x72, 0x41, 0x70, 0x70, 0x12, 0x0e, 0x0a, + 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x58, 0x0a, + 0x06, 0x41, 0x49, 0x54, 0x61, 0x73, 0x6b, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x3e, 0x0a, 0x0b, 0x73, 0x69, 0x64, 0x65, 0x62, + 0x61, 0x72, 0x5f, 0x61, 0x70, 0x70, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x49, 0x54, 0x61, 0x73, + 0x6b, 0x53, 0x69, 0x64, 0x65, 0x62, 0x61, 0x72, 0x41, 0x70, 0x70, 0x52, 0x0a, 0x73, 0x69, 0x64, + 0x65, 0x62, 0x61, 0x72, 0x41, 0x70, 0x70, 0x22, 0xca, 0x09, 0x0a, 0x08, 0x4d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0x12, 0x1b, 0x0a, 0x09, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x5f, 0x75, 0x72, + 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x55, 0x72, + 0x6c, 0x12, 0x53, 0x0a, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, + 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, + 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x57, 0x6f, + 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, + 0x6e, 0x52, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, + 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x25, 0x0a, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, + 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x27, 0x0a, + 0x0f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x12, 0x21, 0x0a, 0x0c, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x77, 0x6f, + 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x49, 0x64, 0x12, 0x2c, 0x0a, 0x12, 0x77, 0x6f, 0x72, + 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, + 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, + 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x49, 0x64, 0x12, 0x32, 0x0a, 0x15, 0x77, 0x6f, 0x72, 0x6b, 0x73, + 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x65, 0x6d, 0x61, 0x69, 0x6c, + 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x45, 0x6d, 0x61, 0x69, 0x6c, 0x12, 0x23, 0x0a, 0x0d, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x08, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0c, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, + 0x12, 0x29, 0x0a, 0x10, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x76, 0x65, 0x72, + 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x48, 0x0a, 0x21, 0x77, + 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x6f, + 0x69, 0x64, 0x63, 0x5f, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, + 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x4f, 0x69, 0x64, 0x63, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, + 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x41, 0x0a, 0x1d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1a, 0x77, 0x6f, + 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x1f, 0x0a, 0x0b, 0x74, 0x65, 0x6d, 0x70, + 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x74, + 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x49, 0x64, 0x12, 0x30, 0x0a, 0x14, 0x77, 0x6f, 0x72, + 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, + 0x65, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x34, 0x0a, 0x16, 0x77, + 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x67, + 0x72, 0x6f, 0x75, 0x70, 0x73, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x09, 0x52, 0x14, 0x77, 0x6f, 0x72, + 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x47, 0x72, 0x6f, 0x75, 0x70, + 0x73, 0x12, 0x42, 0x0a, 0x1e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, + 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x73, 0x73, 0x68, 0x5f, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, + 0x6b, 0x65, 0x79, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, + 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x73, 0x68, 0x50, 0x75, 0x62, 0x6c, + 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x44, 0x0a, 0x1f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x73, 0x73, 0x68, 0x5f, 0x70, 0x72, 0x69, + 0x76, 0x61, 0x74, 0x65, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x10, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1b, + 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x73, + 0x68, 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x12, 0x77, + 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x69, + 0x64, 0x18, 0x11, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x49, 0x64, 0x12, 0x3b, 0x0a, 0x1a, 0x77, 0x6f, 0x72, + 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x67, + 0x69, 0x6e, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x12, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x77, + 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, + 0x69, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x4e, 0x0a, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x72, 0x62, 0x61, 0x63, 0x5f, 0x72, + 0x6f, 0x6c, 0x65, 0x73, 0x18, 0x13, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x6f, 0x6c, 0x65, 0x52, 0x17, 0x77, + 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x52, 0x62, 0x61, + 0x63, 0x52, 0x6f, 0x6c, 0x65, 0x73, 0x12, 0x6d, 0x0a, 0x1e, 0x70, 0x72, 0x65, 0x62, 0x75, 0x69, + 0x6c, 0x74, 0x5f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x62, 0x75, 0x69, + 0x6c, 0x64, 0x5f, 0x73, 0x74, 0x61, 0x67, 0x65, 0x18, 0x14, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x28, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x65, + 0x62, 0x75, 0x69, 0x6c, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, + 0x69, 0x6c, 0x64, 0x53, 0x74, 0x61, 0x67, 0x65, 0x52, 0x1b, 0x70, 0x72, 0x65, 0x62, 0x75, 0x69, + 0x6c, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, + 0x53, 0x74, 0x61, 0x67, 0x65, 0x12, 0x5d, 0x0a, 0x19, 0x72, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, + 0x5f, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x74, 0x6f, 0x6b, 0x65, + 0x6e, 0x73, 0x18, 0x15, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x75, 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x41, 0x67, + 0x65, 0x6e, 0x74, 0x41, 0x75, 0x74, 0x68, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x52, 0x16, 0x72, 0x75, + 0x6e, 0x6e, 0x69, 0x6e, 0x67, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x41, 0x75, 0x74, 0x68, 0x54, 0x6f, + 0x6b, 0x65, 0x6e, 0x73, 0x22, 0x8a, 0x01, 0x0a, 0x06, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, + 0x36, 0x0a, 0x17, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, + 0x52, 0x15, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x32, 0x0a, + 0x15, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x67, + 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, + 0x6c, 0x22, 0x0e, 0x0a, 0x0c, 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x22, 0xa3, 0x02, 0x0a, 0x0d, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, + 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x4c, 0x0a, 0x12, 0x74, 0x65, 0x6d, + 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x18, + 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x61, 0x72, 0x69, + 0x61, 0x62, 0x6c, 0x65, 0x52, 0x11, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x61, + 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, + 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x12, + 0x54, 0x0a, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x61, 0x67, + 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, + 0x65, 0x74, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, + 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x54, 0x61, 0x67, 0x73, 0x1a, 0x40, 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, + 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xbe, 0x03, 0x0a, 0x0b, 0x50, 0x6c, 0x61, 0x6e, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, + 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, + 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x53, 0x0a, 0x15, 0x72, 0x69, + 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x13, 0x72, 0x69, 0x63, 0x68, + 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, + 0x43, 0x0a, 0x0f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, + 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0e, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x73, 0x12, 0x59, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, + 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, + 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, + 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x12, + 0x5b, 0x0a, 0x19, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x52, 0x17, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x50, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x2a, 0x0a, 0x11, + 0x6f, 0x6d, 0x69, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, + 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x6f, 0x6d, 0x69, 0x74, 0x4d, 0x6f, 0x64, + 0x75, 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x22, 0xc1, 0x05, 0x0a, 0x0c, 0x50, 0x6c, 0x61, + 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, + 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, + 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, + 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, + 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, + 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, + 0x65, 0x72, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x06, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, + 0x67, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x07, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, + 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x70, 0x72, 0x65, 0x73, 0x65, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x2e, 0x50, 0x72, 0x65, 0x73, 0x65, 0x74, 0x52, 0x07, 0x70, 0x72, 0x65, 0x73, 0x65, 0x74, 0x73, + 0x12, 0x12, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, + 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x55, 0x0a, 0x15, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x5f, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0a, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, + 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x14, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, + 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x6d, + 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x0b, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x2a, + 0x0a, 0x11, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, 0x68, + 0x61, 0x73, 0x68, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, + 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x48, 0x61, 0x73, 0x68, 0x12, 0x20, 0x0a, 0x0c, 0x68, 0x61, + 0x73, 0x5f, 0x61, 0x69, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x0a, 0x68, 0x61, 0x73, 0x41, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x2e, 0x0a, 0x08, + 0x61, 0x69, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x49, 0x54, + 0x61, 0x73, 0x6b, 0x52, 0x07, 0x61, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x2e, 0x0a, 0x13, + 0x68, 0x61, 0x73, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x67, 0x65, + 0x6e, 0x74, 0x73, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, 0x68, 0x61, 0x73, 0x45, 0x78, + 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x41, 0x0a, 0x0c, + 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x08, + 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, + 0xee, 0x02, 0x0a, 0x0d, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, + 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, + 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, 0x0a, + 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, + 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, + 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, - 0x65, 0x72, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, - 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x12, 0x5b, 0x0a, 0x19, 0x70, 0x72, 0x65, - 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x17, 0x70, - 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x2a, 0x0a, 0x11, 0x6f, 0x6d, 0x69, 0x74, 0x5f, 0x6d, - 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, - 0x08, 0x52, 0x0f, 0x6f, 0x6d, 0x69, 0x74, 0x4d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x46, 0x69, 0x6c, - 0x65, 0x73, 0x22, 0x91, 0x05, 0x0a, 0x0c, 0x50, 0x6c, 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, - 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3a, - 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, - 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, - 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, - 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x12, 0x2d, 0x0a, - 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, - 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, - 0x69, 0x6e, 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x2d, 0x0a, 0x07, - 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x6f, 0x64, 0x75, - 0x6c, 0x65, 0x52, 0x07, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x70, - 0x72, 0x65, 0x73, 0x65, 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x65, 0x73, 0x65, - 0x74, 0x52, 0x07, 0x70, 0x72, 0x65, 0x73, 0x65, 0x74, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x6c, - 0x61, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x55, - 0x0a, 0x15, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x72, 0x65, 0x70, 0x6c, 0x61, - 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x52, - 0x14, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, - 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x21, 0x0a, 0x0c, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x5f, - 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0b, 0x6d, 0x6f, 0x64, - 0x75, 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x2a, 0x0a, 0x11, 0x6d, 0x6f, 0x64, 0x75, - 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x0c, 0x20, - 0x01, 0x28, 0x0c, 0x52, 0x0f, 0x6d, 0x6f, 0x64, 0x75, 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, - 0x48, 0x61, 0x73, 0x68, 0x12, 0x20, 0x0a, 0x0c, 0x68, 0x61, 0x73, 0x5f, 0x61, 0x69, 0x5f, 0x74, - 0x61, 0x73, 0x6b, 0x73, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x68, 0x61, 0x73, 0x41, - 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x61, 0x69, 0x5f, 0x74, 0x61, 0x73, - 0x6b, 0x73, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x49, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x07, 0x61, - 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x22, 0x41, 0x0a, 0x0c, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, - 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, - 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0xee, 0x02, 0x0a, 0x0d, 0x41, 0x70, - 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, - 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0a, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, - 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, - 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, - 0x65, 0x72, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, - 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, - 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x74, - 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, + 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, + 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, + 0x73, 0x12, 0x2d, 0x0a, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x06, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x2e, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, + 0x12, 0x2e, 0x0a, 0x08, 0x61, 0x69, 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x07, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x2e, 0x41, 0x49, 0x54, 0x61, 0x73, 0x6b, 0x52, 0x07, 0x61, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, + 0x22, 0xfa, 0x01, 0x0a, 0x06, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x12, 0x30, 0x0a, 0x05, 0x73, + 0x74, 0x61, 0x72, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, + 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x2c, 0x0a, + 0x03, 0x65, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, + 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x61, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x72, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, + 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x12, 0x2e, 0x0a, + 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, 0x6e, - 0x67, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x2e, 0x0a, 0x08, 0x61, 0x69, - 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x49, 0x54, 0x61, 0x73, - 0x6b, 0x52, 0x07, 0x61, 0x69, 0x54, 0x61, 0x73, 0x6b, 0x73, 0x22, 0xfa, 0x01, 0x0a, 0x06, 0x54, - 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x12, 0x30, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x2c, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x52, 0x03, 0x65, 0x6e, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, - 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x12, 0x2e, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, - 0x18, 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, 0x65, - 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0x0f, 0x0a, 0x0d, 0x43, 0x61, 0x6e, 0x63, 0x65, - 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x8c, 0x02, 0x0a, 0x07, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x06, 0x63, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x12, 0x31, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x67, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0x0f, 0x0a, + 0x0d, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x8c, + 0x02, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x06, 0x63, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, + 0x00, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x31, 0x0a, 0x05, 0x70, 0x61, 0x72, + 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2e, 0x0a, 0x04, + 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x31, 0x0a, 0x05, + 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x12, + 0x34, 0x0a, 0x06, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x61, + 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x06, 0x63, + 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xc9, 0x02, + 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6c, 0x6f, + 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x03, 0x6c, 0x6f, 0x67, + 0x12, 0x32, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, + 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x05, 0x70, + 0x61, 0x72, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, - 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2e, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, - 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x31, 0x0a, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x48, 0x00, 0x52, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x12, 0x34, 0x0a, 0x06, 0x63, 0x61, 0x6e, - 0x63, 0x65, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x06, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x42, - 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xc9, 0x02, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, - 0x4c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x03, 0x6c, 0x6f, 0x67, 0x12, 0x32, 0x0a, 0x05, 0x70, 0x61, - 0x72, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, - 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2f, - 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x43, - 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, - 0x32, 0x0a, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, - 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, - 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x05, 0x61, 0x70, - 0x70, 0x6c, 0x79, 0x12, 0x3a, 0x0a, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x75, 0x70, 0x6c, 0x6f, - 0x61, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, - 0x64, 0x48, 0x00, 0x52, 0x0a, 0x64, 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x12, - 0x3a, 0x0a, 0x0b, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x5f, 0x70, 0x69, 0x65, 0x63, 0x65, 0x18, 0x06, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x50, 0x69, 0x65, 0x63, 0x65, 0x48, 0x00, 0x52, - 0x0a, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x50, 0x69, 0x65, 0x63, 0x65, 0x42, 0x06, 0x0a, 0x04, 0x74, - 0x79, 0x70, 0x65, 0x22, 0x9c, 0x01, 0x0a, 0x0a, 0x44, 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, - 0x61, 0x64, 0x12, 0x3c, 0x0a, 0x0b, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x74, 0x79, 0x70, - 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, - 0x54, 0x79, 0x70, 0x65, 0x52, 0x0a, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x54, 0x79, 0x70, 0x65, - 0x12, 0x1b, 0x0a, 0x09, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0c, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x48, 0x61, 0x73, 0x68, 0x12, 0x1b, 0x0a, - 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, - 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x68, - 0x75, 0x6e, 0x6b, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x63, 0x68, 0x75, 0x6e, - 0x6b, 0x73, 0x22, 0x67, 0x0a, 0x0a, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x50, 0x69, 0x65, 0x63, 0x65, - 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, - 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x0e, 0x66, 0x75, 0x6c, 0x6c, 0x5f, 0x64, 0x61, 0x74, - 0x61, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x66, 0x75, - 0x6c, 0x6c, 0x44, 0x61, 0x74, 0x61, 0x48, 0x61, 0x73, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x69, - 0x65, 0x63, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, - 0x0a, 0x70, 0x69, 0x65, 0x63, 0x65, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x2a, 0xa8, 0x01, 0x0a, 0x11, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x46, 0x6f, 0x72, 0x6d, 0x54, 0x79, 0x70, - 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x46, 0x41, 0x55, 0x4c, 0x54, 0x10, 0x00, 0x12, 0x0e, - 0x0a, 0x0a, 0x46, 0x4f, 0x52, 0x4d, 0x5f, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x01, 0x12, 0x09, - 0x0a, 0x05, 0x52, 0x41, 0x44, 0x49, 0x4f, 0x10, 0x02, 0x12, 0x0c, 0x0a, 0x08, 0x44, 0x52, 0x4f, - 0x50, 0x44, 0x4f, 0x57, 0x4e, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, 0x49, 0x4e, 0x50, 0x55, 0x54, - 0x10, 0x04, 0x12, 0x0c, 0x0a, 0x08, 0x54, 0x45, 0x58, 0x54, 0x41, 0x52, 0x45, 0x41, 0x10, 0x05, - 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x4c, 0x49, 0x44, 0x45, 0x52, 0x10, 0x06, 0x12, 0x0c, 0x0a, 0x08, - 0x43, 0x48, 0x45, 0x43, 0x4b, 0x42, 0x4f, 0x58, 0x10, 0x07, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x57, - 0x49, 0x54, 0x43, 0x48, 0x10, 0x08, 0x12, 0x0d, 0x0a, 0x09, 0x54, 0x41, 0x47, 0x53, 0x45, 0x4c, - 0x45, 0x43, 0x54, 0x10, 0x09, 0x12, 0x0f, 0x0a, 0x0b, 0x4d, 0x55, 0x4c, 0x54, 0x49, 0x53, 0x45, - 0x4c, 0x45, 0x43, 0x54, 0x10, 0x0a, 0x2a, 0x3f, 0x0a, 0x08, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, - 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x54, 0x52, 0x41, 0x43, 0x45, 0x10, 0x00, 0x12, 0x09, 0x0a, - 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, 0x46, 0x4f, - 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x41, 0x52, 0x4e, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, - 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, 0x2a, 0x3b, 0x0a, 0x0f, 0x41, 0x70, 0x70, 0x53, 0x68, - 0x61, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x4f, 0x57, - 0x4e, 0x45, 0x52, 0x10, 0x00, 0x12, 0x11, 0x0a, 0x0d, 0x41, 0x55, 0x54, 0x48, 0x45, 0x4e, 0x54, - 0x49, 0x43, 0x41, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x50, 0x55, 0x42, 0x4c, - 0x49, 0x43, 0x10, 0x02, 0x2a, 0x35, 0x0a, 0x09, 0x41, 0x70, 0x70, 0x4f, 0x70, 0x65, 0x6e, 0x49, - 0x6e, 0x12, 0x0e, 0x0a, 0x06, 0x57, 0x49, 0x4e, 0x44, 0x4f, 0x57, 0x10, 0x00, 0x1a, 0x02, 0x08, - 0x01, 0x12, 0x0f, 0x0a, 0x0b, 0x53, 0x4c, 0x49, 0x4d, 0x5f, 0x57, 0x49, 0x4e, 0x44, 0x4f, 0x57, - 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x54, 0x41, 0x42, 0x10, 0x02, 0x2a, 0x37, 0x0a, 0x13, 0x57, - 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, - 0x6f, 0x6e, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x54, 0x41, 0x52, 0x54, 0x10, 0x00, 0x12, 0x08, 0x0a, - 0x04, 0x53, 0x54, 0x4f, 0x50, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x53, 0x54, 0x52, - 0x4f, 0x59, 0x10, 0x02, 0x2a, 0x3e, 0x0a, 0x1b, 0x50, 0x72, 0x65, 0x62, 0x75, 0x69, 0x6c, 0x74, - 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x53, 0x74, - 0x61, 0x67, 0x65, 0x12, 0x08, 0x0a, 0x04, 0x4e, 0x4f, 0x4e, 0x45, 0x10, 0x00, 0x12, 0x0a, 0x0a, - 0x06, 0x43, 0x52, 0x45, 0x41, 0x54, 0x45, 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x43, 0x4c, 0x41, - 0x49, 0x4d, 0x10, 0x02, 0x2a, 0x35, 0x0a, 0x0b, 0x54, 0x69, 0x6d, 0x69, 0x6e, 0x67, 0x53, 0x74, - 0x61, 0x74, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x54, 0x41, 0x52, 0x54, 0x45, 0x44, 0x10, 0x00, - 0x12, 0x0d, 0x0a, 0x09, 0x43, 0x4f, 0x4d, 0x50, 0x4c, 0x45, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, - 0x0a, 0x0a, 0x06, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, 0x02, 0x2a, 0x47, 0x0a, 0x0e, 0x44, - 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x17, 0x0a, - 0x13, 0x55, 0x50, 0x4c, 0x4f, 0x41, 0x44, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, - 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x1c, 0x0a, 0x18, 0x55, 0x50, 0x4c, 0x4f, 0x41, 0x44, - 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4d, 0x4f, 0x44, 0x55, 0x4c, 0x45, 0x5f, 0x46, 0x49, 0x4c, - 0x45, 0x53, 0x10, 0x01, 0x32, 0x49, 0x0a, 0x0b, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x12, 0x3a, 0x0a, 0x07, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, - 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x28, 0x01, 0x30, 0x01, 0x42, - 0x30, 0x5a, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, - 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, + 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x32, 0x0a, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, + 0x48, 0x00, 0x52, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x12, 0x3a, 0x0a, 0x0b, 0x64, 0x61, 0x74, + 0x61, 0x5f, 0x75, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x61, 0x74, + 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x48, 0x00, 0x52, 0x0a, 0x64, 0x61, 0x74, 0x61, 0x55, + 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x3a, 0x0a, 0x0b, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x5f, 0x70, + 0x69, 0x65, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x50, 0x69, + 0x65, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0a, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x50, 0x69, 0x65, 0x63, + 0x65, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x9c, 0x01, 0x0a, 0x0a, 0x44, 0x61, + 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x3c, 0x0a, 0x0b, 0x75, 0x70, 0x6c, 0x6f, + 0x61, 0x64, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1b, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x44, 0x61, 0x74, 0x61, + 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x54, 0x79, 0x70, 0x65, 0x52, 0x0a, 0x75, 0x70, 0x6c, 0x6f, + 0x61, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x68, + 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x64, 0x61, 0x74, 0x61, 0x48, + 0x61, 0x73, 0x68, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x53, 0x69, 0x7a, 0x65, + 0x12, 0x16, 0x0a, 0x06, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, + 0x52, 0x06, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x73, 0x22, 0x67, 0x0a, 0x0a, 0x43, 0x68, 0x75, 0x6e, + 0x6b, 0x50, 0x69, 0x65, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x0e, 0x66, 0x75, + 0x6c, 0x6c, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0c, 0x52, 0x0c, 0x66, 0x75, 0x6c, 0x6c, 0x44, 0x61, 0x74, 0x61, 0x48, 0x61, 0x73, 0x68, + 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x69, 0x65, 0x63, 0x65, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x70, 0x69, 0x65, 0x63, 0x65, 0x49, 0x6e, 0x64, 0x65, + 0x78, 0x2a, 0xa8, 0x01, 0x0a, 0x11, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x46, + 0x6f, 0x72, 0x6d, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x46, 0x41, 0x55, + 0x4c, 0x54, 0x10, 0x00, 0x12, 0x0e, 0x0a, 0x0a, 0x46, 0x4f, 0x52, 0x4d, 0x5f, 0x45, 0x52, 0x52, + 0x4f, 0x52, 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x52, 0x41, 0x44, 0x49, 0x4f, 0x10, 0x02, 0x12, + 0x0c, 0x0a, 0x08, 0x44, 0x52, 0x4f, 0x50, 0x44, 0x4f, 0x57, 0x4e, 0x10, 0x03, 0x12, 0x09, 0x0a, + 0x05, 0x49, 0x4e, 0x50, 0x55, 0x54, 0x10, 0x04, 0x12, 0x0c, 0x0a, 0x08, 0x54, 0x45, 0x58, 0x54, + 0x41, 0x52, 0x45, 0x41, 0x10, 0x05, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x4c, 0x49, 0x44, 0x45, 0x52, + 0x10, 0x06, 0x12, 0x0c, 0x0a, 0x08, 0x43, 0x48, 0x45, 0x43, 0x4b, 0x42, 0x4f, 0x58, 0x10, 0x07, + 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x57, 0x49, 0x54, 0x43, 0x48, 0x10, 0x08, 0x12, 0x0d, 0x0a, 0x09, + 0x54, 0x41, 0x47, 0x53, 0x45, 0x4c, 0x45, 0x43, 0x54, 0x10, 0x09, 0x12, 0x0f, 0x0a, 0x0b, 0x4d, + 0x55, 0x4c, 0x54, 0x49, 0x53, 0x45, 0x4c, 0x45, 0x43, 0x54, 0x10, 0x0a, 0x2a, 0x3f, 0x0a, 0x08, + 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x54, 0x52, 0x41, 0x43, + 0x45, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x01, 0x12, 0x08, + 0x0a, 0x04, 0x49, 0x4e, 0x46, 0x4f, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x41, 0x52, 0x4e, + 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, 0x2a, 0x3b, 0x0a, + 0x0f, 0x41, 0x70, 0x70, 0x53, 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, + 0x12, 0x09, 0x0a, 0x05, 0x4f, 0x57, 0x4e, 0x45, 0x52, 0x10, 0x00, 0x12, 0x11, 0x0a, 0x0d, 0x41, + 0x55, 0x54, 0x48, 0x45, 0x4e, 0x54, 0x49, 0x43, 0x41, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0a, + 0x0a, 0x06, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x10, 0x02, 0x2a, 0x35, 0x0a, 0x09, 0x41, 0x70, + 0x70, 0x4f, 0x70, 0x65, 0x6e, 0x49, 0x6e, 0x12, 0x0e, 0x0a, 0x06, 0x57, 0x49, 0x4e, 0x44, 0x4f, + 0x57, 0x10, 0x00, 0x1a, 0x02, 0x08, 0x01, 0x12, 0x0f, 0x0a, 0x0b, 0x53, 0x4c, 0x49, 0x4d, 0x5f, + 0x57, 0x49, 0x4e, 0x44, 0x4f, 0x57, 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x54, 0x41, 0x42, 0x10, + 0x02, 0x2a, 0x37, 0x0a, 0x13, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x72, + 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x54, 0x41, 0x52, + 0x54, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x53, 0x54, 0x4f, 0x50, 0x10, 0x01, 0x12, 0x0b, 0x0a, + 0x07, 0x44, 0x45, 0x53, 0x54, 0x52, 0x4f, 0x59, 0x10, 0x02, 0x2a, 0x3e, 0x0a, 0x1b, 0x50, 0x72, + 0x65, 0x62, 0x75, 0x69, 0x6c, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x42, + 0x75, 0x69, 0x6c, 0x64, 0x53, 0x74, 0x61, 0x67, 0x65, 0x12, 0x08, 0x0a, 0x04, 0x4e, 0x4f, 0x4e, + 0x45, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x43, 0x52, 0x45, 0x41, 0x54, 0x45, 0x10, 0x01, 0x12, + 0x09, 0x0a, 0x05, 0x43, 0x4c, 0x41, 0x49, 0x4d, 0x10, 0x02, 0x2a, 0x35, 0x0a, 0x0b, 0x54, 0x69, + 0x6d, 0x69, 0x6e, 0x67, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x54, 0x41, + 0x52, 0x54, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x43, 0x4f, 0x4d, 0x50, 0x4c, 0x45, + 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, + 0x02, 0x2a, 0x47, 0x0a, 0x0e, 0x44, 0x61, 0x74, 0x61, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x54, + 0x79, 0x70, 0x65, 0x12, 0x17, 0x0a, 0x13, 0x55, 0x50, 0x4c, 0x4f, 0x41, 0x44, 0x5f, 0x54, 0x59, + 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x1c, 0x0a, 0x18, + 0x55, 0x50, 0x4c, 0x4f, 0x41, 0x44, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4d, 0x4f, 0x44, 0x55, + 0x4c, 0x45, 0x5f, 0x46, 0x49, 0x4c, 0x45, 0x53, 0x10, 0x01, 0x32, 0x49, 0x0a, 0x0b, 0x50, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x12, 0x3a, 0x0a, 0x07, 0x53, 0x65, 0x73, + 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x28, 0x01, 0x30, 0x01, 0x42, 0x30, 0x5a, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, + 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, + 0x76, 0x32, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x73, 0x64, + 0x6b, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/provisionersdk/proto/provisioner.proto b/provisionersdk/proto/provisioner.proto index a57983c21ad9b..b120ba1c0e607 100644 --- a/provisionersdk/proto/provisioner.proto +++ b/provisionersdk/proto/provisioner.proto @@ -101,6 +101,8 @@ message Preset { repeated PresetParameter parameters = 2; Prebuild prebuild = 3; bool default = 4; + string description = 5; + string icon = 6; } message PresetParameter { @@ -417,6 +419,7 @@ message PlanComplete { // See `hasAITaskResources` in provisioner/terraform/resources.go for more details. bool has_ai_tasks = 13; repeated provisioner.AITask ai_tasks = 14; + bool has_external_agents = 15; } // ApplyRequest asks the provisioner to apply the changes. Apply MUST be preceded by a successful plan request/response diff --git a/scaletest/agentconn/run.go b/scaletest/agentconn/run.go index dba21cc24e3a0..b0990d9cb11a6 100644 --- a/scaletest/agentconn/run.go +++ b/scaletest/agentconn/run.go @@ -89,7 +89,7 @@ func (r *Runner) Run(ctx context.Context, _ string, w io.Writer) error { // Ensure DERP for completeness. if r.cfg.ConnectionMode == ConnectionModeDerp { - status := conn.Status() + status := conn.TailnetConn().Status() if len(status.Peers()) != 1 { return xerrors.Errorf("check connection mode: expected 1 peer, got %d", len(status.Peers())) } @@ -133,7 +133,7 @@ func (r *Runner) Run(ctx context.Context, _ string, w io.Writer) error { return nil } -func waitForDisco(ctx context.Context, logs io.Writer, conn *workspacesdk.AgentConn) error { +func waitForDisco(ctx context.Context, logs io.Writer, conn workspacesdk.AgentConn) error { const pingAttempts = 10 const pingDelay = 1 * time.Second @@ -165,7 +165,7 @@ func waitForDisco(ctx context.Context, logs io.Writer, conn *workspacesdk.AgentC return nil } -func waitForDirectConnection(ctx context.Context, logs io.Writer, conn *workspacesdk.AgentConn) error { +func waitForDirectConnection(ctx context.Context, logs io.Writer, conn workspacesdk.AgentConn) error { const directConnectionAttempts = 30 const directConnectionDelay = 1 * time.Second @@ -174,7 +174,7 @@ func waitForDirectConnection(ctx context.Context, logs io.Writer, conn *workspac for i := 0; i < directConnectionAttempts; i++ { _, _ = fmt.Fprintf(logs, "\tDirect connection check %d/%d...\n", i+1, directConnectionAttempts) - status := conn.Status() + status := conn.TailnetConn().Status() var err error if len(status.Peers()) != 1 { @@ -207,7 +207,7 @@ func waitForDirectConnection(ctx context.Context, logs io.Writer, conn *workspac return nil } -func verifyConnection(ctx context.Context, logs io.Writer, conn *workspacesdk.AgentConn) error { +func verifyConnection(ctx context.Context, logs io.Writer, conn workspacesdk.AgentConn) error { const verifyConnectionAttempts = 30 const verifyConnectionDelay = 1 * time.Second @@ -249,7 +249,7 @@ func verifyConnection(ctx context.Context, logs io.Writer, conn *workspacesdk.Ag return nil } -func performInitialConnections(ctx context.Context, logs io.Writer, conn *workspacesdk.AgentConn, specs []Connection) error { +func performInitialConnections(ctx context.Context, logs io.Writer, conn workspacesdk.AgentConn, specs []Connection) error { if len(specs) == 0 { return nil } @@ -287,7 +287,7 @@ func performInitialConnections(ctx context.Context, logs io.Writer, conn *worksp return nil } -func holdConnection(ctx context.Context, logs io.Writer, conn *workspacesdk.AgentConn, holdDur time.Duration, specs []Connection) error { +func holdConnection(ctx context.Context, logs io.Writer, conn workspacesdk.AgentConn, holdDur time.Duration, specs []Connection) error { ctx, span := tracing.StartSpan(ctx) defer span.End() @@ -364,7 +364,7 @@ func holdConnection(ctx context.Context, logs io.Writer, conn *workspacesdk.Agen return nil } -func agentHTTPClient(conn *workspacesdk.AgentConn) *http.Client { +func agentHTTPClient(conn workspacesdk.AgentConn) *http.Client { return &http.Client{ Transport: &http.Transport{ DisableKeepAlives: true, diff --git a/scaletest/terraform/action/.gitignore b/scaletest/terraform/action/.gitignore new file mode 100644 index 0000000000000..c45cf41694258 --- /dev/null +++ b/scaletest/terraform/action/.gitignore @@ -0,0 +1 @@ +*.tfvars diff --git a/scaletest/terraform/action/cf_dns.tf b/scaletest/terraform/action/cf_dns.tf index eaaff28ce03a0..664b909ae90b2 100644 --- a/scaletest/terraform/action/cf_dns.tf +++ b/scaletest/terraform/action/cf_dns.tf @@ -1,6 +1,10 @@ +data "cloudflare_zone" "domain" { + name = var.cloudflare_domain +} + resource "cloudflare_record" "coder" { for_each = local.deployments - zone_id = var.cloudflare_zone_id + zone_id = data.cloudflare_zone.domain.zone_id name = each.value.subdomain content = google_compute_address.coder[each.key].address type = "A" diff --git a/scaletest/terraform/action/main.tf b/scaletest/terraform/action/main.tf index 57a294710c5b5..cd26c7ec1ccd2 100644 --- a/scaletest/terraform/action/main.tf +++ b/scaletest/terraform/action/main.tf @@ -16,7 +16,7 @@ terraform { } // We use the kubectl provider to apply Custom Resources. - // The kubernetes provider requires the CRD is already present + // The kubernetes provider requires the CRD is already present // and would require a separate apply step beforehand. // https://github.com/hashicorp/terraform-provider-kubernetes/issues/1367 kubectl = { @@ -40,14 +40,19 @@ terraform { } } - required_version = "~> 1.9.0" + required_version = ">= 1.9.0" } provider "google" { } +data "google_secret_manager_secret_version_access" "cloudflare_api_token_dns" { + secret = "cloudflare-api-token-dns" + project = var.project_id +} + provider "cloudflare" { - api_token = var.cloudflare_api_token + api_token = coalesce(var.cloudflare_api_token, data.google_secret_manager_secret_version_access.cloudflare_api_token_dns.secret_data) } provider "kubernetes" { diff --git a/scaletest/terraform/action/scenarios.tf b/scaletest/terraform/action/scenarios.tf index bd22fa7c5b54f..b135b977047de 100644 --- a/scaletest/terraform/action/scenarios.tf +++ b/scaletest/terraform/action/scenarios.tf @@ -35,5 +35,40 @@ locals { max_connections = 500 } } + small = { + coder = { + nodepool_size = 3 + machine_type = "c2d-standard-8" + replicas = 3 + cpu_request = "4000m" + mem_request = "12Gi" + cpu_limit = "4000m" + mem_limit = "12Gi" + } + provisionerd = { + replicas = 5 + cpu_request = "100m" + mem_request = "256Mi" + cpu_limit = "1000m" + mem_limit = "1Gi" + } + workspaces = { + count_per_deployment = 10 + nodepool_size = 3 + machine_type = "c2d-standard-8" + cpu_request = "100m" + mem_request = "128Mi" + cpu_limit = "100m" + mem_limit = "128Mi" + } + misc = { + nodepool_size = 1 + machine_type = "c2d-standard-8" + } + cloudsql = { + tier = "db-custom-2-7680" + max_connections = 100 + } + } } } diff --git a/scaletest/terraform/action/vars.tf b/scaletest/terraform/action/vars.tf index 6788e843d8b6f..3952baab82b80 100644 --- a/scaletest/terraform/action/vars.tf +++ b/scaletest/terraform/action/vars.tf @@ -13,6 +13,7 @@ variable "scenario" { // GCP variable "project_id" { description = "The project in which to provision resources" + default = "coder-scaletest" } variable "k8s_version" { @@ -24,19 +25,14 @@ variable "k8s_version" { variable "cloudflare_api_token" { description = "Cloudflare API token." sensitive = true -} - -variable "cloudflare_email" { - description = "Cloudflare email address." - sensitive = true + # only override if you want to change the cloudflare_domain; pulls the token for scaletest.dev from Google Secrets + # Manager if null. + default = null } variable "cloudflare_domain" { description = "Cloudflare coder domain." -} - -variable "cloudflare_zone_id" { - description = "Cloudflare zone ID." + default = "scaletest.dev" } // Coder diff --git a/scripts/Dockerfile.base b/scripts/Dockerfile.base index 8bcb59c325b19..f5e89f8a048fa 100644 --- a/scripts/Dockerfile.base +++ b/scripts/Dockerfile.base @@ -1,7 +1,7 @@ # This is the base image used for Coder images. It's a multi-arch image that is # built in depot.dev for all supported architectures. Since it's built on real # hardware and not cross-compiled, it can have "RUN" commands. -FROM alpine:3.21.3 +FROM alpine:3.21.3@sha256:a8560b36e8b8210634f77d9f7f9efd7ffa463e380b75e2e74aff4511df3ef88c # We use a single RUN command to reduce the number of layers in the image. # NOTE: Keep the Terraform version in sync with minTerraformVersion and diff --git a/scripts/apidocgen/package.json b/scripts/apidocgen/package.json index cf8072904ba8a..4ab69c8f72442 100644 --- a/scripts/apidocgen/package.json +++ b/scripts/apidocgen/package.json @@ -8,7 +8,8 @@ }, "pnpm": { "overrides": { - "@babel/runtime": "7.26.10" + "@babel/runtime": "7.26.10", + "form-data": "4.0.4" } } } diff --git a/scripts/apidocgen/pnpm-lock.yaml b/scripts/apidocgen/pnpm-lock.yaml index 9d729e02a4bb9..619e9dc9f6a6c 100644 --- a/scripts/apidocgen/pnpm-lock.yaml +++ b/scripts/apidocgen/pnpm-lock.yaml @@ -8,6 +8,7 @@ overrides: semver: 7.5.3 jsonpointer: 5.0.1 '@babel/runtime': 7.26.10 + form-data: 4.0.4 importers: @@ -82,6 +83,10 @@ packages: peerDependencies: ajv: 4.11.8 - 6 + call-bind-apply-helpers@1.0.2: + resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==} + engines: {node: '>= 0.4'} + call-me-maybe@1.0.2: resolution: {integrity: sha512-HpX65o1Hnr9HH25ojC1YGs7HCQLq0GCOibSaWER0eNpgJ/Z1MZv2mTc7+xh6WOPxbRVcmgbv4hGU+uSQ/2xFZQ==} @@ -167,6 +172,10 @@ packages: engines: {'0': node >=0.2.6} hasBin: true + dunder-proto@1.0.1: + resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==} + engines: {node: '>= 0.4'} + duplexer@0.1.2: resolution: {integrity: sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==} @@ -179,6 +188,22 @@ packages: entities@2.0.3: resolution: {integrity: sha512-MyoZ0jgnLvB2X3Lg5HqpFmn1kybDiIfEQmKzTb5apr51Rb+T3KdmMiqa70T+bhGnyv7bQ6WMj2QMHpGMmlrUYQ==} + es-define-property@1.0.1: + resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==} + engines: {node: '>= 0.4'} + + es-errors@1.3.0: + resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} + engines: {node: '>= 0.4'} + + es-object-atoms@1.1.1: + resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} + engines: {node: '>= 0.4'} + + es-set-tostringtag@2.1.0: + resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==} + engines: {node: '>= 0.4'} + es6-promise@3.3.1: resolution: {integrity: sha512-SOp9Phqvqn7jtEUxPWdWfWoLmyt2VaJ6MpvP9Comy1MceMXqE6bxvaTu4iaxpYYPzhny28Lc+M87/c2cPK6lDg==} @@ -220,8 +245,8 @@ packages: foreach@2.0.6: resolution: {integrity: sha512-k6GAGDyqLe9JaebCsFCoudPPWfihKu8pylYXRlqP1J7ms39iPoTtk2fviNglIeQEwdh0bQeKJ01ZPyuyQvKzwg==} - form-data@3.0.0: - resolution: {integrity: sha512-CKMFDglpbMi6PyN+brwB9Q/GOw0eAnsrEZDgcsH5Krhz5Od/haKHAX0NmQfha2zPPz0JpWzA7GJHGSnvCRLWsg==} + form-data@4.0.4: + resolution: {integrity: sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==} engines: {node: '>= 6'} from@0.1.7: @@ -234,6 +259,9 @@ packages: resolution: {integrity: sha512-yI+wDwj0FsgX7tyIQJR+EP60R64evMSixtGb9AzGWjJVKlF5tCet95KomfqGBg/aIAG1Dhd6wjCOQe5HbX/qLA==} engines: {node: '>=0.10'} + function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + get-caller-file@1.0.3: resolution: {integrity: sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==} @@ -241,13 +269,25 @@ packages: resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} engines: {node: 6.* || 8.* || >= 10.*} + get-intrinsic@1.3.0: + resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} + engines: {node: '>= 0.4'} + get-own-enumerable-property-symbols@3.0.2: resolution: {integrity: sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==} + get-proto@1.0.1: + resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} + engines: {node: '>= 0.4'} + get-stream@4.1.0: resolution: {integrity: sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==} engines: {node: '>=6'} + gopd@1.2.0: + resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} + engines: {node: '>= 0.4'} + graceful-fs@4.2.11: resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} @@ -271,6 +311,18 @@ packages: resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==} engines: {node: '>=4'} + has-symbols@1.1.0: + resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} + engines: {node: '>= 0.4'} + + has-tostringtag@1.0.2: + resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==} + engines: {node: '>= 0.4'} + + hasown@2.0.2: + resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + engines: {node: '>= 0.4'} + highlightjs@9.16.2: resolution: {integrity: sha512-FK1vmMj8BbEipEy8DLIvp71t5UsC7n2D6En/UfM/91PCwmOpj6f2iu0Y0coRC62KSRHHC+dquM2xMULV/X7NFg==} deprecated: Use the 'highlight.js' package instead https://npm.im/highlight.js @@ -375,6 +427,10 @@ packages: resolution: {integrity: sha512-YWOP1j7UbDNz+TumYP1kpwnP0aEa711cJjrAQrzd0UXlbJfc5aAq0F/PZHjiioqDC1NKgvIMX+o+9Bk7yuM2dg==} hasBin: true + math-intrinsics@1.1.0: + resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==} + engines: {node: '>= 0.4'} + mdurl@1.0.1: resolution: {integrity: sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==} @@ -786,6 +842,11 @@ snapshots: jsonpointer: 5.0.1 leven: 3.1.0 + call-bind-apply-helpers@1.0.2: + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + call-me-maybe@1.0.2: {} camelcase@5.3.1: {} @@ -866,6 +927,12 @@ snapshots: dot@1.1.3: {} + dunder-proto@1.0.1: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-errors: 1.3.0 + gopd: 1.2.0 + duplexer@0.1.2: {} emoji-regex@8.0.0: {} @@ -876,6 +943,21 @@ snapshots: entities@2.0.3: {} + es-define-property@1.0.1: {} + + es-errors@1.3.0: {} + + es-object-atoms@1.1.1: + dependencies: + es-errors: 1.3.0 + + es-set-tostringtag@2.1.0: + dependencies: + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + has-tostringtag: 1.0.2 + hasown: 2.0.2 + es6-promise@3.3.1: {} escalade@3.1.1: {} @@ -921,10 +1003,12 @@ snapshots: foreach@2.0.6: {} - form-data@3.0.0: + form-data@4.0.4: dependencies: asynckit: 0.4.0 combined-stream: 1.0.8 + es-set-tostringtag: 2.1.0 + hasown: 2.0.2 mime-types: 2.1.35 from@0.1.7: {} @@ -940,16 +1024,38 @@ snapshots: transitivePeerDependencies: - mkdirp + function-bind@1.1.2: {} + get-caller-file@1.0.3: {} get-caller-file@2.0.5: {} + get-intrinsic@1.3.0: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-define-property: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + function-bind: 1.1.2 + get-proto: 1.0.1 + gopd: 1.2.0 + has-symbols: 1.1.0 + hasown: 2.0.2 + math-intrinsics: 1.1.0 + get-own-enumerable-property-symbols@3.0.2: {} + get-proto@1.0.1: + dependencies: + dunder-proto: 1.0.1 + es-object-atoms: 1.1.1 + get-stream@4.1.0: dependencies: pump: 3.0.0 + gopd@1.2.0: {} + graceful-fs@4.2.11: {} grapheme-splitter@1.0.4: {} @@ -967,6 +1073,16 @@ snapshots: has-flag@3.0.0: {} + has-symbols@1.1.0: {} + + has-tostringtag@1.0.2: + dependencies: + has-symbols: 1.1.0 + + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + highlightjs@9.16.2: {} http2-client@1.3.5: {} @@ -977,7 +1093,7 @@ snapshots: commander: 2.20.3 debug: 2.6.9 event-stream: 3.3.4 - form-data: 3.0.0 + form-data: 4.0.4 fs-readfile-promise: 2.0.1 fs-writefile-promise: 1.0.3(mkdirp@3.0.1) har-validator: 5.1.5 @@ -1063,6 +1179,8 @@ snapshots: mdurl: 1.0.1 uc.micro: 1.0.6 + math-intrinsics@1.1.0: {} + mdurl@1.0.1: {} mem@4.3.0: diff --git a/scripts/check_unstaged.sh b/scripts/check_unstaged.sh index 90d4cad87e4fc..715c84c374acf 100755 --- a/scripts/check_unstaged.sh +++ b/scripts/check_unstaged.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail # shellcheck source=scripts/lib.sh diff --git a/scripts/dbgen/constraint.go b/scripts/dbgen/constraint.go new file mode 100644 index 0000000000000..6853f9bb26ad5 --- /dev/null +++ b/scripts/dbgen/constraint.go @@ -0,0 +1,239 @@ +package main + +import ( + "bufio" + "bytes" + "fmt" + "os" + "path/filepath" + "regexp" + "strings" + + "golang.org/x/tools/imports" + "golang.org/x/xerrors" +) + +type constraintType string + +const ( + constraintTypeUnique constraintType = "unique" + constraintTypeForeignKey constraintType = "foreign_key" + constraintTypeCheck constraintType = "check" +) + +func (c constraintType) goType() string { + switch c { + case constraintTypeUnique: + return "UniqueConstraint" + case constraintTypeForeignKey: + return "ForeignKeyConstraint" + case constraintTypeCheck: + return "CheckConstraint" + default: + panic(fmt.Sprintf("unknown constraint type: %s", c)) + } +} + +func (c constraintType) goTypeDescriptionPart() string { + switch c { + case constraintTypeUnique: + return "unique" + case constraintTypeForeignKey: + return "foreign key" + case constraintTypeCheck: + return "check" + default: + panic(fmt.Sprintf("unknown constraint type: %s", c)) + } +} + +func (c constraintType) goEnumNamePrefix() string { + switch c { + case constraintTypeUnique: + return "Unique" + case constraintTypeForeignKey: + return "ForeignKey" + case constraintTypeCheck: + return "Check" + default: + panic(fmt.Sprintf("unknown constraint type: %s", c)) + } +} + +type constraint struct { + name string + // comment is typically the full constraint, but for check constraints it's + // instead the table name. + comment string +} + +// queryToConstraintsFn is a function that takes a query and returns zero or +// more constraints if the query matches the wanted constraint type. If the +// query does not match the wanted constraint type, the function should return +// no constraints. +type queryToConstraintsFn func(query string) ([]constraint, error) + +// generateConstraints does the following: +// 1. Read the dump.sql file +// 2. Parse the file into each query +// 3. Pass each query to the constraintFn function +// 4. Generate the enum from the returned constraints +// 5. Write the generated code to the output path +func generateConstraints(dumpPath, outputPath string, outputConstraintType constraintType, fn queryToConstraintsFn) error { + dump, err := os.Open(dumpPath) + if err != nil { + return err + } + defer dump.Close() + + var allConstraints []constraint + + dumpScanner := bufio.NewScanner(dump) + query := "" + for dumpScanner.Scan() { + line := strings.TrimSpace(dumpScanner.Text()) + switch { + case strings.HasPrefix(line, "--"): + case line == "": + case strings.HasSuffix(line, ";"): + query += line + newConstraints, err := fn(query) + query = "" + if err != nil { + return xerrors.Errorf("process query %q: %w", query, err) + } + allConstraints = append(allConstraints, newConstraints...) + default: + query += line + " " + } + } + if err = dumpScanner.Err(); err != nil { + return err + } + + s := &bytes.Buffer{} + + _, _ = fmt.Fprintf(s, `// Code generated by scripts/dbgen/main.go. DO NOT EDIT. +package database + +// %[1]s represents a named %[2]s constraint on a table. +type %[1]s string + +// %[1]s enums. +const ( +`, outputConstraintType.goType(), outputConstraintType.goTypeDescriptionPart()) + + for _, c := range allConstraints { + constName := outputConstraintType.goEnumNamePrefix() + nameFromSnakeCase(c.name) + _, _ = fmt.Fprintf(s, "\t%[1]s %[2]s = %[3]q // %[4]s\n", constName, outputConstraintType.goType(), c.name, c.comment) + } + _, _ = fmt.Fprint(s, ")\n") + + data, err := imports.Process(outputPath, s.Bytes(), &imports.Options{ + Comments: true, + }) + if err != nil { + return err + } + return os.WriteFile(outputPath, data, 0o600) +} + +// generateUniqueConstraints generates the UniqueConstraint enum. +func generateUniqueConstraints() error { + localPath, err := localFilePath() + if err != nil { + return err + } + databasePath := filepath.Join(localPath, "..", "..", "..", "coderd", "database") + dumpPath := filepath.Join(databasePath, "dump.sql") + outputPath := filepath.Join(databasePath, "unique_constraint.go") + + fn := func(query string) ([]constraint, error) { + if strings.Contains(query, "UNIQUE") || strings.Contains(query, "PRIMARY KEY") { + name := "" + switch { + case strings.Contains(query, "ALTER TABLE") && strings.Contains(query, "ADD CONSTRAINT"): + name = strings.Split(query, " ")[6] + case strings.Contains(query, "CREATE UNIQUE INDEX"): + name = strings.Split(query, " ")[3] + default: + return nil, xerrors.Errorf("unknown unique constraint format: %s", query) + } + return []constraint{ + { + name: name, + comment: query, + }, + }, nil + } + return nil, nil + } + return generateConstraints(dumpPath, outputPath, constraintTypeUnique, fn) +} + +// generateForeignKeyConstraints generates the ForeignKeyConstraint enum. +func generateForeignKeyConstraints() error { + localPath, err := localFilePath() + if err != nil { + return err + } + databasePath := filepath.Join(localPath, "..", "..", "..", "coderd", "database") + dumpPath := filepath.Join(databasePath, "dump.sql") + outputPath := filepath.Join(databasePath, "foreign_key_constraint.go") + + fn := func(query string) ([]constraint, error) { + if strings.Contains(query, "FOREIGN KEY") { + name := "" + switch { + case strings.Contains(query, "ALTER TABLE") && strings.Contains(query, "ADD CONSTRAINT"): + name = strings.Split(query, " ")[6] + default: + return nil, xerrors.Errorf("unknown foreign key constraint format: %s", query) + } + return []constraint{ + { + name: name, + comment: query, + }, + }, nil + } + return []constraint{}, nil + } + return generateConstraints(dumpPath, outputPath, constraintTypeForeignKey, fn) +} + +// generateCheckConstraints generates the CheckConstraint enum. +func generateCheckConstraints() error { + localPath, err := localFilePath() + if err != nil { + return err + } + databasePath := filepath.Join(localPath, "..", "..", "..", "coderd", "database") + dumpPath := filepath.Join(databasePath, "dump.sql") + outputPath := filepath.Join(databasePath, "check_constraint.go") + + var ( + tableRegex = regexp.MustCompile(`CREATE TABLE\s+([^\s]+)`) + checkRegex = regexp.MustCompile(`CONSTRAINT\s+([^\s]+)\s+CHECK`) + ) + fn := func(query string) ([]constraint, error) { + constraints := []constraint{} + + tableMatches := tableRegex.FindStringSubmatch(query) + if len(tableMatches) > 0 { + table := tableMatches[1] + + // Find every CONSTRAINT xxx CHECK occurrence. + matches := checkRegex.FindAllStringSubmatch(query, -1) + for _, match := range matches { + constraints = append(constraints, constraint{ + name: match[1], + comment: table, + }) + } + } + return constraints, nil + } + + return generateConstraints(dumpPath, outputPath, constraintTypeCheck, fn) +} diff --git a/scripts/dbgen/main.go b/scripts/dbgen/main.go index 561a46199a6ef..f2f0c19b1fd0b 100644 --- a/scripts/dbgen/main.go +++ b/scripts/dbgen/main.go @@ -1,7 +1,6 @@ package main import ( - "bufio" "bytes" "fmt" "go/format" @@ -80,152 +79,12 @@ return %s return xerrors.Errorf("generate foreign key constraints: %w", err) } - return nil -} - -// generateUniqueConstraints generates the UniqueConstraint enum. -func generateUniqueConstraints() error { - localPath, err := localFilePath() - if err != nil { - return err - } - databasePath := filepath.Join(localPath, "..", "..", "..", "coderd", "database") - - dump, err := os.Open(filepath.Join(databasePath, "dump.sql")) - if err != nil { - return err - } - defer dump.Close() - - var uniqueConstraints []string - dumpScanner := bufio.NewScanner(dump) - query := "" - for dumpScanner.Scan() { - line := strings.TrimSpace(dumpScanner.Text()) - switch { - case strings.HasPrefix(line, "--"): - case line == "": - case strings.HasSuffix(line, ";"): - query += line - if strings.Contains(query, "UNIQUE") || strings.Contains(query, "PRIMARY KEY") { - uniqueConstraints = append(uniqueConstraints, query) - } - query = "" - default: - query += line + " " - } - } - if err = dumpScanner.Err(); err != nil { - return err - } - - s := &bytes.Buffer{} - - _, _ = fmt.Fprint(s, `// Code generated by scripts/dbgen/main.go. DO NOT EDIT. -package database -`) - _, _ = fmt.Fprint(s, ` -// UniqueConstraint represents a named unique constraint on a table. -type UniqueConstraint string - -// UniqueConstraint enums. -const ( -`) - for _, query := range uniqueConstraints { - name := "" - switch { - case strings.Contains(query, "ALTER TABLE") && strings.Contains(query, "ADD CONSTRAINT"): - name = strings.Split(query, " ")[6] - case strings.Contains(query, "CREATE UNIQUE INDEX"): - name = strings.Split(query, " ")[3] - default: - return xerrors.Errorf("unknown unique constraint format: %s", query) - } - _, _ = fmt.Fprintf(s, "\tUnique%s UniqueConstraint = %q // %s\n", nameFromSnakeCase(name), name, query) - } - _, _ = fmt.Fprint(s, ")\n") - - outputPath := filepath.Join(databasePath, "unique_constraint.go") - - data, err := imports.Process(outputPath, s.Bytes(), &imports.Options{ - Comments: true, - }) - if err != nil { - return err - } - return os.WriteFile(outputPath, data, 0o600) -} - -// generateForeignKeyConstraints generates the ForeignKeyConstraint enum. -func generateForeignKeyConstraints() error { - localPath, err := localFilePath() - if err != nil { - return err - } - databasePath := filepath.Join(localPath, "..", "..", "..", "coderd", "database") - - dump, err := os.Open(filepath.Join(databasePath, "dump.sql")) + err = generateCheckConstraints() if err != nil { - return err - } - defer dump.Close() - - var foreignKeyConstraints []string - dumpScanner := bufio.NewScanner(dump) - query := "" - for dumpScanner.Scan() { - line := strings.TrimSpace(dumpScanner.Text()) - switch { - case strings.HasPrefix(line, "--"): - case line == "": - case strings.HasSuffix(line, ";"): - query += line - if strings.Contains(query, "FOREIGN KEY") { - foreignKeyConstraints = append(foreignKeyConstraints, query) - } - query = "" - default: - query += line + " " - } + return xerrors.Errorf("generate check constraints: %w", err) } - if err := dumpScanner.Err(); err != nil { - return err - } - - s := &bytes.Buffer{} - - _, _ = fmt.Fprint(s, `// Code generated by scripts/dbgen/main.go. DO NOT EDIT. -package database -`) - _, _ = fmt.Fprint(s, ` -// ForeignKeyConstraint represents a named foreign key constraint on a table. -type ForeignKeyConstraint string - -// ForeignKeyConstraint enums. -const ( -`) - for _, query := range foreignKeyConstraints { - name := "" - switch { - case strings.Contains(query, "ALTER TABLE") && strings.Contains(query, "ADD CONSTRAINT"): - name = strings.Split(query, " ")[6] - default: - return xerrors.Errorf("unknown foreign key constraint format: %s", query) - } - _, _ = fmt.Fprintf(s, "\tForeignKey%s ForeignKeyConstraint = %q // %s\n", nameFromSnakeCase(name), name, query) - } - _, _ = fmt.Fprint(s, ")\n") - - outputPath := filepath.Join(databasePath, "foreign_key_constraint.go") - - data, err := imports.Process(outputPath, s.Bytes(), &imports.Options{ - Comments: true, - }) - if err != nil { - return err - } - return os.WriteFile(outputPath, data, 0o600) + return nil } type stubParams struct { diff --git a/scripts/develop.sh b/scripts/develop.sh index 5a802735c7c66..23efe67576813 100755 --- a/scripts/develop.sh +++ b/scripts/develop.sh @@ -72,9 +72,25 @@ if [ -n "${CODER_AGENT_URL:-}" ]; then fi # Preflight checks: ensure we have our required dependencies, and make sure nothing is listening on port 3000 or 8080 -dependencies curl git go make pnpm -curl --fail http://127.0.0.1:3000 >/dev/null 2>&1 && echo '== ERROR: something is listening on port 3000. Kill it and re-run this script.' && exit 1 -curl --fail http://127.0.0.1:8080 >/dev/null 2>&1 && echo '== ERROR: something is listening on port 8080. Kill it and re-run this script.' && exit 1 +dependencies curl git go jq make pnpm + +if curl --silent --fail http://127.0.0.1:3000; then + # Check if this is the Coder development server. + if curl --silent --fail http://127.0.0.1:3000/api/v2/buildinfo 2>&1 | jq -r '.version' >/dev/null 2>&1; then + echo '== INFO: Coder development server is already running on port 3000!' && exit 0 + else + echo '== ERROR: something is listening on port 3000. Kill it and re-run this script.' && exit 1 + fi +fi + +if curl --fail http://127.0.0.1:8080 >/dev/null 2>&1; then + # Check if this is the Coder development frontend. + if curl --silent --fail http://127.0.0.1:8080/api/v2/buildinfo 2>&1 | jq -r '.version' >/dev/null 2>&1; then + echo '== INFO: Coder development frontend is already running on port 8080!' && exit 0 + else + echo '== ERROR: something is listening on port 8080. Kill it and re-run this script.' && exit 1 + fi +fi # Compile the CLI binary. This should also compile the frontend and refresh # node_modules if necessary. diff --git a/scripts/fixtures.sh b/scripts/fixtures.sh new file mode 100755 index 0000000000000..377cecde71f64 --- /dev/null +++ b/scripts/fixtures.sh @@ -0,0 +1,56 @@ +#!/usr/bin/env bash + +SCRIPT_DIR=$(dirname "${BASH_SOURCE[0]}") +# shellcheck source=scripts/lib.sh +source "${SCRIPT_DIR}/lib.sh" + +CODER_DEV_SHIM="${PROJECT_ROOT}/scripts/coder-dev.sh" + +add_license() { + CODER_DEV_LICENSE="${CODER_DEV_LICENSE:-}" + if [[ -z "${CODER_DEV_LICENSE}" ]]; then + echo "No license provided. Please set CODER_DEV_LICENSE environment variable." + exit 1 + fi + + if [[ "${CODER_BUILD_AGPL:-0}" -gt "0" ]]; then + echo "Not adding a license in AGPL build mode." + exit 0 + fi + + NUM_LICENSES=$("${CODER_DEV_SHIM}" licenses list -o json | jq -r '. | length') + if [[ "${NUM_LICENSES}" -gt "0" ]]; then + echo "License already exists. Skipping addition." + exit 0 + fi + + echo -n "${CODER_DEV_LICENSE}" | "${CODER_DEV_SHIM}" licenses add -f - || { + echo "ERROR: failed to add license. Try adding one manually." + exit 1 + } + + exit 0 +} + +main() { + if [[ $# -eq 0 ]]; then + echo "Available fixtures:" + echo " license: adds the license from CODER_DEV_LICENSE" + exit 0 + fi + + [[ -n "${VERBOSE:-}" ]] && set -x + set -euo pipefail + + case "$1" in + "license") + add_license + ;; + *) + echo "Unknown fixture: $1" + exit 1 + ;; + esac +} + +main "$@" diff --git a/scripts/rules.go b/scripts/rules.go index 4175287567502..dce029a102d01 100644 --- a/scripts/rules.go +++ b/scripts/rules.go @@ -37,7 +37,9 @@ func dbauthzAuthorizationContext(m dsl.Matcher) { Where( m["c"].Type.Implements("context.Context") && // Only report on functions that start with "As". - m["f"].Text.Matches("^As"), + m["f"].Text.Matches("^As") && + // Ignore test usages of dbauthz contexts. + !m.File().Name.Matches(`_test\.go$`), ). // Instructions for fixing the lint error should be included on the dangerous function. Report("Using '$f' is dangerous and should be accompanied by a comment explaining why it's ok and a nolint.") @@ -559,3 +561,11 @@ func noPTYInAgent(m dsl.Matcher) { ). Report("The agent and its subpackages should not use pty.Command or pty.CommandContext directly. Consider using an agentexec.Execer instead.") } + +func noTestutilRunRetry(m dsl.Matcher) { + m.Import("github.com/coder/coder/v2/testutil") + m.Match( + `testutil.RunRetry($*_)`, + ). + Report("testutil.RunRetry should not be used without good reason. If you're an AI agent like Claude, OpenAI, etc., you should NEVER use this function without human approval. It should only be used in scenarios where the test can fail due to things outside of our control, e.g. UDP packet loss under system load. DO NOT use it for your average flaky test. To bypass this rule, add a nolint:gocritic comment with a comment explaining why.") +} diff --git a/scripts/testidp/main.go b/scripts/testidp/main.go index a6188ace2ce9b..64f2ddb30f2d3 100644 --- a/scripts/testidp/main.go +++ b/scripts/testidp/main.go @@ -96,7 +96,9 @@ func RunIDP() func(t *testing.T) { "groups": []string{"testidp", "qa", "engineering"}, "roles": []string{"testidp", "admin", "higher_power"}, }), - oidctest.WithDefaultIDClaims(jwt.MapClaims{}), + oidctest.WithDefaultIDClaims(jwt.MapClaims{ + "sub": uuid.MustParse("26c6a19c-b9b8-493b-a991-88a4c3310314"), + }), oidctest.WithDefaultExpire(*expiry), oidctest.WithStaticCredentials(*clientID, *clientSecret), oidctest.WithIssuer("http://localhost:4500"), diff --git a/scripts/zizmor.sh b/scripts/zizmor.sh new file mode 100755 index 0000000000000..a9326e2ee0868 --- /dev/null +++ b/scripts/zizmor.sh @@ -0,0 +1,46 @@ +#!/usr/bin/env bash + +# Usage: ./zizmor.sh [args...] +# +# This script is a wrapper around the zizmor Docker image. Zizmor lints GitHub +# actions workflows. +# +# We use Docker to run zizmor since it's written in Rust and is difficult to +# install on Ubuntu runners without building it with a Rust toolchain, which +# takes a long time. +# +# The repo is mounted at /repo and the working directory is set to /repo. + +set -euo pipefail +# shellcheck source=scripts/lib.sh +source "$(dirname "${BASH_SOURCE[0]}")/lib.sh" + +cdroot + +image_tag="ghcr.io/zizmorcore/zizmor:1.11.0" +docker_args=( + "--rm" + "--volume" "$(pwd):/repo" + "--workdir" "/repo" + "--network" "host" +) + +if [[ -t 0 ]]; then + docker_args+=("-it") +fi + +# If no GH_TOKEN is set, try to get one from `gh auth token`. +if [[ "${GH_TOKEN:-}" == "" ]] && command -v gh &>/dev/null; then + set +e + GH_TOKEN="$(gh auth token)" + export GH_TOKEN + set -e +fi + +# Pass through the GitHub token if it's set, which allows zizmor to scan +# imported workflows too. +if [[ "${GH_TOKEN:-}" != "" ]]; then + docker_args+=("--env" "GH_TOKEN") +fi + +logrun exec docker run "${docker_args[@]}" "$image_tag" "$@" diff --git a/site/.storybook/main.js b/site/.storybook/main.js deleted file mode 100644 index 0f3bf46e3a0b7..0000000000000 --- a/site/.storybook/main.js +++ /dev/null @@ -1,41 +0,0 @@ -import turbosnap from "vite-plugin-turbosnap"; - -module.exports = { - stories: ["../src/**/*.stories.tsx"], - - addons: [ - "@chromatic-com/storybook", - { - name: "@storybook/addon-essentials", - options: { - backgrounds: false, - }, - }, - "@storybook/addon-links", - "@storybook/addon-mdx-gfm", - "@storybook/addon-themes", - "@storybook/addon-actions", - "@storybook/addon-interactions", - "storybook-addon-remix-react-router", - ], - - staticDirs: ["../static"], - - framework: { - name: "@storybook/react-vite", - options: {}, - }, - - async viteFinal(config, { configType }) { - config.plugins = config.plugins || []; - if (configType === "PRODUCTION") { - config.plugins.push( - turbosnap({ - rootDir: config.root || "", - }), - ); - } - config.server.allowedHosts = [".coder"]; - return config; - }, -}; diff --git a/site/.storybook/main.ts b/site/.storybook/main.ts new file mode 100644 index 0000000000000..00d97a245891c --- /dev/null +++ b/site/.storybook/main.ts @@ -0,0 +1,29 @@ +export default { + stories: ["../src/**/*.stories.tsx"], + + addons: [ + "@chromatic-com/storybook", + "@storybook/addon-docs", + "@storybook/addon-links", + "@storybook/addon-themes", + "storybook-addon-remix-react-router", + ], + + staticDirs: ["../static"], + + framework: { + name: "@storybook/react-vite", + options: {}, + }, + + async viteFinal(config) { + // Storybook seems to strip this setting out of our Vite config. We need to + // put it back in order to be able to access Storybook with Coder Desktop or + // port sharing. + config.server = { + ...config.server, + allowedHosts: [".coder", ".dev.coder.com"], + }; + return config; + }, +} satisfies import("@storybook/react-vite").StorybookConfig; diff --git a/site/.storybook/preview.jsx b/site/.storybook/preview.tsx similarity index 63% rename from site/.storybook/preview.jsx rename to site/.storybook/preview.tsx index 8d8a37ecd2fbf..6871898a54c32 100644 --- a/site/.storybook/preview.jsx +++ b/site/.storybook/preview.tsx @@ -1,28 +1,10 @@ -// @ts-check -/** - * @file Defines the main configuration file for all of our Storybook tests. - * This file must be a JSX/JS file, but we can at least add some type safety via - * the ts-check directive. - * @see {@link https://storybook.js.org/docs/configure#configure-story-rendering} - * - * @typedef {import("react").ReactElement} ReactElement - * @typedef {import("react").PropsWithChildren} PropsWithChildren - * @typedef {import("react").FC} FC - * - * @typedef {import("@storybook/react").StoryContext} StoryContext - * @typedef {import("@storybook/react").Preview} Preview - * - * @typedef {(Story: FC, Context: StoryContext) => React.JSX.Element} Decorator A - * Storybook decorator function used to inject baseline data dependencies into - * our React components during testing. - */ import "../src/index.css"; import { ThemeProvider as EmotionThemeProvider } from "@emotion/react"; import CssBaseline from "@mui/material/CssBaseline"; import { ThemeProvider as MuiThemeProvider, StyledEngineProvider, - // biome-ignore lint/nursery/noRestrictedImports: we extend the MUI theme + // biome-ignore lint/style/noRestrictedImports: we extend the MUI theme } from "@mui/material/styles"; import { DecoratorHelpers } from "@storybook/addon-themes"; import isChromatic from "chromatic/isChromatic"; @@ -31,15 +13,12 @@ import { HelmetProvider } from "react-helmet-async"; import { QueryClient, QueryClientProvider } from "react-query"; import { withRouter } from "storybook-addon-remix-react-router"; import "theme/globalFonts"; +import type { Decorator, Loader, Parameters } from "@storybook/react-vite"; import themes from "../src/theme"; DecoratorHelpers.initializeThemeState(Object.keys(themes), "dark"); -/** @type {readonly Decorator[]} */ -export const decorators = [withRouter, withQuery, withHelmet, withTheme]; - -/** @type {Preview["parameters"]} */ -export const parameters = { +export const parameters: Parameters = { options: { storySort: { method: "alphabetical", @@ -83,26 +62,15 @@ export const parameters = { }, }; -/** - * There's a mismatch on the React Helmet return type that causes issues when - * mounting the component in JS files only. Have to do type assertion, which is - * especially ugly in JSDoc - */ -const SafeHelmetProvider = /** @type {FC} */ ( - /** @type {unknown} */ (HelmetProvider) -); - -/** @type {Decorator} */ -function withHelmet(Story) { +const withHelmet: Decorator = (Story) => { return ( - + - + ); -} +}; -/** @type {Decorator} */ -function withQuery(Story, { parameters }) { +const withQuery: Decorator = (Story, { parameters }) => { const queryClient = new QueryClient({ defaultOptions: { queries: { @@ -123,10 +91,9 @@ function withQuery(Story, { parameters }) { ); -} +}; -/** @type {Decorator} */ -function withTheme(Story, context) { +const withTheme: Decorator = (Story, context) => { const selectedTheme = DecoratorHelpers.pluckThemeFromContext(context); const { themeOverride } = DecoratorHelpers.useThemeParameters(); const selected = themeOverride || selectedTheme || "dark"; @@ -149,7 +116,14 @@ function withTheme(Story, context) { ); -} +}; + +export const decorators: Decorator[] = [ + withRouter, + withQuery, + withHelmet, + withTheme, +]; // Try to fix storybook rendering fonts inconsistently // https://www.chromatic.com/docs/font-loading/#solution-c-check-fonts-have-loaded-in-a-loader @@ -157,4 +131,5 @@ const fontLoader = async () => ({ fonts: await document.fonts.ready, }); -export const loaders = isChromatic() && document.fonts ? [fontLoader] : []; +export const loaders: Loader[] = + isChromatic() && document.fonts ? [fontLoader] : []; diff --git a/site/biome.jsonc b/site/biome.jsonc index bc6fa8de6e946..4c9cb18aa482b 100644 --- a/site/biome.jsonc +++ b/site/biome.jsonc @@ -1,54 +1,7 @@ { - "vcs": { - "enabled": true, - "useIgnoreFile": true, - "clientKind": "git", - "root": ".." - }, + "extends": "//", "files": { - "ignore": ["e2e/**/*Generated.ts", "pnpm-lock.yaml"], - "ignoreUnknown": true - }, - "linter": { - "rules": { - "a11y": { - "noSvgWithoutTitle": { "level": "off" }, - "useButtonType": { "level": "off" }, - "useSemanticElements": { "level": "off" } - }, - "correctness": { - "noUnusedImports": "warn" - }, - "style": { - "noNonNullAssertion": { "level": "off" }, - "noParameterAssign": { "level": "off" }, - "useDefaultParameterLast": { "level": "off" }, - "useSelfClosingElements": { "level": "off" } - }, - "suspicious": { - "noArrayIndexKey": { "level": "off" }, - "noConsoleLog": { "level": "error" }, - "noThenProperty": { "level": "off" } - }, - "nursery": { - "noRestrictedImports": { - "level": "error", - "options": { - "paths": { - "@mui/material": "Use @mui/material/ instead. See: https://material-ui.com/guides/minimizing-bundle-size/.", - "@mui/icons-material": "Use @mui/icons-material/ instead. See: https://material-ui.com/guides/minimizing-bundle-size/.", - "@mui/material/Avatar": "Use components/Avatar/Avatar instead.", - "@mui/material/Alert": "Use components/Alert/Alert instead.", - "@mui/material/Popover": "Use components/Popover/Popover instead.", - "@mui/material/Typography": "Use native HTML elements instead. Eg: ,

,

, etc.", - "@mui/material/Box": "Use a
instead.", - "@mui/material/styles": "Import from @emotion/react instead.", - "lodash": "Use lodash/ instead." - } - } - } - } - } + "includes": ["!e2e/**/*Generated.ts"] }, - "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json" + "$schema": "https://biomejs.dev/schemas/2.2.0/schema.json" } diff --git a/site/e2e/api.ts b/site/e2e/api.ts index 4d884a73cc1ac..342b08cb28914 100644 --- a/site/e2e/api.ts +++ b/site/e2e/api.ts @@ -8,9 +8,10 @@ import relativeTime from "dayjs/plugin/relativeTime"; dayjs.extend(duration); dayjs.extend(relativeTime); + import { humanDuration } from "utils/time"; import { coderPort, defaultPassword } from "./constants"; -import { type LoginOptions, findSessionToken, randomName } from "./helpers"; +import { findSessionToken, type LoginOptions, randomName } from "./helpers"; let currentOrgId: string; @@ -57,7 +58,7 @@ export const createOrganizationMember = async ({ password = defaultPassword, orgRoles, }: CreateOrganizationMemberOptions): Promise => { - const name = randomName(); + const _name = randomName(); const user = await API.createUser({ email, username, diff --git a/site/e2e/expectUrl.ts b/site/e2e/expectUrl.ts index 6ea1cb50b3083..f6bc3b9ef51dd 100644 --- a/site/e2e/expectUrl.ts +++ b/site/e2e/expectUrl.ts @@ -1,4 +1,4 @@ -import { type Page, expect } from "@playwright/test"; +import { expect, type Page } from "@playwright/test"; type PollingOptions = { timeout?: number; intervals?: number[] }; diff --git a/site/e2e/helpers.ts b/site/e2e/helpers.ts index a738899b25f2c..bd4aed8add812 100644 --- a/site/e2e/helpers.ts +++ b/site/e2e/helpers.ts @@ -3,7 +3,7 @@ import { randomUUID } from "node:crypto"; import net from "node:net"; import path from "node:path"; import { Duplex } from "node:stream"; -import { type BrowserContext, type Page, expect, test } from "@playwright/test"; +import { type BrowserContext, expect, type Page, test } from "@playwright/test"; import { API } from "api/api"; import type { UpdateTemplateMeta, @@ -29,8 +29,8 @@ import { expectUrl } from "./expectUrl"; import { Agent, type App, - AppSharingLevel, type ApplyComplete, + AppSharingLevel, type ExternalAuthProviderResource, type ParseComplete, type PlanComplete, @@ -127,6 +127,10 @@ export const createWorkspace = async ( const name = randomName(); await page.getByLabel("name").fill(name); + if (buildParameters.length > 0) { + await page.waitForSelector("form", { state: "visible" }); + } + await fillParameters(page, richParameters, buildParameters); if (useExternalAuth) { @@ -898,28 +902,29 @@ const fillParameters = async ( ); } - const parameterLabel = await page.waitForSelector( - `[data-testid='parameter-field-${richParameter.name}']`, - { state: "visible" }, + // Use modern locator approach instead of waitForSelector + const parameterLabel = page.getByTestId( + `parameter-field-${richParameter.name}`, ); + await expect(parameterLabel).toBeVisible(); if (richParameter.type === "bool") { - const parameterField = await parameterLabel.waitForSelector( - `[data-testid='parameter-field-bool'] .MuiRadio-root input[value='${buildParameter.value}']`, - ); + const parameterField = parameterLabel + .getByTestId("parameter-field-bool") + .locator(`.MuiRadio-root input[value='${buildParameter.value}']`); await parameterField.click(); } else if (richParameter.options.length > 0) { - const parameterField = await parameterLabel.waitForSelector( - `[data-testid='parameter-field-options'] .MuiRadio-root input[value='${buildParameter.value}']`, - ); + const parameterField = parameterLabel + .getByTestId("parameter-field-options") + .locator(`.MuiRadio-root input[value='${buildParameter.value}']`); await parameterField.click(); } else if (richParameter.type === "list(string)") { throw new Error("not implemented yet"); // FIXME } else { // text or number - const parameterField = await parameterLabel.waitForSelector( - "[data-testid='parameter-field-text'] input", - ); + const parameterField = parameterLabel + .getByTestId("parameter-field-text") + .locator("input"); await parameterField.fill(buildParameter.value); } } @@ -1203,3 +1208,48 @@ export async function addUserToOrganization( } await page.mouse.click(10, 10); // close the popover by clicking outside of it } + +/** + * disableDynamicParameters navigates to the template settings page and disables + * dynamic parameters by unchecking the "Enable dynamic parameters" checkbox. + */ +export const disableDynamicParameters = async ( + page: Page, + templateName: string, + orgName = defaultOrganizationName, +) => { + await page.goto(`/templates/${orgName}/${templateName}/settings`, { + waitUntil: "domcontentloaded", + }); + + await page.waitForSelector("form", { state: "visible" }); + + // Find and uncheck the "Enable dynamic parameters" checkbox + const dynamicParamsCheckbox = page.getByRole("checkbox", { + name: /Enable dynamic parameters for workspace creation/, + }); + + await dynamicParamsCheckbox.waitFor({ state: "visible" }); + + // If the checkbox is checked, uncheck it + if (await dynamicParamsCheckbox.isChecked()) { + await dynamicParamsCheckbox.click(); + } + + // Save the changes + const saveButton = page.getByRole("button", { name: /save/i }); + await saveButton.waitFor({ state: "visible" }); + await saveButton.click(); + + // Wait for the success message or page to update + await page + .locator("[role='alert']:has-text('Template updated successfully')") + .first() + .waitFor({ + state: "visible", + timeout: 15000, + }); + + // Additional wait to ensure the changes are persisted + await page.waitForTimeout(500); +}; diff --git a/site/e2e/playwright.config.ts b/site/e2e/playwright.config.ts index 4b3e5c5c86fc6..fffc80b160191 100644 --- a/site/e2e/playwright.config.ts +++ b/site/e2e/playwright.config.ts @@ -1,8 +1,8 @@ import * as path from "node:path"; import { defineConfig } from "@playwright/test"; import { - coderPort, coderdPProfPort, + coderPort, e2eFakeExperiment1, e2eFakeExperiment2, gitAuth, diff --git a/site/e2e/provisionerGenerated.ts b/site/e2e/provisionerGenerated.ts index 686dfb7031945..00b2050d94d98 100644 --- a/site/e2e/provisionerGenerated.ts +++ b/site/e2e/provisionerGenerated.ts @@ -162,6 +162,8 @@ export interface Preset { parameters: PresetParameter[]; prebuild: Prebuild | undefined; default: boolean; + description: string; + icon: string; } export interface PresetParameter { @@ -460,6 +462,7 @@ export interface PlanComplete { */ hasAiTasks: boolean; aiTasks: AITask[]; + hasExternalAgents: boolean; } /** @@ -715,6 +718,12 @@ export const Preset = { if (message.default === true) { writer.uint32(32).bool(message.default); } + if (message.description !== "") { + writer.uint32(42).string(message.description); + } + if (message.icon !== "") { + writer.uint32(50).string(message.icon); + } return writer; }, }; @@ -1387,6 +1396,9 @@ export const PlanComplete = { for (const v of message.aiTasks) { AITask.encode(v!, writer.uint32(114).fork()).ldelim(); } + if (message.hasExternalAgents === true) { + writer.uint32(120).bool(message.hasExternalAgents); + } return writer; }, }; diff --git a/site/e2e/setup/addUsersAndLicense.spec.ts b/site/e2e/setup/addUsersAndLicense.spec.ts index 1e227438c2843..f59d081dfbc95 100644 --- a/site/e2e/setup/addUsersAndLicense.spec.ts +++ b/site/e2e/setup/addUsersAndLicense.spec.ts @@ -20,7 +20,7 @@ test("setup deployment", async ({ page }) => { await page.getByLabel(Language.passwordLabel).fill(users.owner.password); await page.getByTestId("create").click(); - await expectUrl(page).toHavePathName("/workspaces"); + await expectUrl(page).toHavePathName("/templates"); await page.getByTestId("button-select-template").isVisible(); for (const user of Object.values(users)) { diff --git a/site/e2e/tests/app.spec.ts b/site/e2e/tests/app.spec.ts index 587775b4dc3f8..3cb58fcc66c34 100644 --- a/site/e2e/tests/app.spec.ts +++ b/site/e2e/tests/app.spec.ts @@ -21,7 +21,7 @@ test("app", async ({ context, page }) => { const appContent = "Hello World"; const token = randomUUID(); const srv = http - .createServer((req, res) => { + .createServer((_req, res) => { res.writeHead(200, { "Content-Type": "text/plain" }); res.end(appContent); }) diff --git a/site/e2e/tests/auditLogs.spec.ts b/site/e2e/tests/auditLogs.spec.ts index c25a828eedb64..56a27f94ad3c2 100644 --- a/site/e2e/tests/auditLogs.spec.ts +++ b/site/e2e/tests/auditLogs.spec.ts @@ -1,4 +1,4 @@ -import { type Page, expect, test } from "@playwright/test"; +import { expect, type Page, test } from "@playwright/test"; import { defaultPassword, users } from "../constants"; import { createTemplate, diff --git a/site/e2e/tests/deployment/workspaceProxies.spec.ts b/site/e2e/tests/deployment/workspaceProxies.spec.ts index 51fb036c4639b..94604de293d73 100644 --- a/site/e2e/tests/deployment/workspaceProxies.spec.ts +++ b/site/e2e/tests/deployment/workspaceProxies.spec.ts @@ -1,9 +1,8 @@ -import { type Page, expect, test } from "@playwright/test"; +import { expect, type Page, test } from "@playwright/test"; import { API } from "api/api"; import { setupApiCalls } from "../../api"; import { coderPort, workspaceProxyPort } from "../../constants"; -import { randomName, requiresLicense } from "../../helpers"; -import { login } from "../../helpers"; +import { login, randomName, requiresLicense } from "../../helpers"; import { beforeCoderTest } from "../../hooks"; import { startWorkspaceProxy, stopWorkspaceProxy } from "../../proxy"; diff --git a/site/e2e/tests/externalAuth.spec.ts b/site/e2e/tests/externalAuth.spec.ts index ced2a7d89c95b..712fc8f1ef9c9 100644 --- a/site/e2e/tests/externalAuth.spec.ts +++ b/site/e2e/tests/externalAuth.spec.ts @@ -17,11 +17,11 @@ test.describe.skip("externalAuth", () => { const srv = await createServer(gitAuth.webPort); // The GitHub validate endpoint returns the currently authenticated user! - srv.use(gitAuth.validatePath, (req, res) => { + srv.use(gitAuth.validatePath, (_req, res) => { res.write(JSON.stringify(ghUser)); res.end(); }); - srv.use(gitAuth.tokenPath, (req, res) => { + srv.use(gitAuth.tokenPath, (_req, res) => { const r = (Math.random() + 1).toString(36).substring(7); res.write(JSON.stringify({ access_token: r })); res.end(); @@ -51,15 +51,15 @@ test.describe.skip("externalAuth", () => { // Start a server to mock the GitHub API. const srv = await createServer(gitAuth.devicePort); - srv.use(gitAuth.validatePath, (req, res) => { + srv.use(gitAuth.validatePath, (_req, res) => { res.write(JSON.stringify(ghUser)); res.end(); }); - srv.use(gitAuth.codePath, (req, res) => { + srv.use(gitAuth.codePath, (_req, res) => { res.write(JSON.stringify(device)); res.end(); }); - srv.use(gitAuth.installationsPath, (req, res) => { + srv.use(gitAuth.installationsPath, (_req, res) => { res.write(JSON.stringify(ghInstall)); res.end(); }); @@ -72,7 +72,7 @@ test.describe.skip("externalAuth", () => { // First we send a result from the API that the token hasn't been // authorized yet to ensure the UI reacts properly. const sentPending = new Awaiter(); - srv.use(gitAuth.tokenPath, (req, res) => { + srv.use(gitAuth.tokenPath, (_req, res) => { res.write(JSON.stringify(token)); res.end(); sentPending.done(); diff --git a/site/e2e/tests/organizations/idpGroupSync.spec.ts b/site/e2e/tests/organizations/idpGroupSync.spec.ts index a6128253346b7..c8fbf7fffa26e 100644 --- a/site/e2e/tests/organizations/idpGroupSync.spec.ts +++ b/site/e2e/tests/organizations/idpGroupSync.spec.ts @@ -5,8 +5,7 @@ import { deleteOrganization, setupApiCalls, } from "../../api"; -import { randomName, requiresLicense } from "../../helpers"; -import { login } from "../../helpers"; +import { login, randomName, requiresLicense } from "../../helpers"; import { beforeCoderTest } from "../../hooks"; test.beforeEach(async ({ page }) => { diff --git a/site/e2e/tests/organizations/idpRoleSync.spec.ts b/site/e2e/tests/organizations/idpRoleSync.spec.ts index a889591026dd9..a7e7429e234ae 100644 --- a/site/e2e/tests/organizations/idpRoleSync.spec.ts +++ b/site/e2e/tests/organizations/idpRoleSync.spec.ts @@ -5,8 +5,7 @@ import { deleteOrganization, setupApiCalls, } from "../../api"; -import { randomName, requiresLicense } from "../../helpers"; -import { login } from "../../helpers"; +import { login, randomName, requiresLicense } from "../../helpers"; import { beforeCoderTest } from "../../hooks"; test.beforeEach(async ({ page }) => { diff --git a/site/e2e/tests/roles.spec.ts b/site/e2e/tests/roles.spec.ts index e6b92bd944ba0..0bf80391c0035 100644 --- a/site/e2e/tests/roles.spec.ts +++ b/site/e2e/tests/roles.spec.ts @@ -1,4 +1,4 @@ -import { type Page, expect, test } from "@playwright/test"; +import { expect, type Page, test } from "@playwright/test"; import { createOrganization, createOrganizationMember, diff --git a/site/e2e/tests/templates/updateTemplateSchedule.spec.ts b/site/e2e/tests/templates/updateTemplateSchedule.spec.ts index 42c758df5db16..b9552f85aea2b 100644 --- a/site/e2e/tests/templates/updateTemplateSchedule.spec.ts +++ b/site/e2e/tests/templates/updateTemplateSchedule.spec.ts @@ -30,6 +30,7 @@ test("update template schedule settings without override other settings", async disable_everyone_group_access: false, require_active_version: true, max_port_share_level: null, + cors_behavior: null, allow_user_cancel_workspace_jobs: null, }); diff --git a/site/e2e/tests/users/createUserWithPassword.spec.ts b/site/e2e/tests/users/createUserWithPassword.spec.ts index ec6006a81dac5..b33aa67c896e0 100644 --- a/site/e2e/tests/users/createUserWithPassword.spec.ts +++ b/site/e2e/tests/users/createUserWithPassword.spec.ts @@ -1,6 +1,5 @@ import { test } from "@playwright/test"; -import { createUser } from "../../helpers"; -import { login } from "../../helpers"; +import { createUser, login } from "../../helpers"; import { beforeCoderTest } from "../../hooks"; test.beforeEach(async ({ page }) => { diff --git a/site/e2e/tests/webTerminal.spec.ts b/site/e2e/tests/webTerminal.spec.ts index 9d502c0284b78..d03f78a8702b8 100644 --- a/site/e2e/tests/webTerminal.spec.ts +++ b/site/e2e/tests/webTerminal.spec.ts @@ -3,11 +3,11 @@ import { test } from "@playwright/test"; import { createTemplate, createWorkspace, + login, openTerminalWindow, startAgent, stopAgent, } from "../helpers"; -import { login } from "../helpers"; import { beforeCoderTest } from "../hooks"; test.beforeEach(async ({ page }) => { diff --git a/site/e2e/tests/workspaces/autoCreateWorkspace.spec.ts b/site/e2e/tests/workspaces/autoCreateWorkspace.spec.ts index a6ec00958ad78..74b3c07ca78df 100644 --- a/site/e2e/tests/workspaces/autoCreateWorkspace.spec.ts +++ b/site/e2e/tests/workspaces/autoCreateWorkspace.spec.ts @@ -4,8 +4,8 @@ import { createTemplate, createWorkspace, echoResponsesWithParameters, + login, } from "../../helpers"; -import { login } from "../../helpers"; import { beforeCoderTest } from "../../hooks"; import { emptyParameter } from "../../parameters"; import type { RichParameter } from "../../provisionerGenerated"; diff --git a/site/e2e/tests/workspaces/createWorkspace.spec.ts b/site/e2e/tests/workspaces/createWorkspace.spec.ts index 452c6e9969f37..9fcbcaf31c9dd 100644 --- a/site/e2e/tests/workspaces/createWorkspace.spec.ts +++ b/site/e2e/tests/workspaces/createWorkspace.spec.ts @@ -1,13 +1,14 @@ import { expect, test } from "@playwright/test"; import { users } from "../../constants"; import { - StarterTemplates, createTemplate, createWorkspace, + disableDynamicParameters, echoResponsesWithParameters, login, openTerminalWindow, requireTerraformProvisioner, + StarterTemplates, verifyParameters, } from "../../helpers"; import { beforeCoderTest } from "../../hooks"; @@ -35,6 +36,9 @@ test("create workspace", async ({ page }) => { apply: [{ apply: { resources: [{ name: "example" }] } }], }); + // Disable dynamic parameters to use classic parameter flow for this test + await disableDynamicParameters(page, template); + await login(page, users.member); await createWorkspace(page, template); }); @@ -51,6 +55,9 @@ test("create workspace with default immutable parameters", async ({ page }) => { echoResponsesWithParameters(richParameters), ); + // Disable dynamic parameters to use classic parameter flow for this test + await disableDynamicParameters(page, template); + await login(page, users.member); const workspaceName = await createWorkspace(page, template); await verifyParameters(page, workspaceName, richParameters, [ @@ -68,6 +75,9 @@ test("create workspace with default mutable parameters", async ({ page }) => { echoResponsesWithParameters(richParameters), ); + // Disable dynamic parameters to use classic parameter flow for this test + await disableDynamicParameters(page, template); + await login(page, users.member); const workspaceName = await createWorkspace(page, template); await verifyParameters(page, workspaceName, richParameters, [ @@ -95,6 +105,9 @@ test("create workspace with default and required parameters", async ({ echoResponsesWithParameters(richParameters), ); + // Disable dynamic parameters to use classic parameter flow for this test + await disableDynamicParameters(page, template); + await login(page, users.member); const workspaceName = await createWorkspace(page, template, { richParameters, @@ -127,6 +140,9 @@ test("create workspace and overwrite default parameters", async ({ page }) => { echoResponsesWithParameters(richParameters), ); + // Disable dynamic parameters to use classic parameter flow for this test + await disableDynamicParameters(page, template); + await login(page, users.member); const workspaceName = await createWorkspace(page, template, { richParameters, @@ -147,6 +163,9 @@ test("create workspace with disable_param search params", async ({ page }) => { echoResponsesWithParameters(richParameters), ); + // Disable dynamic parameters to use classic parameter flow for this test + await disableDynamicParameters(page, templateName); + await login(page, users.member); await page.goto( `/templates/${templateName}/workspace?disable_params=first_parameter,second_parameter`, @@ -165,6 +184,9 @@ test.skip("create docker workspace", async ({ context, page }) => { await login(page, users.templateAdmin); const template = await createTemplate(page, StarterTemplates.STARTER_DOCKER); + // Disable dynamic parameters to use classic parameter flow for this test + await disableDynamicParameters(page, template); + await login(page, users.member); const workspaceName = await createWorkspace(page, template); diff --git a/site/e2e/tests/workspaces/restartWorkspace.spec.ts b/site/e2e/tests/workspaces/restartWorkspace.spec.ts index 444ff891f0fdc..987f3c279cc26 100644 --- a/site/e2e/tests/workspaces/restartWorkspace.spec.ts +++ b/site/e2e/tests/workspaces/restartWorkspace.spec.ts @@ -4,10 +4,11 @@ import { buildWorkspaceWithParameters, createTemplate, createWorkspace, + disableDynamicParameters, echoResponsesWithParameters, + login, verifyParameters, } from "../../helpers"; -import { login } from "../../helpers"; import { beforeCoderTest } from "../../hooks"; import { firstBuildOption, secondBuildOption } from "../../parameters"; import type { RichParameter } from "../../provisionerGenerated"; @@ -24,6 +25,9 @@ test("restart workspace with ephemeral parameters", async ({ page }) => { echoResponsesWithParameters(richParameters), ); + // Disable dynamic parameters to use classic parameter flow for this test + await disableDynamicParameters(page, template); + await login(page, users.member); const workspaceName = await createWorkspace(page, template); diff --git a/site/e2e/tests/workspaces/startWorkspace.spec.ts b/site/e2e/tests/workspaces/startWorkspace.spec.ts index 90fac440046ea..30a83a01d6dca 100644 --- a/site/e2e/tests/workspaces/startWorkspace.spec.ts +++ b/site/e2e/tests/workspaces/startWorkspace.spec.ts @@ -4,11 +4,12 @@ import { buildWorkspaceWithParameters, createTemplate, createWorkspace, + disableDynamicParameters, echoResponsesWithParameters, + login, stopWorkspace, verifyParameters, } from "../../helpers"; -import { login } from "../../helpers"; import { beforeCoderTest } from "../../hooks"; import { firstBuildOption, secondBuildOption } from "../../parameters"; import type { RichParameter } from "../../provisionerGenerated"; @@ -25,6 +26,9 @@ test("start workspace with ephemeral parameters", async ({ page }) => { echoResponsesWithParameters(richParameters), ); + // Disable dynamic parameters to use classic parameter flow for this test + await disableDynamicParameters(page, template); + await login(page, users.member); const workspaceName = await createWorkspace(page, template); diff --git a/site/e2e/tests/workspaces/updateWorkspace.spec.ts b/site/e2e/tests/workspaces/updateWorkspace.spec.ts index 48c341eb63956..b731b76abbf1a 100644 --- a/site/e2e/tests/workspaces/updateWorkspace.spec.ts +++ b/site/e2e/tests/workspaces/updateWorkspace.spec.ts @@ -3,13 +3,14 @@ import { users } from "../../constants"; import { createTemplate, createWorkspace, + disableDynamicParameters, echoResponsesWithParameters, + login, updateTemplate, updateWorkspace, updateWorkspaceParameters, verifyParameters, } from "../../helpers"; -import { login } from "../../helpers"; import { beforeCoderTest } from "../../hooks"; import { fifthParameter, @@ -34,6 +35,9 @@ test("update workspace, new optional, immutable parameter added", async ({ echoResponsesWithParameters(richParameters), ); + // Disable dynamic parameters to use classic parameter flow for this test + await disableDynamicParameters(page, template); + await login(page, users.member); const workspaceName = await createWorkspace(page, template); @@ -77,6 +81,9 @@ test("update workspace, new required, mutable parameter added", async ({ echoResponsesWithParameters(richParameters), ); + // Disable dynamic parameters to use classic parameter flow for this test + await disableDynamicParameters(page, template); + await login(page, users.member); const workspaceName = await createWorkspace(page, template); @@ -122,6 +129,9 @@ test("update workspace with ephemeral parameter enabled", async ({ page }) => { echoResponsesWithParameters(richParameters), ); + // Disable dynamic parameters to use classic parameter flow for this test + await disableDynamicParameters(page, template); + await login(page, users.member); const workspaceName = await createWorkspace(page, template); diff --git a/site/jest.setup.ts b/site/jest.setup.ts index f90f5353b1c63..f0f252afd455e 100644 --- a/site/jest.setup.ts +++ b/site/jest.setup.ts @@ -1,11 +1,11 @@ import "@testing-library/jest-dom"; import "jest-location-mock"; +import { server } from "testHelpers/server"; import crypto from "node:crypto"; import { cleanup } from "@testing-library/react"; import type { Region } from "api/typesGenerated"; import type { ProxyLatencyReport } from "contexts/useProxyLatency"; import { useMemo } from "react"; -import { server } from "testHelpers/server"; // useProxyLatency does some http requests to determine latency. // This would fail unit testing, or at least make it very slow with diff --git a/site/package.json b/site/package.json index 8d688b45c928b..5693fc5d55220 100644 --- a/site/package.json +++ b/site/package.json @@ -4,6 +4,7 @@ "repository": "https://github.com/coder/coder", "private": true, "license": "AGPL-3.0", + "packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748", "scripts": { "build": "NODE_ENV=production pnpm vite build", "check": "biome check --error-on-warnings .", @@ -14,7 +15,7 @@ "format": "biome format --write .", "format:check": "biome format .", "lint": "pnpm run lint:check && pnpm run lint:types && pnpm run lint:circular-deps && knip", - "lint:check": " biome lint --error-on-warnings .", + "lint:check": "biome lint --error-on-warnings .", "lint:circular-deps": "dpdm --no-tree --no-warning -T ./src/App.tsx", "lint:knip": "knip", "lint:fix": "biome lint --error-on-warnings --write . && knip --fix", @@ -46,7 +47,7 @@ "@fontsource/ibm-plex-mono": "5.1.1", "@fontsource/jetbrains-mono": "5.2.5", "@fontsource/source-code-pro": "5.2.5", - "@monaco-editor/react": "4.6.0", + "@monaco-editor/react": "4.7.0", "@mui/icons-material": "5.16.14", "@mui/material": "5.16.14", "@mui/system": "5.16.14", @@ -92,7 +93,7 @@ "jszip": "3.10.1", "lodash": "4.17.21", "lucide-react": "0.474.0", - "monaco-editor": "0.52.0", + "monaco-editor": "0.52.2", "pretty-bytes": "6.1.1", "react": "18.3.1", "react-color": "2.19.3", @@ -103,7 +104,7 @@ "react-markdown": "9.0.3", "react-query": "npm:@tanstack/react-query@5.77.0", "react-resizable-panels": "3.0.3", - "react-router-dom": "6.26.2", + "react-router": "7.8.0", "react-syntax-highlighter": "15.6.1", "react-textarea-autosize": "8.5.9", "react-virtualized-auto-sizer": "1.0.24", @@ -114,7 +115,7 @@ "semver": "7.6.2", "tailwind-merge": "2.6.0", "tailwindcss-animate": "1.0.7", - "tzdata": "1.0.40", + "tzdata": "1.0.44", "ua-parser-js": "1.0.40", "ufuzzy": "npm:@leeoniya/ufuzzy@1.0.10", "undici": "6.21.2", @@ -124,20 +125,14 @@ "yup": "1.6.1" }, "devDependencies": { - "@biomejs/biome": "1.9.4", - "@chromatic-com/storybook": "3.2.2", + "@biomejs/biome": "2.2.0", + "@chromatic-com/storybook": "4.1.0", "@octokit/types": "12.3.0", "@playwright/test": "1.47.0", - "@storybook/addon-actions": "8.5.2", - "@storybook/addon-essentials": "8.4.6", - "@storybook/addon-interactions": "8.5.3", - "@storybook/addon-links": "8.5.2", - "@storybook/addon-mdx-gfm": "8.5.2", - "@storybook/addon-themes": "8.4.6", - "@storybook/preview-api": "8.5.3", - "@storybook/react": "8.4.6", - "@storybook/react-vite": "8.4.6", - "@storybook/test": "8.4.6", + "@storybook/addon-docs": "9.1.2", + "@storybook/addon-links": "9.1.2", + "@storybook/addon-themes": "9.1.2", + "@storybook/react-vite": "9.1.2", "@swc/core": "1.3.38", "@swc/jest": "0.2.37", "@tailwindcss/typography": "0.5.16", @@ -182,22 +177,25 @@ "rollup-plugin-visualizer": "5.14.0", "rxjs": "7.8.1", "ssh2": "1.16.0", - "storybook": "8.5.3", - "storybook-addon-remix-react-router": "3.1.0", + "storybook": "9.1.2", + "storybook-addon-remix-react-router": "5.0.0", "tailwindcss": "3.4.17", "ts-proto": "1.164.0", "typescript": "5.6.3", "vite": "6.3.5", - "vite-plugin-checker": "0.9.3", - "vite-plugin-turbosnap": "1.0.3" + "vite-plugin-checker": "0.9.3" }, - "browserslist": ["chrome 110", "firefox 111", "safari 16.0"], + "browserslist": [ + "chrome 110", + "firefox 111", + "safari 16.0" + ], "resolutions": { "optionator": "0.9.3", "semver": "7.6.2" }, "engines": { - "npm": ">=9.0.0 <10.0.0", + "pnpm": ">=10.0.0 <11.0.0", "node": ">=18.0.0 <21.0.0" }, "pnpm": { @@ -205,7 +203,11 @@ "@babel/runtime": "7.26.10", "@babel/helpers": "7.26.10", "esbuild": "^0.25.0", + "form-data": "4.0.4", "prismjs": "1.30.0" - } + }, + "ignoredBuiltDependencies": [ + "storybook-addon-remix-react-router" + ] } } diff --git a/site/pnpm-lock.yaml b/site/pnpm-lock.yaml index 3c7f5176b5b6b..31a8857901845 100644 --- a/site/pnpm-lock.yaml +++ b/site/pnpm-lock.yaml @@ -10,6 +10,7 @@ overrides: '@babel/runtime': 7.26.10 '@babel/helpers': 7.26.10 esbuild: ^0.25.0 + form-data: 4.0.4 prismjs: 1.30.0 importers: @@ -53,8 +54,8 @@ importers: specifier: 5.2.5 version: 5.2.5 '@monaco-editor/react': - specifier: 4.6.0 - version: 4.6.0(monaco-editor@0.52.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + specifier: 4.7.0 + version: 4.7.0(monaco-editor@0.52.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@mui/icons-material': specifier: 5.16.14 version: 5.16.14(@mui/material@5.16.14(@emotion/react@11.14.0(@types/react@18.3.12)(react@18.3.1))(@emotion/styled@11.14.0(@emotion/react@11.14.0(@types/react@18.3.12)(react@18.3.1))(@types/react@18.3.12)(react@18.3.1))(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@types/react@18.3.12)(react@18.3.1) @@ -191,8 +192,8 @@ importers: specifier: 0.474.0 version: 0.474.0(react@18.3.1) monaco-editor: - specifier: 0.52.0 - version: 0.52.0 + specifier: 0.52.2 + version: 0.52.2 pretty-bytes: specifier: 6.1.1 version: 6.1.1 @@ -223,9 +224,9 @@ importers: react-resizable-panels: specifier: 3.0.3 version: 3.0.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react-router-dom: - specifier: 6.26.2 - version: 6.26.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + react-router: + specifier: 7.8.0 + version: 7.8.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) react-syntax-highlighter: specifier: 15.6.1 version: 15.6.1(react@18.3.1) @@ -257,8 +258,8 @@ importers: specifier: 1.0.7 version: 1.0.7(tailwindcss@3.4.17(ts-node@10.9.2(@swc/core@1.3.38)(@types/node@20.17.16)(typescript@5.6.3))) tzdata: - specifier: 1.0.40 - version: 1.0.40 + specifier: 1.0.44 + version: 1.0.44 ua-parser-js: specifier: 1.0.40 version: 1.0.40 @@ -282,47 +283,29 @@ importers: version: 1.6.1 devDependencies: '@biomejs/biome': - specifier: 1.9.4 - version: 1.9.4 + specifier: 2.2.0 + version: 2.2.0 '@chromatic-com/storybook': - specifier: 3.2.2 - version: 3.2.2(react@18.3.1)(storybook@8.5.3(prettier@3.4.1)) + specifier: 4.1.0 + version: 4.1.0(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0))) '@octokit/types': specifier: 12.3.0 version: 12.3.0 '@playwright/test': specifier: 1.47.0 version: 1.47.0 - '@storybook/addon-actions': - specifier: 8.5.2 - version: 8.5.2(storybook@8.5.3(prettier@3.4.1)) - '@storybook/addon-essentials': - specifier: 8.4.6 - version: 8.4.6(@types/react@18.3.12)(storybook@8.5.3(prettier@3.4.1)) - '@storybook/addon-interactions': - specifier: 8.5.3 - version: 8.5.3(storybook@8.5.3(prettier@3.4.1)) + '@storybook/addon-docs': + specifier: 9.1.2 + version: 9.1.2(@types/react@18.3.12)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0))) '@storybook/addon-links': - specifier: 8.5.2 - version: 8.5.2(react@18.3.1)(storybook@8.5.3(prettier@3.4.1)) - '@storybook/addon-mdx-gfm': - specifier: 8.5.2 - version: 8.5.2(storybook@8.5.3(prettier@3.4.1)) + specifier: 9.1.2 + version: 9.1.2(react@18.3.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0))) '@storybook/addon-themes': - specifier: 8.4.6 - version: 8.4.6(storybook@8.5.3(prettier@3.4.1)) - '@storybook/preview-api': - specifier: 8.5.3 - version: 8.5.3(storybook@8.5.3(prettier@3.4.1)) - '@storybook/react': - specifier: 8.4.6 - version: 8.4.6(@storybook/test@8.4.6(storybook@8.5.3(prettier@3.4.1)))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.5.3(prettier@3.4.1))(typescript@5.6.3) + specifier: 9.1.2 + version: 9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0))) '@storybook/react-vite': - specifier: 8.4.6 - version: 8.4.6(@storybook/test@8.4.6(storybook@8.5.3(prettier@3.4.1)))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(rollup@4.40.1)(storybook@8.5.3(prettier@3.4.1))(typescript@5.6.3)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) - '@storybook/test': - specifier: 8.4.6 - version: 8.4.6(storybook@8.5.3(prettier@3.4.1)) + specifier: 9.1.2 + version: 9.1.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(rollup@4.40.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)))(typescript@5.6.3)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) '@swc/core': specifier: 1.3.38 version: 1.3.38 @@ -456,11 +439,11 @@ importers: specifier: 1.16.0 version: 1.16.0 storybook: - specifier: 8.5.3 - version: 8.5.3(prettier@3.4.1) + specifier: 9.1.2 + version: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) storybook-addon-remix-react-router: - specifier: 3.1.0 - version: 3.1.0(@storybook/blocks@8.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.5.3(prettier@3.4.1)))(@storybook/channels@8.1.11)(@storybook/components@8.4.6(storybook@8.5.3(prettier@3.4.1)))(@storybook/core-events@8.1.11)(@storybook/manager-api@8.4.6(storybook@8.5.3(prettier@3.4.1)))(@storybook/preview-api@8.5.3(storybook@8.5.3(prettier@3.4.1)))(@storybook/theming@8.4.6(storybook@8.5.3(prettier@3.4.1)))(react-dom@18.3.1(react@18.3.1))(react-router-dom@6.26.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) + specifier: 5.0.0 + version: 5.0.0(react-dom@18.3.1(react@18.3.1))(react-router@7.8.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0))) tailwindcss: specifier: 3.4.17 version: 3.4.17(ts-node@10.9.2(@swc/core@1.3.38)(@types/node@20.17.16)(typescript@5.6.3)) @@ -475,10 +458,7 @@ importers: version: 6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0) vite-plugin-checker: specifier: 0.9.3 - version: 0.9.3(@biomejs/biome@1.9.4)(eslint@8.52.0)(optionator@0.9.3)(typescript@5.6.3)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) - vite-plugin-turbosnap: - specifier: 1.0.3 - version: 1.0.3 + version: 0.9.3(@biomejs/biome@2.2.0)(eslint@8.52.0)(optionator@0.9.3)(typescript@5.6.3)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) packages: @@ -735,10 +715,6 @@ packages: resolution: {integrity: sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==, tarball: https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz} engines: {node: '>=6.9.0'} - '@babel/traverse@7.25.9': - resolution: {integrity: sha512-ZCuvfwOwlz/bawvAuvcj8rrithP2/N55Tzz342AkTvq4qaWbGfmCk/tKhNaV2cthijKrPAA8SRJV5WWe7IBMJw==, tarball: https://registry.npmjs.org/@babel/traverse/-/traverse-7.25.9.tgz} - engines: {node: '>=6.9.0'} - '@babel/traverse@7.26.4': resolution: {integrity: sha512-fH+b7Y4p3yqvApJALCPJcwb0/XaOSgtK4pzV6WVjPR5GLFQBRI7pfoX2V2iM48NXvX07NUxxm1Vw98YjqTcU5w==, tarball: https://registry.npmjs.org/@babel/traverse/-/traverse-7.26.4.tgz} engines: {node: '>=6.9.0'} @@ -747,10 +723,6 @@ packages: resolution: {integrity: sha512-ZCYtZciz1IWJB4U61UPu4KEaqyfj+r5T1Q5mqPo+IBpcG9kHv30Z0aD8LXPgC1trYa6rK0orRyAhqUgk4MjmEg==, tarball: https://registry.npmjs.org/@babel/traverse/-/traverse-7.27.1.tgz} engines: {node: '>=6.9.0'} - '@babel/types@7.26.0': - resolution: {integrity: sha512-Z/yiTPj+lDVnF7lWeKCIJzaIkI0vYO87dMpZ4bg4TDrFe4XXLFWL1TbXU27gBP3QccxV9mZICCrnjnYlJjXHOA==, tarball: https://registry.npmjs.org/@babel/types/-/types-7.26.0.tgz} - engines: {node: '>=6.9.0'} - '@babel/types@7.26.3': resolution: {integrity: sha512-vN5p+1kl59GVKMvTHt55NzzmYVxprfJD+ql7U9NFIfKCBkYE55LYtS+WtPlaYOyzydrKI8Nezd+aZextrd+FMA==, tarball: https://registry.npmjs.org/@babel/types/-/types-7.26.3.tgz} engines: {node: '>=6.9.0'} @@ -766,55 +738,55 @@ packages: '@bcoe/v8-coverage@0.2.3': resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==, tarball: https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz} - '@biomejs/biome@1.9.4': - resolution: {integrity: sha512-1rkd7G70+o9KkTn5KLmDYXihGoTaIGO9PIIN2ZB7UJxFrWw04CZHPYiMRjYsaDvVV7hP1dYNRLxSANLaBFGpog==, tarball: https://registry.npmjs.org/@biomejs/biome/-/biome-1.9.4.tgz} + '@biomejs/biome@2.2.0': + resolution: {integrity: sha512-3On3RSYLsX+n9KnoSgfoYlckYBoU6VRM22cw1gB4Y0OuUVSYd/O/2saOJMrA4HFfA1Ff0eacOvMN1yAAvHtzIw==, tarball: https://registry.npmjs.org/@biomejs/biome/-/biome-2.2.0.tgz} engines: {node: '>=14.21.3'} hasBin: true - '@biomejs/cli-darwin-arm64@1.9.4': - resolution: {integrity: sha512-bFBsPWrNvkdKrNCYeAp+xo2HecOGPAy9WyNyB/jKnnedgzl4W4Hb9ZMzYNbf8dMCGmUdSavlYHiR01QaYR58cw==, tarball: https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-1.9.4.tgz} + '@biomejs/cli-darwin-arm64@2.2.0': + resolution: {integrity: sha512-zKbwUUh+9uFmWfS8IFxmVD6XwqFcENjZvEyfOxHs1epjdH3wyyMQG80FGDsmauPwS2r5kXdEM0v/+dTIA9FXAg==, tarball: https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.2.0.tgz} engines: {node: '>=14.21.3'} cpu: [arm64] os: [darwin] - '@biomejs/cli-darwin-x64@1.9.4': - resolution: {integrity: sha512-ngYBh/+bEedqkSevPVhLP4QfVPCpb+4BBe2p7Xs32dBgs7rh9nY2AIYUL6BgLw1JVXV8GlpKmb/hNiuIxfPfZg==, tarball: https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-1.9.4.tgz} + '@biomejs/cli-darwin-x64@2.2.0': + resolution: {integrity: sha512-+OmT4dsX2eTfhD5crUOPw3RPhaR+SKVspvGVmSdZ9y9O/AgL8pla6T4hOn1q+VAFBHuHhsdxDRJgFCSC7RaMOw==, tarball: https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.2.0.tgz} engines: {node: '>=14.21.3'} cpu: [x64] os: [darwin] - '@biomejs/cli-linux-arm64-musl@1.9.4': - resolution: {integrity: sha512-v665Ct9WCRjGa8+kTr0CzApU0+XXtRgwmzIf1SeKSGAv+2scAlW6JR5PMFo6FzqqZ64Po79cKODKf3/AAmECqA==, tarball: https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-1.9.4.tgz} + '@biomejs/cli-linux-arm64-musl@2.2.0': + resolution: {integrity: sha512-egKpOa+4FL9YO+SMUMLUvf543cprjevNc3CAgDNFLcjknuNMcZ0GLJYa3EGTCR2xIkIUJDVneBV3O9OcIlCEZQ==, tarball: https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.2.0.tgz} engines: {node: '>=14.21.3'} cpu: [arm64] os: [linux] - '@biomejs/cli-linux-arm64@1.9.4': - resolution: {integrity: sha512-fJIW0+LYujdjUgJJuwesP4EjIBl/N/TcOX3IvIHJQNsAqvV2CHIogsmA94BPG6jZATS4Hi+xv4SkBBQSt1N4/g==, tarball: https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-1.9.4.tgz} + '@biomejs/cli-linux-arm64@2.2.0': + resolution: {integrity: sha512-6eoRdF2yW5FnW9Lpeivh7Mayhq0KDdaDMYOJnH9aT02KuSIX5V1HmWJCQQPwIQbhDh68Zrcpl8inRlTEan0SXw==, tarball: https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.2.0.tgz} engines: {node: '>=14.21.3'} cpu: [arm64] os: [linux] - '@biomejs/cli-linux-x64-musl@1.9.4': - resolution: {integrity: sha512-gEhi/jSBhZ2m6wjV530Yy8+fNqG8PAinM3oV7CyO+6c3CEh16Eizm21uHVsyVBEB6RIM8JHIl6AGYCv6Q6Q9Tg==, tarball: https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-1.9.4.tgz} + '@biomejs/cli-linux-x64-musl@2.2.0': + resolution: {integrity: sha512-I5J85yWwUWpgJyC1CcytNSGusu2p9HjDnOPAFG4Y515hwRD0jpR9sT9/T1cKHtuCvEQ/sBvx+6zhz9l9wEJGAg==, tarball: https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.2.0.tgz} engines: {node: '>=14.21.3'} cpu: [x64] os: [linux] - '@biomejs/cli-linux-x64@1.9.4': - resolution: {integrity: sha512-lRCJv/Vi3Vlwmbd6K+oQ0KhLHMAysN8lXoCI7XeHlxaajk06u7G+UsFSO01NAs5iYuWKmVZjmiOzJ0OJmGsMwg==, tarball: https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-1.9.4.tgz} + '@biomejs/cli-linux-x64@2.2.0': + resolution: {integrity: sha512-5UmQx/OZAfJfi25zAnAGHUMuOd+LOsliIt119x2soA2gLggQYrVPA+2kMUxR6Mw5M1deUF/AWWP2qpxgH7Nyfw==, tarball: https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.2.0.tgz} engines: {node: '>=14.21.3'} cpu: [x64] os: [linux] - '@biomejs/cli-win32-arm64@1.9.4': - resolution: {integrity: sha512-tlbhLk+WXZmgwoIKwHIHEBZUwxml7bRJgk0X2sPyNR3S93cdRq6XulAZRQJ17FYGGzWne0fgrXBKpl7l4M87Hg==, tarball: https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-1.9.4.tgz} + '@biomejs/cli-win32-arm64@2.2.0': + resolution: {integrity: sha512-n9a1/f2CwIDmNMNkFs+JI0ZjFnMO0jdOyGNtihgUNFnlmd84yIYY2KMTBmMV58ZlVHjgmY5Y6E1hVTnSRieggA==, tarball: https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.2.0.tgz} engines: {node: '>=14.21.3'} cpu: [arm64] os: [win32] - '@biomejs/cli-win32-x64@1.9.4': - resolution: {integrity: sha512-8Y5wMhVIPaWe6jw2H+KlEm4wP/f7EW3810ZLmDlrEEy5KvBsb9ECEfu/kMWD484ijfQ8+nIi0giMgu9g1UAuuA==, tarball: https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-1.9.4.tgz} + '@biomejs/cli-win32-x64@2.2.0': + resolution: {integrity: sha512-Nawu5nHjP/zPKTIryh2AavzTc/KEg4um/MxWdXW0A6P/RZOyIpa7+QSjeXwAwX/utJGaCoXRPWtF3m5U/bB3Ww==, tarball: https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.2.0.tgz} engines: {node: '>=14.21.3'} cpu: [x64] os: [win32] @@ -828,11 +800,11 @@ packages: '@bundled-es-modules/tough-cookie@0.1.6': resolution: {integrity: sha512-dvMHbL464C0zI+Yqxbz6kZ5TOEp7GLW+pry/RWndAR8MJQAXZ2rPmIs8tziTZjeIyhSNZgZbCePtfSbdWqStJw==, tarball: https://registry.npmjs.org/@bundled-es-modules/tough-cookie/-/tough-cookie-0.1.6.tgz} - '@chromatic-com/storybook@3.2.2': - resolution: {integrity: sha512-xmXt/GW0hAPbzNTrxYuVo43Adrtjue4DeVrsoIIEeJdGaPNNeNf+DHMlJKOBdlHmCnFUoe9R/0mLM9zUp5bKWw==, tarball: https://registry.npmjs.org/@chromatic-com/storybook/-/storybook-3.2.2.tgz} - engines: {node: '>=16.0.0', yarn: '>=1.22.18'} + '@chromatic-com/storybook@4.1.0': + resolution: {integrity: sha512-B9XesFX5lQUdP81/QBTtkiYOFqEsJwQpzkZlcYPm2n/L1S/8ZabSPbz6NoY8hOJTXWZ2p7grygUlxyGy+gAvfQ==, tarball: https://registry.npmjs.org/@chromatic-com/storybook/-/storybook-4.1.0.tgz} + engines: {node: '>=20.0.0', yarn: '>=1.22.18'} peerDependencies: - storybook: ^8.2.0 || ^8.3.0-0 || ^8.4.0-0 || ^8.5.0-0 || ^8.6.0-0 + storybook: ^0.0.0-0 || ^9.0.0 || ^9.1.0-0 || ^9.2.0-0 '@cspotcode/source-map-support@0.8.1': resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==, tarball: https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz} @@ -1240,11 +1212,11 @@ packages: resolution: {integrity: sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==, tarball: https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - '@joshwooding/vite-plugin-react-docgen-typescript@0.4.2': - resolution: {integrity: sha512-feQ+ntr+8hbVudnsTUapiMN9q8T90XA1d5jn9QzY09sNoj4iD9wi0PY1vsBFTda4ZjEaxRK9S81oarR2nj7TFQ==, tarball: https://registry.npmjs.org/@joshwooding/vite-plugin-react-docgen-typescript/-/vite-plugin-react-docgen-typescript-0.4.2.tgz} + '@joshwooding/vite-plugin-react-docgen-typescript@0.6.1': + resolution: {integrity: sha512-J4BaTocTOYFkMHIra1JDWrMWpNmBl4EkplIwHEsV8aeUOtdWjwSnln9U7twjMFTAEB7mptNtSKyVi1Y2W9sDJw==, tarball: https://registry.npmjs.org/@joshwooding/vite-plugin-react-docgen-typescript/-/vite-plugin-react-docgen-typescript-0.6.1.tgz} peerDependencies: typescript: '>= 4.3.x' - vite: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 + vite: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 peerDependenciesMeta: typescript: optional: true @@ -1279,17 +1251,24 @@ packages: '@types/react': '>=16' react: '>=16' - '@monaco-editor/loader@1.4.0': - resolution: {integrity: sha512-00ioBig0x642hytVspPl7DbQyaSWRaolYie/UFNjoTdvoKPzo6xrXLhTk9ixgIKcLH5b5vDOjVNiGyY+uDCUlg==, tarball: https://registry.npmjs.org/@monaco-editor/loader/-/loader-1.4.0.tgz} - peerDependencies: - monaco-editor: '>= 0.21.0 < 1' + '@mjackson/form-data-parser@0.4.0': + resolution: {integrity: sha512-zDQ0sFfXqn2bJaZ/ypXfGUe0lUjCzXybBHYEoyWaO2w1dZ0nOM9nRER8tVVv3a8ZIgO/zF6p2I5ieWJAUOzt3w==, tarball: https://registry.npmjs.org/@mjackson/form-data-parser/-/form-data-parser-0.4.0.tgz} - '@monaco-editor/react@4.6.0': - resolution: {integrity: sha512-RFkU9/i7cN2bsq/iTkurMWOEErmYcY6JiQI3Jn+WeR/FGISH8JbHERjpS9oRuSOPvDMJI0Z8nJeKkbOs9sBYQw==, tarball: https://registry.npmjs.org/@monaco-editor/react/-/react-4.6.0.tgz} + '@mjackson/headers@0.5.1': + resolution: {integrity: sha512-sJpFgecPT/zJvwk3GRNVWNs8EkwaJoUNU2D0VMlp+gDJs6cuSTm1q/aCZi3ZtuV6CgDEQ4l2ZjUG3A9JrQlbNA==, tarball: https://registry.npmjs.org/@mjackson/headers/-/headers-0.5.1.tgz} + + '@mjackson/multipart-parser@0.6.3': + resolution: {integrity: sha512-aQhySnM6OpAYMMG+m7LEygYye99hB1md/Cy1AFE0yD5hfNW+X4JDu7oNVY9Gc6IW8PZ45D1rjFLDIUdnkXmwrA==, tarball: https://registry.npmjs.org/@mjackson/multipart-parser/-/multipart-parser-0.6.3.tgz} + + '@monaco-editor/loader@1.5.0': + resolution: {integrity: sha512-hKoGSM+7aAc7eRTRjpqAZucPmoNOC4UUbknb/VNoTkEIkCPhqV8LfbsgM1webRM7S/z21eHEx9Fkwx8Z/C/+Xw==, tarball: https://registry.npmjs.org/@monaco-editor/loader/-/loader-1.5.0.tgz} + + '@monaco-editor/react@4.7.0': + resolution: {integrity: sha512-cyzXQCtO47ydzxpQtCGSQGOC8Gk3ZUeBXFAxD+CWXYFo5OqZyZUonFl0DwUlTyAfRHntBfw2p3w4s9R6oe1eCA==, tarball: https://registry.npmjs.org/@monaco-editor/react/-/react-4.7.0.tgz} peerDependencies: monaco-editor: '>= 0.25.0 < 1' - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 '@mswjs/interceptors@0.35.9': resolution: {integrity: sha512-SSnyl/4ni/2ViHKkiZb8eajA/eN1DNFaHjhGiLUdZvDz6PKF4COSf/17xqSz64nOo2Ia29SA6B2KNCsyCbVmaQ==, tarball: https://registry.npmjs.org/@mswjs/interceptors/-/interceptors-0.35.9.tgz} @@ -1405,6 +1384,9 @@ packages: '@emotion/styled': optional: true + '@neoconfetti/react@1.0.0': + resolution: {integrity: sha512-klcSooChXXOzIm+SE5IISIAn3bYzYfPjbX7D7HoqZL84oAfgREeSg5vSIaSFH+DaGzzvImTyWe1OyrJ67vik4A==, tarball: https://registry.npmjs.org/@neoconfetti/react/-/react-1.0.0.tgz} + '@nodelib/fs.scandir@2.1.5': resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==, tarball: https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz} engines: {node: '>= 8'} @@ -2051,10 +2033,6 @@ packages: '@radix-ui/rect@1.1.0': resolution: {integrity: sha512-A9+lCBZoaMJlVKcRBz2YByCG+Cp2t6nAnMnNba+XiWxnj6r4JUFqfsgwocMBZU9LPtdxC6wB56ySYpc7LQIoJg==, tarball: https://registry.npmjs.org/@radix-ui/rect/-/rect-1.1.0.tgz} - '@remix-run/router@1.19.2': - resolution: {integrity: sha512-baiMx18+IMuD1yyvOGaHM9QrVUPGGG0jC+z+IPHnRJWUAUvaKuWKyE8gjDj2rzv3sz9zOGoRSPgeBVHRhZnBlA==, tarball: https://registry.npmjs.org/@remix-run/router/-/router-1.19.2.tgz} - engines: {node: '>=14.0.0'} - '@rolldown/pluginutils@1.0.0-beta.9': resolution: {integrity: sha512-e9MeMtVWo186sgvFFJOPGy7/d2j2mZhLJIdVW0C/xDluuOvymEATqz6zKsP0ZmXGzQtqlyjz5sC1sYQUoJG98w==, tarball: https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.9.tgz} @@ -2176,220 +2154,74 @@ packages: '@sinonjs/fake-timers@10.3.0': resolution: {integrity: sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==, tarball: https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz} - '@storybook/addon-actions@8.4.6': - resolution: {integrity: sha512-vbplwjMj7UXbdzoFhQkqFHLQAPJX8OVGTM9Q+yjuWDHViaKKUlgRWp0jclT7aIDNJQU2a6wJbTimHgJeF16Vhg==, tarball: https://registry.npmjs.org/@storybook/addon-actions/-/addon-actions-8.4.6.tgz} - peerDependencies: - storybook: ^8.4.6 - - '@storybook/addon-actions@8.5.2': - resolution: {integrity: sha512-g0gLesVSFgstUq5QphsLeC1vEdwNHgqo2TE0m+STM47832xbxBwmK6uvBeqi416xZvnt1TTKaaBr4uCRRQ64Ww==, tarball: https://registry.npmjs.org/@storybook/addon-actions/-/addon-actions-8.5.2.tgz} - peerDependencies: - storybook: ^8.5.2 - - '@storybook/addon-backgrounds@8.4.6': - resolution: {integrity: sha512-RSjJ3iElxlQXebZrz1s5LeoLpAXr9LAGifX7w0abMzN5sg6QSwNeUHko2eT3V57M3k1Fa/5Eelso/QBQifFEog==, tarball: https://registry.npmjs.org/@storybook/addon-backgrounds/-/addon-backgrounds-8.4.6.tgz} - peerDependencies: - storybook: ^8.4.6 - - '@storybook/addon-controls@8.4.6': - resolution: {integrity: sha512-70pEGWh0C2g8s0DYsISElOzsMbQS6p/K9iU5EqfotDF+hvEqstjsV/bTbR5f3OK4vR/7Gxamk7j8RVd14Nql6A==, tarball: https://registry.npmjs.org/@storybook/addon-controls/-/addon-controls-8.4.6.tgz} - peerDependencies: - storybook: ^8.4.6 - - '@storybook/addon-docs@8.4.6': - resolution: {integrity: sha512-olxz61W7PW/EsXrKhLrYbI3rn9GMBhY3KIOF/6tumbRkh0Siu/qe4EAImaV9NNwiC1R7+De/1OIVMY6o0EIZVw==, tarball: https://registry.npmjs.org/@storybook/addon-docs/-/addon-docs-8.4.6.tgz} - peerDependencies: - storybook: ^8.4.6 - - '@storybook/addon-essentials@8.4.6': - resolution: {integrity: sha512-TbFqyvWFUKw8LBpVcZuGQydzVB/3kSuHxDHi+Wj3Qas3cxBl7+w4/HjwomT2D2Tni1dZ1uPDOsAtNLmwp1POsg==, tarball: https://registry.npmjs.org/@storybook/addon-essentials/-/addon-essentials-8.4.6.tgz} + '@storybook/addon-docs@9.1.2': + resolution: {integrity: sha512-U3eHJ8lQFfEZ/OcgdKkUBbW2Y2tpAsHfy8lQOBgs5Pgj9biHEJcUmq+drOS/sJhle673eoBcUFmspXulI4KP1w==, tarball: https://registry.npmjs.org/@storybook/addon-docs/-/addon-docs-9.1.2.tgz} peerDependencies: - storybook: ^8.4.6 + storybook: ^9.1.2 - '@storybook/addon-highlight@8.4.6': - resolution: {integrity: sha512-m8wedbqDMbwkP99dNHkHAiAUkx5E7FEEEyLPX1zfkhZWOGtTkavXHH235SGp50zD75LQ6eC/BvgegrzxSQa9Wg==, tarball: https://registry.npmjs.org/@storybook/addon-highlight/-/addon-highlight-8.4.6.tgz} - peerDependencies: - storybook: ^8.4.6 - - '@storybook/addon-interactions@8.5.3': - resolution: {integrity: sha512-nQuP65iFGgqfVp/O8NxNDUwLTWmQBW4bofUFaT4wzYn7Jk9zobOZYtgQvdqBZtNzBDYmLrfrCutEBj5jVPRyuQ==, tarball: https://registry.npmjs.org/@storybook/addon-interactions/-/addon-interactions-8.5.3.tgz} - peerDependencies: - storybook: ^8.5.3 - - '@storybook/addon-links@8.5.2': - resolution: {integrity: sha512-eDKOQoAKKUQo0JqeLNzMLu6fm1s3oxwZ6O+rAWS6n5bsrjZS2Ul8esKkRriFVwHtDtqx99wneqOscS8IzE/ENw==, tarball: https://registry.npmjs.org/@storybook/addon-links/-/addon-links-8.5.2.tgz} + '@storybook/addon-links@9.1.2': + resolution: {integrity: sha512-drAWdhn5cRo5WcaORoCYfJ6tgTAw1m+ZJb1ICyNtTU6i/0nErV8jJjt7AziUcUIyzaGVJAkAMNC3+R4uDPSFDA==, tarball: https://registry.npmjs.org/@storybook/addon-links/-/addon-links-9.1.2.tgz} peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta - storybook: ^8.5.2 + storybook: ^9.1.2 peerDependenciesMeta: react: optional: true - '@storybook/addon-mdx-gfm@8.5.2': - resolution: {integrity: sha512-UuJDa2Asch8Z6H+vzLg+/VQQNbHhqmDtn8OSfNHo6Lr6a0uk6LofYKvP/nB7i6wMUvnaM+Qh/b5hAI/VCXitBQ==, tarball: https://registry.npmjs.org/@storybook/addon-mdx-gfm/-/addon-mdx-gfm-8.5.2.tgz} + '@storybook/addon-themes@9.1.2': + resolution: {integrity: sha512-dpWCx0IpKKFGEuOe2u8cUD2ShWMaE6Keh0zkM1gP8jx5gL8lLv9uhRHaZcQamwnG3BgnnKFgArODNxewsRSFfA==, tarball: https://registry.npmjs.org/@storybook/addon-themes/-/addon-themes-9.1.2.tgz} peerDependencies: - storybook: ^8.5.2 + storybook: ^9.1.2 - '@storybook/addon-measure@8.4.6': - resolution: {integrity: sha512-N2IRpr39g5KpexCAS1vIHJT+phc9Yilwm3PULds2rQ66VMTbkxobXJDdt0NS05g5n9/eDniroNQwdCeLg4tkpw==, tarball: https://registry.npmjs.org/@storybook/addon-measure/-/addon-measure-8.4.6.tgz} + '@storybook/builder-vite@9.1.2': + resolution: {integrity: sha512-5Y7e5wnSzFxCGP63UNRRZVoxHe1znU4dYXazJBobAlEcUPBk7A0sH2716tA6bS4oz92oG9tgvn1g996hRrw4ow==, tarball: https://registry.npmjs.org/@storybook/builder-vite/-/builder-vite-9.1.2.tgz} peerDependencies: - storybook: ^8.4.6 - - '@storybook/addon-outline@8.4.6': - resolution: {integrity: sha512-EhcWx8OpK85HxQulLWzpWUHEwQpDYuAiKzsFj9ivAbfeljkIWNTG04mierfaH1xX016uL9RtLJL/zwBS5ChnFg==, tarball: https://registry.npmjs.org/@storybook/addon-outline/-/addon-outline-8.4.6.tgz} - peerDependencies: - storybook: ^8.4.6 - - '@storybook/addon-themes@8.4.6': - resolution: {integrity: sha512-0Eyh7jxxQ8hc7KIO2bJF8BKY1CRJ9zPo2DKoRiUKDoSGSP8qdlj4V/ks892GcUffdhTjoFAJCRzG7Ff+TnVKrA==, tarball: https://registry.npmjs.org/@storybook/addon-themes/-/addon-themes-8.4.6.tgz} - peerDependencies: - storybook: ^8.4.6 - - '@storybook/addon-toolbars@8.4.6': - resolution: {integrity: sha512-+Xao/uGa8FnYsyUiREUkYXWNysm3Aba8tL/Bwd+HufHtdiKJGa9lrXaC7VLCqBUaEjwqM3aaPwqEWIROsthmPQ==, tarball: https://registry.npmjs.org/@storybook/addon-toolbars/-/addon-toolbars-8.4.6.tgz} - peerDependencies: - storybook: ^8.4.6 - - '@storybook/addon-viewport@8.4.6': - resolution: {integrity: sha512-BuQll5YzOCpMS7p5Rsw9wcmi8hTnEKyg6+qAbkZNfiZ2JhXCa1GFUqX725fF1whpYVQULtkQxU8r+vahoRn7Yg==, tarball: https://registry.npmjs.org/@storybook/addon-viewport/-/addon-viewport-8.4.6.tgz} - peerDependencies: - storybook: ^8.4.6 - - '@storybook/blocks@8.4.6': - resolution: {integrity: sha512-Gzbx8hM7ZQIHlQELcFIMbY1v+r1Po4mlinq0QVPtKS4lBcW4eZIsesbxOaL+uFNrxb583TLFzXo0DbRPzS46sg==, tarball: https://registry.npmjs.org/@storybook/blocks/-/blocks-8.4.6.tgz} - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta - storybook: ^8.4.6 - peerDependenciesMeta: - react: - optional: true - react-dom: - optional: true - - '@storybook/builder-vite@8.4.6': - resolution: {integrity: sha512-PyJsaEPyuRFFEplpNUi+nbuJd7d1DC2dAZjpsaHTXyqg5iPIbkIgsbCJLUDeIXnUDqM/utjmMpN0sQKJuhIc6w==, tarball: https://registry.npmjs.org/@storybook/builder-vite/-/builder-vite-8.4.6.tgz} - peerDependencies: - storybook: ^8.4.6 - vite: ^4.0.0 || ^5.0.0 || ^6.0.0 - - '@storybook/channels@8.1.11': - resolution: {integrity: sha512-fu5FTqo6duOqtJFa6gFzKbiSLJoia+8Tibn3xFfB6BeifWrH81hc+AZq0lTmHo5qax2G5t8ZN8JooHjMw6k2RA==, tarball: https://registry.npmjs.org/@storybook/channels/-/channels-8.1.11.tgz} - - '@storybook/client-logger@8.1.11': - resolution: {integrity: sha512-DVMh2usz3yYmlqCLCiCKy5fT8/UR9aTh+gSqwyNFkGZrIM4otC5A8eMXajXifzotQLT5SaOEnM3WzHwmpvMIEA==, tarball: https://registry.npmjs.org/@storybook/client-logger/-/client-logger-8.1.11.tgz} + storybook: ^9.1.2 + vite: ^5.0.0 || ^6.0.0 || ^7.0.0 - '@storybook/components@8.4.6': - resolution: {integrity: sha512-9tKSJJCyFT5RZMRGyozTBJkr9C9Yfk1nuOE9XbDEE1Z+3/IypKR9+iwc5mfNBStDNY+rxtYWNLKBb5GPR2yhzA==, tarball: https://registry.npmjs.org/@storybook/components/-/components-8.4.6.tgz} + '@storybook/csf-plugin@9.1.2': + resolution: {integrity: sha512-bfMh6r+RieBLPWtqqYN70le2uTE4JzOYPMYSCagHykUti3uM/1vRFaZNkZtUsRy5GwEzE5jLdDXioG1lOEeT2Q==, tarball: https://registry.npmjs.org/@storybook/csf-plugin/-/csf-plugin-9.1.2.tgz} peerDependencies: - storybook: ^8.2.0 || ^8.3.0-0 || ^8.4.0-0 || ^8.5.0-0 || ^8.6.0-0 - - '@storybook/core-events@8.1.11': - resolution: {integrity: sha512-vXaNe2KEW9BGlLrg0lzmf5cJ0xt+suPjWmEODH5JqBbrdZ67X6ApA2nb6WcxDQhykesWCuFN5gp1l+JuDOBi7A==, tarball: https://registry.npmjs.org/@storybook/core-events/-/core-events-8.1.11.tgz} - - '@storybook/core@8.5.3': - resolution: {integrity: sha512-ZLlr2pltbj/hmC54lggJTnh09FCAJR62lIdiXNwa+V+/eJz0CfD8tfGmZGKPSmaQeZBpMwAOeRM97k2oLPF+0w==, tarball: https://registry.npmjs.org/@storybook/core/-/core-8.5.3.tgz} - peerDependencies: - prettier: ^2 || ^3 - peerDependenciesMeta: - prettier: - optional: true - - '@storybook/csf-plugin@8.4.6': - resolution: {integrity: sha512-JDIT0czC4yMgKGNf39KTZr3zm5MusAZdn6LBrTfvWb7CrTCR4iVHa4lp2yb7EJk41vHsBec0QUYDDuiFH/vV0g==, tarball: https://registry.npmjs.org/@storybook/csf-plugin/-/csf-plugin-8.4.6.tgz} - peerDependencies: - storybook: ^8.4.6 - - '@storybook/csf@0.1.11': - resolution: {integrity: sha512-dHYFQH3mA+EtnCkHXzicbLgsvzYjcDJ1JWsogbItZogkPHgSJM/Wr71uMkcvw8v9mmCyP4NpXJuu6bPoVsOnzg==, tarball: https://registry.npmjs.org/@storybook/csf/-/csf-0.1.11.tgz} - - '@storybook/csf@0.1.12': - resolution: {integrity: sha512-9/exVhabisyIVL0VxTCxo01Tdm8wefIXKXfltAPTSr8cbLn5JAxGQ6QV3mjdecLGEOucfoVhAKtJfVHxEK1iqw==, tarball: https://registry.npmjs.org/@storybook/csf/-/csf-0.1.12.tgz} - - '@storybook/csf@0.1.13': - resolution: {integrity: sha512-7xOOwCLGB3ebM87eemep89MYRFTko+D8qE7EdAAq74lgdqRR5cOUtYWJLjO2dLtP94nqoOdHJo6MdLLKzg412Q==, tarball: https://registry.npmjs.org/@storybook/csf/-/csf-0.1.13.tgz} + storybook: ^9.1.2 '@storybook/global@5.0.0': resolution: {integrity: sha512-FcOqPAXACP0I3oJ/ws6/rrPT9WGhu915Cg8D02a9YxLo0DE9zI+a9A5gRGvmQ09fiWPukqI8ZAEoQEdWUKMQdQ==, tarball: https://registry.npmjs.org/@storybook/global/-/global-5.0.0.tgz} - '@storybook/icons@1.2.12': - resolution: {integrity: sha512-UxgyK5W3/UV4VrI3dl6ajGfHM4aOqMAkFLWe2KibeQudLf6NJpDrDMSHwZj+3iKC4jFU7dkKbbtH2h/al4sW3Q==, tarball: https://registry.npmjs.org/@storybook/icons/-/icons-1.2.12.tgz} + '@storybook/icons@1.4.0': + resolution: {integrity: sha512-Td73IeJxOyalzvjQL+JXx72jlIYHgs+REaHiREOqfpo3A2AYYG71AUbcv+lg7mEDIweKVCxsMQ0UKo634c8XeA==, tarball: https://registry.npmjs.org/@storybook/icons/-/icons-1.4.0.tgz} engines: {node: '>=14.0.0'} peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - - '@storybook/instrumenter@8.4.6': - resolution: {integrity: sha512-snXjlgbp065A6KoK9zkjBYEIMCSlN5JefPKzt1FC0rbcbtahhD+iPpqISKhDSczwgOku/JVhVUDp/vU7AIf4mg==, tarball: https://registry.npmjs.org/@storybook/instrumenter/-/instrumenter-8.4.6.tgz} - peerDependencies: - storybook: ^8.4.6 - - '@storybook/instrumenter@8.5.3': - resolution: {integrity: sha512-pxaTbGeju8MkwouIiaWX5DMWtpRruxqig8W3nZPOvzoSCCbQY+sLMQoyXxFlpGxLBjcvXivkL7AMVBKps5sFEQ==, tarball: https://registry.npmjs.org/@storybook/instrumenter/-/instrumenter-8.5.3.tgz} - peerDependencies: - storybook: ^8.5.3 - - '@storybook/manager-api@8.4.6': - resolution: {integrity: sha512-TsXlQ5m5rTl2KNT9icPFyy822AqXrx1QplZBt/L7cFn7SpqQKDeSta21FH7MG0piAvzOweXebVSqKngJ6cCWWQ==, tarball: https://registry.npmjs.org/@storybook/manager-api/-/manager-api-8.4.6.tgz} - peerDependencies: - storybook: ^8.2.0 || ^8.3.0-0 || ^8.4.0-0 || ^8.5.0-0 || ^8.6.0-0 - - '@storybook/preview-api@8.4.6': - resolution: {integrity: sha512-LbD+lR1FGvWaJBXteVx5xdgs1x1D7tyidBg2CsW2ex+cP0iJ176JgjPfutZxlWOfQnhfRYNnJ3WKoCIfxFOTKA==, tarball: https://registry.npmjs.org/@storybook/preview-api/-/preview-api-8.4.6.tgz} - peerDependencies: - storybook: ^8.2.0 || ^8.3.0-0 || ^8.4.0-0 || ^8.5.0-0 || ^8.6.0-0 - - '@storybook/preview-api@8.5.3': - resolution: {integrity: sha512-dUsuXW+KgDg4tWXOB6dk5j5gwwRUzbPvicHAY9mzbpSVScbWXuE5T/S/9hHlbtfkhFroWQgPx2eB8z3rai+7RQ==, tarball: https://registry.npmjs.org/@storybook/preview-api/-/preview-api-8.5.3.tgz} - peerDependencies: - storybook: ^8.2.0 || ^8.3.0-0 || ^8.4.0-0 || ^8.5.0-0 || ^8.6.0-0 + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta - '@storybook/react-dom-shim@8.4.6': - resolution: {integrity: sha512-f7RM8GO++fqMxbjNdEzeGS1P821jXuwRnAraejk5hyjB5SqetauFxMwoFYEYfJXPaLX2qIubnIJ78hdJ/IBaEA==, tarball: https://registry.npmjs.org/@storybook/react-dom-shim/-/react-dom-shim-8.4.6.tgz} + '@storybook/react-dom-shim@9.1.2': + resolution: {integrity: sha512-nw7BLAHCJswPZGsuL0Gs2AvFUWriusCTgPBmcHppSw/AqvT4XRFRDE+5q3j04/XKuZBrAA2sC4L+HuC0uzEChQ==, tarball: https://registry.npmjs.org/@storybook/react-dom-shim/-/react-dom-shim-9.1.2.tgz} peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta - storybook: ^8.4.6 + storybook: ^9.1.2 - '@storybook/react-vite@8.4.6': - resolution: {integrity: sha512-bVoYj3uJRz0SknK2qN3vBVSoEXsvyARQLuHjP9eX0lWBd9XSxZinmVbexPdD0OeJYcJIdmbli2/Gw7/hu5CjFA==, tarball: https://registry.npmjs.org/@storybook/react-vite/-/react-vite-8.4.6.tgz} - engines: {node: '>=18.0.0'} + '@storybook/react-vite@9.1.2': + resolution: {integrity: sha512-dv3CBjOzmMoSyIotMtdmsBRjB25i19OjFP0IZqauLeUoVm6QddILW7JRcZVLrzhATyBEn+sEAdWQ4j79Z11HAg==, tarball: https://registry.npmjs.org/@storybook/react-vite/-/react-vite-9.1.2.tgz} + engines: {node: '>=20.0.0'} peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta - storybook: ^8.4.6 - vite: ^4.0.0 || ^5.0.0 || ^6.0.0 + storybook: ^9.1.2 + vite: ^5.0.0 || ^6.0.0 || ^7.0.0 - '@storybook/react@8.4.6': - resolution: {integrity: sha512-QAT23beoYNLhFGAXPimtuMErvpcI7eZbZ4AlLqW1fhiTZrRYw06cjC1bs9H3tODMcHH9LS5p3Wz9b29jtV2XGw==, tarball: https://registry.npmjs.org/@storybook/react/-/react-8.4.6.tgz} - engines: {node: '>=18.0.0'} + '@storybook/react@9.1.2': + resolution: {integrity: sha512-VVXu1HrhDExj/yj+heFYc8cgIzBruXy1UYT3LW0WiJyadgzYz3J41l/Lf/j2FCppyxwlXb19Uv51plb1F1C77w==, tarball: https://registry.npmjs.org/@storybook/react/-/react-9.1.2.tgz} + engines: {node: '>=20.0.0'} peerDependencies: - '@storybook/test': 8.4.6 react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0-beta - storybook: ^8.4.6 - typescript: '>= 4.2.x' + storybook: ^9.1.2 + typescript: '>= 4.9.x' peerDependenciesMeta: - '@storybook/test': - optional: true typescript: optional: true - '@storybook/test@8.4.6': - resolution: {integrity: sha512-MeU1g65YgU66M2NtmEIL9gVeHk+en0k9Hp0wfxEO7NT/WLfaOD5RXLRDJVhbAlrH/6tLeWKIPNh/D26y27vO/g==, tarball: https://registry.npmjs.org/@storybook/test/-/test-8.4.6.tgz} - peerDependencies: - storybook: ^8.4.6 - - '@storybook/test@8.5.3': - resolution: {integrity: sha512-2smoDbtU6Qh4yk0uD18qGfW6ll7lZBzKlF58Ha1CgWR4o+jpeeTQcfDLH9gG6sNrpojF7AVzMh/aN9BDHD+Chg==, tarball: https://registry.npmjs.org/@storybook/test/-/test-8.5.3.tgz} - peerDependencies: - storybook: ^8.5.3 - - '@storybook/theming@8.4.6': - resolution: {integrity: sha512-q7vDPN/mgj7cXIVQ9R1/V75hrzNgKkm2G0LjMo57//9/djQ+7LxvBsR1iScbFIRSEqppvMiBFzkts+2uXidySA==, tarball: https://registry.npmjs.org/@storybook/theming/-/theming-8.4.6.tgz} - peerDependencies: - storybook: ^8.2.0 || ^8.3.0-0 || ^8.4.0-0 || ^8.5.0-0 || ^8.6.0-0 - '@swc/core-darwin-arm64@1.3.38': resolution: {integrity: sha512-4ZTJJ/cR0EsXW5UxFCifZoGfzQ07a8s4ayt1nLvLQ5QoB1GTAf9zsACpvWG8e7cmCR0L76R5xt8uJuyr+noIXA==, tarball: https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.38.tgz} engines: {node: '>=10'} @@ -2493,10 +2325,6 @@ packages: resolution: {integrity: sha512-fB0R+fa3AUqbLHWyxXa2kGVtf1Fe1ZZFr0Zp6AIbIAzXb2mKbEXl+PCQNUOaq5lbTab5tfctfXRNsWXxa2f7Aw==, tarball: https://registry.npmjs.org/@testing-library/dom/-/dom-9.3.3.tgz} engines: {node: '>=14'} - '@testing-library/jest-dom@6.5.0': - resolution: {integrity: sha512-xGGHpBXYSHUUr6XsKBfs85TWlYKpTc37cSBBVrXcib2MkHLboWlkClhWF37JKlDb9KEq3dHs+f2xR7XJEWGBxA==, tarball: https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.5.0.tgz} - engines: {node: '>=14', npm: '>=6', yarn: '>=1'} - '@testing-library/jest-dom@6.6.3': resolution: {integrity: sha512-IteBhl4XqYNkM54f4ejhLRJiZNqcSCoXUOG2CPK7qbD322KjQozM4kHQOfkG2oln9b9HTYqs+Sae8vBATubxxA==, tarball: https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.6.3.tgz} engines: {node: '>=14', npm: '>=6', yarn: '>=1'} @@ -2508,12 +2336,6 @@ packages: react: ^18.0.0 react-dom: ^18.0.0 - '@testing-library/user-event@14.5.2': - resolution: {integrity: sha512-YAh82Wh4TIrxYLmfGcixwD18oIjyC1pFQC2Y01F2lzV2HTMiYrI0nze0FD0ocB//CKS/7jIUgae+adPqxK5yCQ==, tarball: https://registry.npmjs.org/@testing-library/user-event/-/user-event-14.5.2.tgz} - engines: {node: '>=12', npm: '>=6'} - peerDependencies: - '@testing-library/dom': '>=7.21.4' - '@testing-library/user-event@14.6.1': resolution: {integrity: sha512-vq7fv0rnt+QTXgPxr5Hjc210p6YKq2kmdziLgnsZGgLJ9e6VAShx1pACLuRjd/AS/sr7phAR58OIIpf0LlmQNw==, tarball: https://registry.npmjs.org/@testing-library/user-event/-/user-event-14.6.1.tgz} engines: {node: '>=12', npm: '>=6'} @@ -2554,6 +2376,9 @@ packages: '@types/body-parser@1.19.2': resolution: {integrity: sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==, tarball: https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz} + '@types/chai@5.2.2': + resolution: {integrity: sha512-8kB30R7Hwqf40JPiKhVzodJs2Qc1ZJ5zuT3uzw5Hq/dhNCl3G3l83jfpdI1e20BP348+fV7VIL/+FxaXkqBmWg==, tarball: https://registry.npmjs.org/@types/chai/-/chai-5.2.2.tgz} + '@types/chroma-js@2.4.0': resolution: {integrity: sha512-JklMxityrwjBTjGY2anH8JaTx3yjRU3/sEHSblLH1ba5lqcSh1LnImXJZO5peJfXyqKYWjHTGy4s5Wz++hARrw==, tarball: https://registry.npmjs.org/@types/chroma-js/-/chroma-js-2.4.0.tgz} @@ -2599,6 +2424,9 @@ packages: '@types/debug@4.1.12': resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==, tarball: https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz} + '@types/deep-eql@4.0.2': + resolution: {integrity: sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==, tarball: https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz} + '@types/doctrine@0.0.9': resolution: {integrity: sha512-eOIHzCUSH7SMfonMG1LsC2f8vxBFtho6NGBznK41R84YzPuvSBzrhEps33IsQiOW9+VL6NQ9DbjQJznk/S4uRA==, tarball: https://registry.npmjs.org/@types/doctrine/-/doctrine-0.0.9.tgz} @@ -2812,23 +2640,28 @@ packages: peerDependencies: vite: ^4.2.0 || ^5.0.0 || ^6.0.0 - '@vitest/expect@2.0.5': - resolution: {integrity: sha512-yHZtwuP7JZivj65Gxoi8upUN2OzHTi3zVfjwdpu2WrvCZPLwsJ2Ey5ILIPccoW23dd/zQBlJ4/dhi7DWNyXCpA==, tarball: https://registry.npmjs.org/@vitest/expect/-/expect-2.0.5.tgz} - - '@vitest/pretty-format@2.0.5': - resolution: {integrity: sha512-h8k+1oWHfwTkyTkb9egzwNMfJAEx4veaPSnMeKbVSjp4euqGSbQlm5+6VHwTr7u4FJslVVsUG5nopCaAYdOmSQ==, tarball: https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.0.5.tgz} + '@vitest/expect@3.2.4': + resolution: {integrity: sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==, tarball: https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz} - '@vitest/pretty-format@2.1.8': - resolution: {integrity: sha512-9HiSZ9zpqNLKlbIDRWOnAWqgcA7xu+8YxXSekhr0Ykab7PAYFkhkwoqVArPOtJhPmYeE2YHgKZlj3CP36z2AJQ==, tarball: https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.1.8.tgz} + '@vitest/mocker@3.2.4': + resolution: {integrity: sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==, tarball: https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz} + peerDependencies: + msw: ^2.4.9 + vite: ^5.0.0 || ^6.0.0 || ^7.0.0-0 + peerDependenciesMeta: + msw: + optional: true + vite: + optional: true - '@vitest/spy@2.0.5': - resolution: {integrity: sha512-c/jdthAhvJdpfVuaexSrnawxZz6pywlTPe84LUB2m/4t3rl2fTo9NFGBG4oWgaD+FTgDDV8hJ/nibT7IfH3JfA==, tarball: https://registry.npmjs.org/@vitest/spy/-/spy-2.0.5.tgz} + '@vitest/pretty-format@3.2.4': + resolution: {integrity: sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==, tarball: https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz} - '@vitest/utils@2.0.5': - resolution: {integrity: sha512-d8HKbqIcya+GR67mkZbrzhS5kKhtp8dQLcmRZLGTscGVg7yImT82cIrhtn2L8+VujWcy6KZweApgNmPsTAO/UQ==, tarball: https://registry.npmjs.org/@vitest/utils/-/utils-2.0.5.tgz} + '@vitest/spy@3.2.4': + resolution: {integrity: sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==, tarball: https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz} - '@vitest/utils@2.1.8': - resolution: {integrity: sha512-dwSoui6djdwbfFmIgbIjX2ZhIoG7Ex/+xpxyiEgIGzjliY8xGkcpITKTlp6B4MgtGkF2ilvm97cPM96XZaAgcA==, tarball: https://registry.npmjs.org/@vitest/utils/-/utils-2.1.8.tgz} + '@vitest/utils@3.2.4': + resolution: {integrity: sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==, tarball: https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz} '@xterm/addon-canvas@0.7.0': resolution: {integrity: sha512-LF5LYcfvefJuJ7QotNRdRSPc9YASAVDeoT5uyXS/nZshZXjYplGXRECBGiznwvhNL2I8bq1Lf5MzRwstsYQ2Iw==, tarball: https://registry.npmjs.org/@xterm/addon-canvas/-/addon-canvas-0.7.0.tgz} @@ -3061,9 +2894,6 @@ packages: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==, tarball: https://registry.npmjs.org/braces/-/braces-3.0.3.tgz} engines: {node: '>=8'} - browser-assert@1.2.1: - resolution: {integrity: sha512-nfulgvOR6S4gt9UKCeGJOuSGBPGiFT6oQ/2UBnvTY/5aQ1PnksW72fhZkM30DzoRRv2WpwZf1vHHEr3mtuXIWQ==, tarball: https://registry.npmjs.org/browser-assert/-/browser-assert-1.2.1.tgz} - browserslist@4.24.2: resolution: {integrity: sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg==, tarball: https://registry.npmjs.org/browserslist/-/browserslist-4.24.2.tgz} engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} @@ -3133,9 +2963,9 @@ packages: ccount@2.0.1: resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==, tarball: https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz} - chai@5.1.2: - resolution: {integrity: sha512-aGtmf24DW6MLHHG5gCx4zaI3uBq3KRtxeVs0DjFH6Z0rDNbsvTxFASFvdj79pxjxZ8/5u3PIiN3IwEIQkiiuPw==, tarball: https://registry.npmjs.org/chai/-/chai-5.1.2.tgz} - engines: {node: '>=12'} + chai@5.2.1: + resolution: {integrity: sha512-5nFxhUrX0PqtyogoYOA8IPswy5sZFTOsBFl/9bNsmDLgsxYTzSZQJDPppDnZPTQbzSEm0hqGjWPzRemQCYbD6A==, tarball: https://registry.npmjs.org/chai/-/chai-5.2.1.tgz} + engines: {node: '>=18'} chalk@2.4.2: resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==, tarball: https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz} @@ -3201,6 +3031,18 @@ packages: '@chromatic-com/playwright': optional: true + chromatic@12.2.0: + resolution: {integrity: sha512-GswmBW9ZptAoTns1BMyjbm55Z7EsIJnUvYKdQqXIBZIKbGErmpA+p4c0BYA+nzw5B0M+rb3Iqp1IaH8TFwIQew==, tarball: https://registry.npmjs.org/chromatic/-/chromatic-12.2.0.tgz} + hasBin: true + peerDependencies: + '@chromatic-com/cypress': ^0.*.* || ^1.0.0 + '@chromatic-com/playwright': ^0.*.* || ^1.0.0 + peerDependenciesMeta: + '@chromatic-com/cypress': + optional: true + '@chromatic-com/playwright': + optional: true + ci-info@3.9.0: resolution: {integrity: sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==, tarball: https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz} engines: {node: '>=8'} @@ -3309,6 +3151,10 @@ packages: resolution: {integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==, tarball: https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz} engines: {node: '>= 0.6'} + cookie@1.0.2: + resolution: {integrity: sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA==, tarball: https://registry.npmjs.org/cookie/-/cookie-1.0.2.tgz} + engines: {node: '>=18'} + core-util-is@1.0.3: resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==, tarball: https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz} @@ -3818,6 +3664,10 @@ packages: resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==, tarball: https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz} engines: {node: '>=10'} + find-up@7.0.0: + resolution: {integrity: sha512-YyZM99iHrqLKjmt4LJDj58KI+fYyufRLBSYcqycxf//KpBk9FoewoGX0450m9nB44qrZnovzC2oeP5hUibxc/g==, tarball: https://registry.npmjs.org/find-up/-/find-up-7.0.0.tgz} + engines: {node: '>=18'} + flat-cache@3.2.0: resolution: {integrity: sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==, tarball: https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz} engines: {node: ^10.12.0 || >=12.0.0} @@ -3842,8 +3692,8 @@ packages: resolution: {integrity: sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==, tarball: https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz} engines: {node: '>=14'} - form-data@4.0.2: - resolution: {integrity: sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==, tarball: https://registry.npmjs.org/form-data/-/form-data-4.0.2.tgz} + form-data@4.0.4: + resolution: {integrity: sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==, tarball: https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz} engines: {node: '>= 6'} format@0.2.2: @@ -3983,10 +3833,6 @@ packages: resolution: {integrity: sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==, tarball: https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz} engines: {node: '>= 0.4'} - hasown@2.0.0: - resolution: {integrity: sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==, tarball: https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz} - engines: {node: '>= 0.4'} - hasown@2.0.2: resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==, tarball: https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz} engines: {node: '>= 0.4'} @@ -4145,9 +3991,6 @@ packages: resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==, tarball: https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz} engines: {node: '>= 0.4'} - is-core-module@2.13.1: - resolution: {integrity: sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==, tarball: https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz} - is-core-module@2.16.1: resolution: {integrity: sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==, tarball: https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz} engines: {node: '>= 0.4'} @@ -4179,10 +4022,6 @@ packages: resolution: {integrity: sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==, tarball: https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz} engines: {node: '>=6'} - is-generator-function@1.1.0: - resolution: {integrity: sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==, tarball: https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz} - engines: {node: '>= 0.4'} - is-glob@4.0.3: resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==, tarball: https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz} engines: {node: '>=0.10.0'} @@ -4226,10 +4065,6 @@ packages: resolution: {integrity: sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==, tarball: https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz} engines: {node: '>= 0.4'} - is-regex@1.2.1: - resolution: {integrity: sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==, tarball: https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz} - engines: {node: '>= 0.4'} - is-set@2.0.2: resolution: {integrity: sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g==, tarball: https://registry.npmjs.org/is-set/-/is-set-2.0.2.tgz} @@ -4501,10 +4336,6 @@ packages: resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==, tarball: https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz} hasBin: true - jsdoc-type-pratt-parser@4.1.0: - resolution: {integrity: sha512-Hicd6JK5Njt2QB6XYFS7ok9e37O8AYk3jTcppG4YVQnYjOemymvTcmc7OWsmq/Qqj5TdRFO5/x/tIPmBeRtGHg==, tarball: https://registry.npmjs.org/jsdoc-type-pratt-parser/-/jsdoc-type-pratt-parser-4.1.0.tgz} - engines: {node: '>=12.0.0'} - jsdom@20.0.3: resolution: {integrity: sha512-SYhBvTh89tTfCD/CRdSOm13mOBa42iTaTyfyEWBdKcGdPxPtLFBXuHR8XHb33YNYaP+lLbmSvBTsnoesCNJEsQ==, tarball: https://registry.npmjs.org/jsdom/-/jsdom-20.0.3.tgz} engines: {node: '>=14'} @@ -4586,6 +4417,10 @@ packages: resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==, tarball: https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz} engines: {node: '>=10'} + locate-path@7.2.0: + resolution: {integrity: sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==, tarball: https://registry.npmjs.org/locate-path/-/locate-path-7.2.0.tgz} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + lodash-es@4.17.21: resolution: {integrity: sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==, tarball: https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz} @@ -4615,11 +4450,8 @@ packages: resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==, tarball: https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz} hasBin: true - loupe@3.1.2: - resolution: {integrity: sha512-23I4pFZHmAemUnz8WZXbYRSKYj801VDaNv9ETuMh7IrMc7VuVVSo+Z9iLE3ni30+U48iDWfi30d3twAXBYmnCg==, tarball: https://registry.npmjs.org/loupe/-/loupe-3.1.2.tgz} - - loupe@3.1.3: - resolution: {integrity: sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==, tarball: https://registry.npmjs.org/loupe/-/loupe-3.1.3.tgz} + loupe@3.2.0: + resolution: {integrity: sha512-2NCfZcT5VGVNX9mSZIxLRkEAegDGBpuQZBy13desuHeVORmBDyAET4TkJr4SjqQy3A8JDofMN6LpkK8Xcm/dlw==, tarball: https://registry.npmjs.org/loupe/-/loupe-3.2.0.tgz} lowlight@1.20.0: resolution: {integrity: sha512-8Ktj+prEb1RoCPkEOrPMYUN/nCggB7qAWe3a7OpMjWQkh3l2RD5wKRQ+o8Q8YuI9RG/xs95waaI/E6ym/7NsTw==, tarball: https://registry.npmjs.org/lowlight/-/lowlight-1.20.0.tgz} @@ -4643,13 +4475,8 @@ packages: resolution: {integrity: sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==, tarball: https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz} hasBin: true - magic-string@0.27.0: - resolution: {integrity: sha512-8UnnX2PeRAPZuN12svgR9j7M1uWMovg/CEnIwIG0LFkXSJJe4PdfUGiTGl8V9bsBHFUtfVINcSyYxd7q+kx9fA==, tarball: https://registry.npmjs.org/magic-string/-/magic-string-0.27.0.tgz} - engines: {node: '>=12'} - - magic-string@0.30.5: - resolution: {integrity: sha512-7xlpfBaQaP/T6Vh8MO/EqXSW5En6INHEvEXQiuff7Gku0PWjU3uf6w/j9o7O+SpB5fOAkrI5HeoNgwjEO0pFsA==, tarball: https://registry.npmjs.org/magic-string/-/magic-string-0.30.5.tgz} - engines: {node: '>=12'} + magic-string@0.30.17: + resolution: {integrity: sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==, tarball: https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz} make-dir@4.0.0: resolution: {integrity: sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==, tarball: https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz} @@ -4661,9 +4488,6 @@ packages: makeerror@1.0.12: resolution: {integrity: sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==, tarball: https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz} - map-or-similar@1.5.0: - resolution: {integrity: sha512-0aF7ZmVon1igznGI4VS30yugpduQW3y3GkcgGJOp7d8x8QrizhigUxjI/m2UojsXXto+jLAH3KSz+xOJTiORjg==, tarball: https://registry.npmjs.org/map-or-similar/-/map-or-similar-1.5.0.tgz} - markdown-table@3.0.3: resolution: {integrity: sha512-Z1NL3Tb1M9wH4XESsCDEksWoKTdlUafKc4pt0GRwjUyXaCFZ+dc3g2erqB6zm3szA2IUSi7VnPI+o/9jnxh9hw==, tarball: https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.3.tgz} @@ -4735,9 +4559,6 @@ packages: memoize-one@5.2.1: resolution: {integrity: sha512-zYiwtZUcYyXKo/np96AGZAckk+FWWsUdJ3cHGGmld7+AhvcWmQyGCYUh1hc4Q/pkOhb65dQR/pqCyK0cOaHz4Q==, tarball: https://registry.npmjs.org/memoize-one/-/memoize-one-5.2.1.tgz} - memoizerific@1.11.3: - resolution: {integrity: sha512-/EuHYwAPdLtXwAwSZkh/Gutery6pD2KYd44oQLhAvQp/50mpyduZh8Q7PYHXTCJ+wuXxt7oij2LXyIJOOYFPog==, tarball: https://registry.npmjs.org/memoizerific/-/memoizerific-1.11.3.tgz} - merge-descriptors@1.0.3: resolution: {integrity: sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==, tarball: https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz} @@ -4936,8 +4757,8 @@ packages: resolution: {integrity: sha512-qxBgB7Qa2sEQgHFjj0dSigq7fX4k6Saisd5Nelwp2q8mlbAFh5dHV9JTTlF8viYJLSSWgMCZFUom8PJcMNBoJw==, tarball: https://registry.npmjs.org/mock-socket/-/mock-socket-9.3.1.tgz} engines: {node: '>= 8'} - monaco-editor@0.52.0: - resolution: {integrity: sha512-OeWhNpABLCeTqubfqLMXGsqf6OmPU6pHM85kF3dhy6kq5hnhuVS1p3VrEW/XhWHc71P2tHyS5JFySD8mgs1crw==, tarball: https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.52.0.tgz} + monaco-editor@0.52.2: + resolution: {integrity: sha512-GEQWEZmfkOGLdd3XK8ryrfWz3AIP8YymVXiPHEdewrUq7mh0qrKrfHLNCXcbB6sTnMLnOZ3ztSiKcciFUkIJwQ==, tarball: https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.52.2.tgz} moo-color@1.0.3: resolution: {integrity: sha512-i/+ZKXMDf6aqYtBhuOcej71YSlbjT3wCO/4H1j8rPvxDJEifdwgg5MaFyu6iYAT8GBZJg2z0dkgK4YMzvURALQ==, tarball: https://registry.npmjs.org/moo-color/-/moo-color-1.0.3.tgz} @@ -5066,6 +4887,10 @@ packages: resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==, tarball: https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz} engines: {node: '>=10'} + p-limit@4.0.0: + resolution: {integrity: sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==, tarball: https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + p-locate@4.1.0: resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==, tarball: https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz} engines: {node: '>=8'} @@ -5074,6 +4899,10 @@ packages: resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==, tarball: https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz} engines: {node: '>=10'} + p-locate@6.0.0: + resolution: {integrity: sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==, tarball: https://registry.npmjs.org/p-locate/-/p-locate-6.0.0.tgz} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + p-try@2.2.0: resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==, tarball: https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz} engines: {node: '>=6'} @@ -5113,6 +4942,10 @@ packages: resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==, tarball: https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz} engines: {node: '>=8'} + path-exists@5.0.0: + resolution: {integrity: sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==, tarball: https://registry.npmjs.org/path-exists/-/path-exists-5.0.0.tgz} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + path-is-absolute@1.0.1: resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==, tarball: https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz} engines: {node: '>=0.10.0'} @@ -5179,10 +5012,6 @@ packages: engines: {node: '>=18'} hasBin: true - polished@4.3.1: - resolution: {integrity: sha512-OBatVyC/N7SCW/FaDHrSd+vn0o5cS855TOmYi4OkdWUMSJCET/xip//ch8xGUvtr3i44X9LVyWwQlRMTN3pwSA==, tarball: https://registry.npmjs.org/polished/-/polished-4.3.1.tgz} - engines: {node: '>=10'} - possible-typed-array-names@1.0.0: resolution: {integrity: sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==, tarball: https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz} engines: {node: '>= 0.4'} @@ -5268,10 +5097,6 @@ packages: process-nextick-args@2.0.1: resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==, tarball: https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz} - process@0.11.10: - resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==, tarball: https://registry.npmjs.org/process/-/process-0.11.10.tgz} - engines: {node: '>= 0.6.0'} - prompts@2.4.2: resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==, tarball: https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz} engines: {node: '>= 6'} @@ -5352,9 +5177,9 @@ packages: peerDependencies: typescript: '>= 4.3.x' - react-docgen@7.0.3: - resolution: {integrity: sha512-i8aF1nyKInZnANZ4uZrH49qn1paRgBZ7wZiCNBMnenlPzEv0mRl+ShpTVEI6wZNl8sSc79xZkivtgLKQArcanQ==, tarball: https://registry.npmjs.org/react-docgen/-/react-docgen-7.0.3.tgz} - engines: {node: '>=16.14.0'} + react-docgen@8.0.0: + resolution: {integrity: sha512-kmob/FOTwep7DUWf9KjuenKX0vyvChr3oTdvvPt09V60Iz75FJp+T/0ZeHMbAfJj2WaVWqAPP5Hmm3PYzSPPKg==, tarball: https://registry.npmjs.org/react-docgen/-/react-docgen-8.0.0.tgz} + engines: {node: ^20.9.0 || >=22} react-dom@18.3.1: resolution: {integrity: sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==, tarball: https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz} @@ -5440,18 +5265,15 @@ packages: react: ^16.14.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc react-dom: ^16.14.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc - react-router-dom@6.26.2: - resolution: {integrity: sha512-z7YkaEW0Dy35T3/QKPYB1LjMK2R1fxnHO8kWpUMTBdfVzZrWOiY9a7CtN8HqdWtDUWd5FY6Dl8HFsqVwH4uOtQ==, tarball: https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.26.2.tgz} - engines: {node: '>=14.0.0'} + react-router@7.8.0: + resolution: {integrity: sha512-r15M3+LHKgM4SOapNmsH3smAizWds1vJ0Z9C4mWaKnT9/wD7+d/0jYcj6LmOvonkrO4Rgdyp4KQ/29gWN2i1eg==, tarball: https://registry.npmjs.org/react-router/-/react-router-7.8.0.tgz} + engines: {node: '>=20.0.0'} peerDependencies: - react: '>=16.8' - react-dom: '>=16.8' - - react-router@6.26.2: - resolution: {integrity: sha512-tvN1iuT03kHgOFnLPfLJ8V95eijteveqdOSk+srqfePtQvqCExB8eHOYnlilbOcyJyKnYkr1vJvf7YqotAJu1A==, tarball: https://registry.npmjs.org/react-router/-/react-router-6.26.2.tgz} - engines: {node: '>=14.0.0'} - peerDependencies: - react: '>=16.8' + react: '>=18' + react-dom: '>=18' + peerDependenciesMeta: + react-dom: + optional: true react-smooth@4.0.4: resolution: {integrity: sha512-gnGKTpYwqL0Iii09gHobNolvX4Kiq4PKx6eWBCYYix+8cdw+cGo3do906l1NBPKkSWx1DghC1dlWG9L2uGd61Q==, tarball: https://registry.npmjs.org/react-smooth/-/react-smooth-4.0.4.tgz} @@ -5597,10 +5419,6 @@ packages: engines: {node: '>= 0.4'} hasBin: true - resolve@1.22.8: - resolution: {integrity: sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==, tarball: https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz} - hasBin: true - restore-cursor@3.1.0: resolution: {integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==, tarball: https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz} engines: {node: '>=8'} @@ -5644,10 +5462,6 @@ packages: safe-buffer@5.2.1: resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==, tarball: https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz} - safe-regex-test@1.1.0: - resolution: {integrity: sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==, tarball: https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz} - engines: {node: '>= 0.4'} - safer-buffer@2.1.2: resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==, tarball: https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz} @@ -5671,6 +5485,9 @@ packages: resolution: {integrity: sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==, tarball: https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz} engines: {node: '>= 0.8.0'} + set-cookie-parser@2.7.1: + resolution: {integrity: sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ==, tarball: https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.1.tgz} + set-function-length@1.2.2: resolution: {integrity: sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==, tarball: https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz} engines: {node: '>= 0.4'} @@ -5780,27 +5597,21 @@ packages: resolution: {integrity: sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==, tarball: https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz} engines: {node: '>= 0.4'} - storybook-addon-remix-react-router@3.1.0: - resolution: {integrity: sha512-h6cOD+afyAddNrDz5ezoJGV6GBSeH7uh92VAPDz+HLuay74Cr9Ozz+aFmlzMEyVJ1hhNIMOIWDsmK56CueZjsw==, tarball: https://registry.npmjs.org/storybook-addon-remix-react-router/-/storybook-addon-remix-react-router-3.1.0.tgz} + storybook-addon-remix-react-router@5.0.0: + resolution: {integrity: sha512-XjNGLD8vhI7DhjPgkjkU9rjqjF6YSRvRjBignwo2kCGiz5HIR4TZTDRRABuwYo35/GoC2aMtxFs7zybJ4pVlsg==, tarball: https://registry.npmjs.org/storybook-addon-remix-react-router/-/storybook-addon-remix-react-router-5.0.0.tgz} peerDependencies: - '@storybook/blocks': ^8.0.0 - '@storybook/channels': ^8.0.0 - '@storybook/components': ^8.0.0 - '@storybook/core-events': ^8.0.0 - '@storybook/manager-api': ^8.0.0 - '@storybook/preview-api': ^8.0.0 - '@storybook/theming': ^8.0.0 react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 - react-router-dom: ^6.4.0 || ^7.0.0 + react-router: ^7.0.2 + storybook: ^9.0.0 peerDependenciesMeta: react: optional: true react-dom: optional: true - storybook@8.5.3: - resolution: {integrity: sha512-2WtNBZ45u1AhviRU+U+ld588tH8gDa702dNSq5C8UBaE9PlOsazGsyp90dw1s9YRvi+ejrjKAupQAU0GwwUiVg==, tarball: https://registry.npmjs.org/storybook/-/storybook-8.5.3.tgz} + storybook@9.1.2: + resolution: {integrity: sha512-TYcq7WmgfVCAQge/KueGkVlM/+g33sQcmbATlC3X6y/g2FEeSSLGrb6E6d3iemht8oio+aY6ld3YOdAnMwx45Q==, tarball: https://registry.npmjs.org/storybook/-/storybook-9.1.2.tgz} hasBin: true peerDependencies: prettier: ^2 || ^3 @@ -5915,9 +5726,6 @@ packages: resolution: {integrity: sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==, tarball: https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz} engines: {node: '>=6'} - telejson@7.2.0: - resolution: {integrity: sha512-1QTEcJkJEhc8OnStBx/ILRu5J2p0GjvWsBx56bmZRqnrkdBMUe+nX92jxV+p3dB4CP6PZCdJMQJwCggkNBMzkQ==, tarball: https://registry.npmjs.org/telejson/-/telejson-7.2.0.tgz} - test-exclude@6.0.0: resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==, tarball: https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz} engines: {node: '>=8'} @@ -5948,12 +5756,12 @@ packages: resolution: {integrity: sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==, tarball: https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.14.tgz} engines: {node: '>=12.0.0'} - tinyrainbow@1.2.0: - resolution: {integrity: sha512-weEDEq7Z5eTHPDh4xjX789+fHfF+P8boiFB+0vbWzpbnbsEr/GRaohi/uMKxg8RZMXnl1ItAi/IUHWMsjDV7kQ==, tarball: https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-1.2.0.tgz} + tinyrainbow@2.0.0: + resolution: {integrity: sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==, tarball: https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz} engines: {node: '>=14.0.0'} - tinyspy@3.0.2: - resolution: {integrity: sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==, tarball: https://registry.npmjs.org/tinyspy/-/tinyspy-3.0.2.tgz} + tinyspy@4.0.3: + resolution: {integrity: sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==, tarball: https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.3.tgz} engines: {node: '>=14.0.0'} tmpl@1.0.5: @@ -6074,8 +5882,8 @@ packages: engines: {node: '>=14.17'} hasBin: true - tzdata@1.0.40: - resolution: {integrity: sha512-IsWNGfC5GrVPG4ejYJtf3tOlBdJYs0uNzv1a+vkdANHDq2kPg4oAN2UlCfpqrCwErPZVhI6MLA2gkeuXAVnpLg==, tarball: https://registry.npmjs.org/tzdata/-/tzdata-1.0.40.tgz} + tzdata@1.0.44: + resolution: {integrity: sha512-xJ8xcdoFRwFpIQ90QV3WFXJNCO/feNn9vHVsZMJiKmtMYuo7nvF6CTpBc+SgegC1fb/3L+m32ytXT9XrBjrINg==, tarball: https://registry.npmjs.org/tzdata/-/tzdata-1.0.44.tgz} ua-parser-js@1.0.40: resolution: {integrity: sha512-z6PJ8Lml+v3ichVojCiB8toQJBuwR42ySM4ezjXIqXK3M0HczmKQ3LF4rhU55PfD99KEEXQG6yb7iOMyvYuHew==, tarball: https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.40.tgz} @@ -6094,6 +5902,10 @@ packages: resolution: {integrity: sha512-uROZWze0R0itiAKVPsYhFov9LxrPMHLMEQFszeI2gCN6bnIIZ8twzBCJcN2LJrBBLfrP0t1FW0g+JmKVl8Vk1g==, tarball: https://registry.npmjs.org/undici/-/undici-6.21.2.tgz} engines: {node: '>=18.17'} + unicorn-magic@0.1.0: + resolution: {integrity: sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==, tarball: https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz} + engines: {node: '>=18'} + unicorn-magic@0.3.0: resolution: {integrity: sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==, tarball: https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz} engines: {node: '>=18'} @@ -6215,9 +6027,6 @@ packages: util-deprecate@1.0.2: resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==, tarball: https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz} - util@0.12.5: - resolution: {integrity: sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==, tarball: https://registry.npmjs.org/util/-/util-0.12.5.tgz} - utils-merge@1.0.1: resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==, tarball: https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz} engines: {node: '>= 0.4.0'} @@ -6280,9 +6089,6 @@ packages: vue-tsc: optional: true - vite-plugin-turbosnap@1.0.3: - resolution: {integrity: sha512-p4D8CFVhZS412SyQX125qxyzOgIFouwOcvjZWk6bQbNPR1wtaEzFT6jZxAjf1dejlGqa6fqHcuCvQea6EWUkUA==, tarball: https://registry.npmjs.org/vite-plugin-turbosnap/-/vite-plugin-turbosnap-1.0.3.tgz} - vite@6.3.5: resolution: {integrity: sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ==, tarball: https://registry.npmjs.org/vite/-/vite-6.3.5.tgz} engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} @@ -6463,6 +6269,10 @@ packages: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==, tarball: https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz} engines: {node: '>=10'} + yocto-queue@1.2.1: + resolution: {integrity: sha512-AyeEbWOu/TAXdxlV9wmGcR0+yh2j3vYPGOECcIj2S7MkrLyC7ne+oye2BKTItt0ii2PHk4cDy+95+LshzbXnGg==, tarball: https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.2.1.tgz} + engines: {node: '>=12.20'} + yoctocolors-cjs@2.1.2: resolution: {integrity: sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA==, tarball: https://registry.npmjs.org/yoctocolors-cjs/-/yoctocolors-cjs-2.1.2.tgz} engines: {node: '>=18'} @@ -6778,18 +6588,6 @@ snapshots: '@babel/parser': 7.27.2 '@babel/types': 7.27.1 - '@babel/traverse@7.25.9': - dependencies: - '@babel/code-frame': 7.26.2 - '@babel/generator': 7.26.3 - '@babel/parser': 7.26.3 - '@babel/template': 7.25.9 - '@babel/types': 7.26.0 - debug: 4.4.0 - globals: 11.12.0 - transitivePeerDependencies: - - supports-color - '@babel/traverse@7.26.4': dependencies: '@babel/code-frame': 7.26.2 @@ -6814,11 +6612,6 @@ snapshots: transitivePeerDependencies: - supports-color - '@babel/types@7.26.0': - dependencies: - '@babel/helper-string-parser': 7.25.9 - '@babel/helper-validator-identifier': 7.25.9 - '@babel/types@7.26.3': dependencies: '@babel/helper-string-parser': 7.25.9 @@ -6836,39 +6629,39 @@ snapshots: '@bcoe/v8-coverage@0.2.3': {} - '@biomejs/biome@1.9.4': + '@biomejs/biome@2.2.0': optionalDependencies: - '@biomejs/cli-darwin-arm64': 1.9.4 - '@biomejs/cli-darwin-x64': 1.9.4 - '@biomejs/cli-linux-arm64': 1.9.4 - '@biomejs/cli-linux-arm64-musl': 1.9.4 - '@biomejs/cli-linux-x64': 1.9.4 - '@biomejs/cli-linux-x64-musl': 1.9.4 - '@biomejs/cli-win32-arm64': 1.9.4 - '@biomejs/cli-win32-x64': 1.9.4 - - '@biomejs/cli-darwin-arm64@1.9.4': + '@biomejs/cli-darwin-arm64': 2.2.0 + '@biomejs/cli-darwin-x64': 2.2.0 + '@biomejs/cli-linux-arm64': 2.2.0 + '@biomejs/cli-linux-arm64-musl': 2.2.0 + '@biomejs/cli-linux-x64': 2.2.0 + '@biomejs/cli-linux-x64-musl': 2.2.0 + '@biomejs/cli-win32-arm64': 2.2.0 + '@biomejs/cli-win32-x64': 2.2.0 + + '@biomejs/cli-darwin-arm64@2.2.0': optional: true - '@biomejs/cli-darwin-x64@1.9.4': + '@biomejs/cli-darwin-x64@2.2.0': optional: true - '@biomejs/cli-linux-arm64-musl@1.9.4': + '@biomejs/cli-linux-arm64-musl@2.2.0': optional: true - '@biomejs/cli-linux-arm64@1.9.4': + '@biomejs/cli-linux-arm64@2.2.0': optional: true - '@biomejs/cli-linux-x64-musl@1.9.4': + '@biomejs/cli-linux-x64-musl@2.2.0': optional: true - '@biomejs/cli-linux-x64@1.9.4': + '@biomejs/cli-linux-x64@2.2.0': optional: true - '@biomejs/cli-win32-arm64@1.9.4': + '@biomejs/cli-win32-arm64@2.2.0': optional: true - '@biomejs/cli-win32-x64@1.9.4': + '@biomejs/cli-win32-x64@2.2.0': optional: true '@bundled-es-modules/cookie@2.0.1': @@ -6884,18 +6677,17 @@ snapshots: '@types/tough-cookie': 4.0.5 tough-cookie: 4.1.4 - '@chromatic-com/storybook@3.2.2(react@18.3.1)(storybook@8.5.3(prettier@3.4.1))': + '@chromatic-com/storybook@4.1.0(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)))': dependencies: - chromatic: 11.25.2 + '@neoconfetti/react': 1.0.0 + chromatic: 12.2.0 filesize: 10.1.2 jsonfile: 6.1.0 - react-confetti: 6.2.2(react@18.3.1) - storybook: 8.5.3(prettier@3.4.1) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) strip-ansi: 7.1.0 transitivePeerDependencies: - '@chromatic-com/cypress' - '@chromatic-com/playwright' - - react '@cspotcode/source-map-support@0.8.1': dependencies: @@ -7396,9 +7188,10 @@ snapshots: '@types/yargs': 17.0.33 chalk: 4.1.2 - '@joshwooding/vite-plugin-react-docgen-typescript@0.4.2(typescript@5.6.3)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0))': + '@joshwooding/vite-plugin-react-docgen-typescript@0.6.1(typescript@5.6.3)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0))': dependencies: - magic-string: 0.27.0 + glob: 10.4.5 + magic-string: 0.30.17 react-docgen-typescript: 2.2.2(typescript@5.6.3) vite: 6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0) optionalDependencies: @@ -7435,15 +7228,24 @@ snapshots: '@types/react': 18.3.12 react: 18.3.1 - '@monaco-editor/loader@1.4.0(monaco-editor@0.52.0)': + '@mjackson/form-data-parser@0.4.0': + dependencies: + '@mjackson/multipart-parser': 0.6.3 + + '@mjackson/headers@0.5.1': {} + + '@mjackson/multipart-parser@0.6.3': + dependencies: + '@mjackson/headers': 0.5.1 + + '@monaco-editor/loader@1.5.0': dependencies: - monaco-editor: 0.52.0 state-local: 1.0.7 - '@monaco-editor/react@4.6.0(monaco-editor@0.52.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@monaco-editor/react@4.7.0(monaco-editor@0.52.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: - '@monaco-editor/loader': 1.4.0(monaco-editor@0.52.0) - monaco-editor: 0.52.0 + '@monaco-editor/loader': 1.5.0 + monaco-editor: 0.52.2 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) @@ -7566,6 +7368,8 @@ snapshots: transitivePeerDependencies: - '@types/react' + '@neoconfetti/react@1.0.0': {} + '@nodelib/fs.scandir@2.1.5': dependencies: '@nodelib/fs.stat': 2.0.5 @@ -8203,13 +8007,11 @@ snapshots: '@radix-ui/rect@1.1.0': {} - '@remix-run/router@1.19.2': {} - '@rolldown/pluginutils@1.0.0-beta.9': {} '@rollup/pluginutils@5.0.5(rollup@4.40.1)': dependencies: - '@types/estree': 1.0.6 + '@types/estree': 1.0.7 estree-walker: 2.0.2 picomatch: 2.3.1 optionalDependencies: @@ -8285,301 +8087,86 @@ snapshots: dependencies: '@sinonjs/commons': 3.0.0 - '@storybook/addon-actions@8.4.6(storybook@8.5.3(prettier@3.4.1))': - dependencies: - '@storybook/global': 5.0.0 - '@types/uuid': 9.0.2 - dequal: 2.0.3 - polished: 4.3.1 - storybook: 8.5.3(prettier@3.4.1) - uuid: 9.0.1 - - '@storybook/addon-actions@8.5.2(storybook@8.5.3(prettier@3.4.1))': - dependencies: - '@storybook/global': 5.0.0 - '@types/uuid': 9.0.2 - dequal: 2.0.3 - polished: 4.3.1 - storybook: 8.5.3(prettier@3.4.1) - uuid: 9.0.1 - - '@storybook/addon-backgrounds@8.4.6(storybook@8.5.3(prettier@3.4.1))': - dependencies: - '@storybook/global': 5.0.0 - memoizerific: 1.11.3 - storybook: 8.5.3(prettier@3.4.1) - ts-dedent: 2.2.0 - - '@storybook/addon-controls@8.4.6(storybook@8.5.3(prettier@3.4.1))': - dependencies: - '@storybook/global': 5.0.0 - dequal: 2.0.3 - storybook: 8.5.3(prettier@3.4.1) - ts-dedent: 2.2.0 - - '@storybook/addon-docs@8.4.6(@types/react@18.3.12)(storybook@8.5.3(prettier@3.4.1))': + '@storybook/addon-docs@9.1.2(@types/react@18.3.12)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)))': dependencies: '@mdx-js/react': 3.0.1(@types/react@18.3.12)(react@18.3.1) - '@storybook/blocks': 8.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.5.3(prettier@3.4.1)) - '@storybook/csf-plugin': 8.4.6(storybook@8.5.3(prettier@3.4.1)) - '@storybook/react-dom-shim': 8.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.5.3(prettier@3.4.1)) + '@storybook/csf-plugin': 9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0))) + '@storybook/icons': 1.4.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@storybook/react-dom-shim': 9.1.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0))) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - storybook: 8.5.3(prettier@3.4.1) - ts-dedent: 2.2.0 - transitivePeerDependencies: - - '@types/react' - - '@storybook/addon-essentials@8.4.6(@types/react@18.3.12)(storybook@8.5.3(prettier@3.4.1))': - dependencies: - '@storybook/addon-actions': 8.4.6(storybook@8.5.3(prettier@3.4.1)) - '@storybook/addon-backgrounds': 8.4.6(storybook@8.5.3(prettier@3.4.1)) - '@storybook/addon-controls': 8.4.6(storybook@8.5.3(prettier@3.4.1)) - '@storybook/addon-docs': 8.4.6(@types/react@18.3.12)(storybook@8.5.3(prettier@3.4.1)) - '@storybook/addon-highlight': 8.4.6(storybook@8.5.3(prettier@3.4.1)) - '@storybook/addon-measure': 8.4.6(storybook@8.5.3(prettier@3.4.1)) - '@storybook/addon-outline': 8.4.6(storybook@8.5.3(prettier@3.4.1)) - '@storybook/addon-toolbars': 8.4.6(storybook@8.5.3(prettier@3.4.1)) - '@storybook/addon-viewport': 8.4.6(storybook@8.5.3(prettier@3.4.1)) - storybook: 8.5.3(prettier@3.4.1) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) ts-dedent: 2.2.0 transitivePeerDependencies: - '@types/react' - '@storybook/addon-highlight@8.4.6(storybook@8.5.3(prettier@3.4.1))': + '@storybook/addon-links@9.1.2(react@18.3.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)))': dependencies: '@storybook/global': 5.0.0 - storybook: 8.5.3(prettier@3.4.1) - - '@storybook/addon-interactions@8.5.3(storybook@8.5.3(prettier@3.4.1))': - dependencies: - '@storybook/global': 5.0.0 - '@storybook/instrumenter': 8.5.3(storybook@8.5.3(prettier@3.4.1)) - '@storybook/test': 8.5.3(storybook@8.5.3(prettier@3.4.1)) - polished: 4.3.1 - storybook: 8.5.3(prettier@3.4.1) - ts-dedent: 2.2.0 - - '@storybook/addon-links@8.5.2(react@18.3.1)(storybook@8.5.3(prettier@3.4.1))': - dependencies: - '@storybook/csf': 0.1.12 - '@storybook/global': 5.0.0 - storybook: 8.5.3(prettier@3.4.1) - ts-dedent: 2.2.0 + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) optionalDependencies: react: 18.3.1 - '@storybook/addon-mdx-gfm@8.5.2(storybook@8.5.3(prettier@3.4.1))': - dependencies: - remark-gfm: 4.0.0 - storybook: 8.5.3(prettier@3.4.1) - ts-dedent: 2.2.0 - transitivePeerDependencies: - - supports-color - - '@storybook/addon-measure@8.4.6(storybook@8.5.3(prettier@3.4.1))': + '@storybook/addon-themes@9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)))': dependencies: - '@storybook/global': 5.0.0 - storybook: 8.5.3(prettier@3.4.1) - tiny-invariant: 1.3.3 - - '@storybook/addon-outline@8.4.6(storybook@8.5.3(prettier@3.4.1))': - dependencies: - '@storybook/global': 5.0.0 - storybook: 8.5.3(prettier@3.4.1) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) ts-dedent: 2.2.0 - '@storybook/addon-themes@8.4.6(storybook@8.5.3(prettier@3.4.1))': + '@storybook/builder-vite@9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)))(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0))': dependencies: - storybook: 8.5.3(prettier@3.4.1) - ts-dedent: 2.2.0 - - '@storybook/addon-toolbars@8.4.6(storybook@8.5.3(prettier@3.4.1))': - dependencies: - storybook: 8.5.3(prettier@3.4.1) - - '@storybook/addon-viewport@8.4.6(storybook@8.5.3(prettier@3.4.1))': - dependencies: - memoizerific: 1.11.3 - storybook: 8.5.3(prettier@3.4.1) - - '@storybook/blocks@8.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.5.3(prettier@3.4.1))': - dependencies: - '@storybook/csf': 0.1.13 - '@storybook/icons': 1.2.12(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - storybook: 8.5.3(prettier@3.4.1) - ts-dedent: 2.2.0 - optionalDependencies: - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) - - '@storybook/builder-vite@8.4.6(storybook@8.5.3(prettier@3.4.1))(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0))': - dependencies: - '@storybook/csf-plugin': 8.4.6(storybook@8.5.3(prettier@3.4.1)) - browser-assert: 1.2.1 - storybook: 8.5.3(prettier@3.4.1) + '@storybook/csf-plugin': 9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0))) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) ts-dedent: 2.2.0 vite: 6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0) - '@storybook/channels@8.1.11': + '@storybook/csf-plugin@9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)))': dependencies: - '@storybook/client-logger': 8.1.11 - '@storybook/core-events': 8.1.11 - '@storybook/global': 5.0.0 - telejson: 7.2.0 - tiny-invariant: 1.3.3 - - '@storybook/client-logger@8.1.11': - dependencies: - '@storybook/global': 5.0.0 - - '@storybook/components@8.4.6(storybook@8.5.3(prettier@3.4.1))': - dependencies: - storybook: 8.5.3(prettier@3.4.1) - - '@storybook/core-events@8.1.11': - dependencies: - '@storybook/csf': 0.1.13 - ts-dedent: 2.2.0 - - '@storybook/core@8.5.3(prettier@3.4.1)': - dependencies: - '@storybook/csf': 0.1.12 - better-opn: 3.0.2 - browser-assert: 1.2.1 - esbuild: 0.25.3 - esbuild-register: 3.6.0(esbuild@0.25.3) - jsdoc-type-pratt-parser: 4.1.0 - process: 0.11.10 - recast: 0.23.9 - semver: 7.6.2 - util: 0.12.5 - ws: 8.18.0 - optionalDependencies: - prettier: 3.4.1 - transitivePeerDependencies: - - bufferutil - - supports-color - - utf-8-validate - - '@storybook/csf-plugin@8.4.6(storybook@8.5.3(prettier@3.4.1))': - dependencies: - storybook: 8.5.3(prettier@3.4.1) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) unplugin: 1.5.0 - '@storybook/csf@0.1.11': - dependencies: - type-fest: 2.19.0 - - '@storybook/csf@0.1.12': - dependencies: - type-fest: 2.19.0 - - '@storybook/csf@0.1.13': - dependencies: - type-fest: 2.19.0 - '@storybook/global@5.0.0': {} - '@storybook/icons@1.2.12(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@storybook/icons@1.4.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@storybook/instrumenter@8.4.6(storybook@8.5.3(prettier@3.4.1))': - dependencies: - '@storybook/global': 5.0.0 - '@vitest/utils': 2.1.8 - storybook: 8.5.3(prettier@3.4.1) - - '@storybook/instrumenter@8.5.3(storybook@8.5.3(prettier@3.4.1))': - dependencies: - '@storybook/global': 5.0.0 - '@vitest/utils': 2.1.8 - storybook: 8.5.3(prettier@3.4.1) - - '@storybook/manager-api@8.4.6(storybook@8.5.3(prettier@3.4.1))': - dependencies: - storybook: 8.5.3(prettier@3.4.1) - - '@storybook/preview-api@8.4.6(storybook@8.5.3(prettier@3.4.1))': - dependencies: - storybook: 8.5.3(prettier@3.4.1) - - '@storybook/preview-api@8.5.3(storybook@8.5.3(prettier@3.4.1))': - dependencies: - storybook: 8.5.3(prettier@3.4.1) - - '@storybook/react-dom-shim@8.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.5.3(prettier@3.4.1))': + '@storybook/react-dom-shim@9.1.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)))': dependencies: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - storybook: 8.5.3(prettier@3.4.1) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) - '@storybook/react-vite@8.4.6(@storybook/test@8.4.6(storybook@8.5.3(prettier@3.4.1)))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(rollup@4.40.1)(storybook@8.5.3(prettier@3.4.1))(typescript@5.6.3)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0))': + '@storybook/react-vite@9.1.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(rollup@4.40.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)))(typescript@5.6.3)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0))': dependencies: - '@joshwooding/vite-plugin-react-docgen-typescript': 0.4.2(typescript@5.6.3)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) + '@joshwooding/vite-plugin-react-docgen-typescript': 0.6.1(typescript@5.6.3)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) '@rollup/pluginutils': 5.0.5(rollup@4.40.1) - '@storybook/builder-vite': 8.4.6(storybook@8.5.3(prettier@3.4.1))(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) - '@storybook/react': 8.4.6(@storybook/test@8.4.6(storybook@8.5.3(prettier@3.4.1)))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.5.3(prettier@3.4.1))(typescript@5.6.3) - find-up: 5.0.0 - magic-string: 0.30.5 + '@storybook/builder-vite': 9.1.2(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)))(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) + '@storybook/react': 9.1.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)))(typescript@5.6.3) + find-up: 7.0.0 + magic-string: 0.30.17 react: 18.3.1 - react-docgen: 7.0.3 + react-docgen: 8.0.0 react-dom: 18.3.1(react@18.3.1) - resolve: 1.22.8 - storybook: 8.5.3(prettier@3.4.1) + resolve: 1.22.10 + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) tsconfig-paths: 4.2.0 vite: 6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0) transitivePeerDependencies: - - '@storybook/test' - rollup - supports-color - typescript - '@storybook/react@8.4.6(@storybook/test@8.4.6(storybook@8.5.3(prettier@3.4.1)))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.5.3(prettier@3.4.1))(typescript@5.6.3)': + '@storybook/react@9.1.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)))(typescript@5.6.3)': dependencies: - '@storybook/components': 8.4.6(storybook@8.5.3(prettier@3.4.1)) '@storybook/global': 5.0.0 - '@storybook/manager-api': 8.4.6(storybook@8.5.3(prettier@3.4.1)) - '@storybook/preview-api': 8.4.6(storybook@8.5.3(prettier@3.4.1)) - '@storybook/react-dom-shim': 8.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.5.3(prettier@3.4.1)) - '@storybook/theming': 8.4.6(storybook@8.5.3(prettier@3.4.1)) + '@storybook/react-dom-shim': 9.1.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0))) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - storybook: 8.5.3(prettier@3.4.1) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) optionalDependencies: - '@storybook/test': 8.4.6(storybook@8.5.3(prettier@3.4.1)) typescript: 5.6.3 - '@storybook/test@8.4.6(storybook@8.5.3(prettier@3.4.1))': - dependencies: - '@storybook/csf': 0.1.11 - '@storybook/global': 5.0.0 - '@storybook/instrumenter': 8.4.6(storybook@8.5.3(prettier@3.4.1)) - '@testing-library/dom': 10.4.0 - '@testing-library/jest-dom': 6.5.0 - '@testing-library/user-event': 14.5.2(@testing-library/dom@10.4.0) - '@vitest/expect': 2.0.5 - '@vitest/spy': 2.0.5 - storybook: 8.5.3(prettier@3.4.1) - - '@storybook/test@8.5.3(storybook@8.5.3(prettier@3.4.1))': - dependencies: - '@storybook/csf': 0.1.12 - '@storybook/global': 5.0.0 - '@storybook/instrumenter': 8.5.3(storybook@8.5.3(prettier@3.4.1)) - '@testing-library/dom': 10.4.0 - '@testing-library/jest-dom': 6.5.0 - '@testing-library/user-event': 14.5.2(@testing-library/dom@10.4.0) - '@vitest/expect': 2.0.5 - '@vitest/spy': 2.0.5 - storybook: 8.5.3(prettier@3.4.1) - - '@storybook/theming@8.4.6(storybook@8.5.3(prettier@3.4.1))': - dependencies: - storybook: 8.5.3(prettier@3.4.1) - '@swc/core-darwin-arm64@1.3.38': optional: true @@ -8657,7 +8244,7 @@ snapshots: '@testing-library/dom@10.4.0': dependencies: - '@babel/code-frame': 7.26.2 + '@babel/code-frame': 7.27.1 '@babel/runtime': 7.26.10 '@types/aria-query': 5.0.3 aria-query: 5.3.0 @@ -8677,16 +8264,6 @@ snapshots: lz-string: 1.5.0 pretty-format: 27.5.1 - '@testing-library/jest-dom@6.5.0': - dependencies: - '@adobe/css-tools': 4.4.1 - aria-query: 5.3.2 - chalk: 3.0.0 - css.escape: 1.5.1 - dom-accessibility-api: 0.6.3 - lodash: 4.17.21 - redent: 3.0.0 - '@testing-library/jest-dom@6.6.3': dependencies: '@adobe/css-tools': 4.4.1 @@ -8705,10 +8282,6 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@testing-library/user-event@14.5.2(@testing-library/dom@10.4.0)': - dependencies: - '@testing-library/dom': 10.4.0 - '@testing-library/user-event@14.6.1(@testing-library/dom@10.4.0)': dependencies: '@testing-library/dom': 10.4.0 @@ -8755,6 +8328,10 @@ snapshots: '@types/connect': 3.4.35 '@types/node': 20.17.16 + '@types/chai@5.2.2': + dependencies: + '@types/deep-eql': 4.0.2 + '@types/chroma-js@2.4.0': {} '@types/color-convert@2.0.4': @@ -8797,6 +8374,8 @@ snapshots: dependencies: '@types/ms': 2.1.0 + '@types/deep-eql@4.0.2': {} + '@types/doctrine@0.0.9': {} '@types/estree-jsx@1.0.5': @@ -9025,37 +8604,36 @@ snapshots: transitivePeerDependencies: - supports-color - '@vitest/expect@2.0.5': - dependencies: - '@vitest/spy': 2.0.5 - '@vitest/utils': 2.0.5 - chai: 5.1.2 - tinyrainbow: 1.2.0 - - '@vitest/pretty-format@2.0.5': + '@vitest/expect@3.2.4': dependencies: - tinyrainbow: 1.2.0 + '@types/chai': 5.2.2 + '@vitest/spy': 3.2.4 + '@vitest/utils': 3.2.4 + chai: 5.2.1 + tinyrainbow: 2.0.0 - '@vitest/pretty-format@2.1.8': + '@vitest/mocker@3.2.4(msw@2.4.8(typescript@5.6.3))(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0))': dependencies: - tinyrainbow: 1.2.0 + '@vitest/spy': 3.2.4 + estree-walker: 3.0.3 + magic-string: 0.30.17 + optionalDependencies: + msw: 2.4.8(typescript@5.6.3) + vite: 6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0) - '@vitest/spy@2.0.5': + '@vitest/pretty-format@3.2.4': dependencies: - tinyspy: 3.0.2 + tinyrainbow: 2.0.0 - '@vitest/utils@2.0.5': + '@vitest/spy@3.2.4': dependencies: - '@vitest/pretty-format': 2.0.5 - estree-walker: 3.0.3 - loupe: 3.1.2 - tinyrainbow: 1.2.0 + tinyspy: 4.0.3 - '@vitest/utils@2.1.8': + '@vitest/utils@3.2.4': dependencies: - '@vitest/pretty-format': 2.1.8 - loupe: 3.1.3 - tinyrainbow: 1.2.0 + '@vitest/pretty-format': 3.2.4 + loupe: 3.2.0 + tinyrainbow: 2.0.0 '@xterm/addon-canvas@0.7.0(@xterm/xterm@5.5.0)': dependencies: @@ -9102,8 +8680,7 @@ snapshots: acorn@8.14.0: {} - acorn@8.14.1: - optional: true + acorn@8.14.1: {} agent-base@6.0.2: dependencies: @@ -9211,7 +8788,7 @@ snapshots: axios@1.8.2: dependencies: follow-redirects: 1.15.9 - form-data: 4.0.2 + form-data: 4.0.4 proxy-from-env: 1.1.0 transitivePeerDependencies: - debug @@ -9329,8 +8906,6 @@ snapshots: dependencies: fill-range: 7.1.1 - browser-assert@1.2.1: {} - browserslist@4.24.2: dependencies: caniuse-lite: 1.0.30001717 @@ -9400,12 +8975,12 @@ snapshots: ccount@2.0.1: {} - chai@5.1.2: + chai@5.2.1: dependencies: assertion-error: 2.0.1 check-error: 2.1.1 deep-eql: 5.0.2 - loupe: 3.1.2 + loupe: 3.2.0 pathval: 2.0.0 chalk@2.4.2: @@ -9462,6 +9037,8 @@ snapshots: chromatic@11.25.2: {} + chromatic@12.2.0: {} + ci-info@3.9.0: {} cjs-module-lexer@1.3.1: {} @@ -9548,6 +9125,8 @@ snapshots: cookie@0.7.2: {} + cookie@1.0.2: {} + core-util-is@1.0.3: {} cosmiconfig@7.1.0: @@ -9671,7 +9250,6 @@ snapshots: debug@4.4.1: dependencies: ms: 2.1.3 - optional: true decimal.js-light@2.5.1: {} @@ -9871,7 +9449,7 @@ snapshots: esbuild-register@3.6.0(esbuild@0.25.3): dependencies: - debug: 4.4.0 + debug: 4.4.1 esbuild: 0.25.3 transitivePeerDependencies: - supports-color @@ -10142,6 +9720,13 @@ snapshots: dependencies: locate-path: 6.0.0 path-exists: 4.0.0 + optional: true + + find-up@7.0.0: + dependencies: + locate-path: 7.2.0 + path-exists: 5.0.0 + unicorn-magic: 0.1.0 flat-cache@3.2.0: dependencies: @@ -10164,11 +9749,12 @@ snapshots: cross-spawn: 7.0.6 signal-exit: 4.1.0 - form-data@4.0.2: + form-data@4.0.4: dependencies: asynckit: 0.4.0 combined-stream: 1.0.8 es-set-tostringtag: 2.1.0 + hasown: 2.0.2 mime-types: 2.1.35 format@0.2.2: {} @@ -10303,10 +9889,6 @@ snapshots: dependencies: has-symbols: 1.1.0 - hasown@2.0.0: - dependencies: - function-bind: 1.1.2 - hasown@2.0.2: dependencies: function-bind: 1.1.2 @@ -10490,10 +10072,6 @@ snapshots: is-callable@1.2.7: {} - is-core-module@2.13.1: - dependencies: - hasown: 2.0.0 - is-core-module@2.16.1: dependencies: hasown: 2.0.2 @@ -10514,13 +10092,6 @@ snapshots: is-generator-fn@2.1.0: {} - is-generator-function@1.1.0: - dependencies: - call-bound: 1.0.3 - get-proto: 1.0.1 - has-tostringtag: 1.0.2 - safe-regex-test: 1.1.0 - is-glob@4.0.3: dependencies: is-extglob: 2.1.1 @@ -10553,13 +10124,6 @@ snapshots: call-bind: 1.0.7 has-tostringtag: 1.0.2 - is-regex@1.2.1: - dependencies: - call-bound: 1.0.3 - gopd: 1.2.0 - has-tostringtag: 1.0.2 - hasown: 2.0.2 - is-set@2.0.2: {} is-shared-array-buffer@1.0.2: @@ -11045,8 +10609,6 @@ snapshots: dependencies: argparse: 2.0.1 - jsdoc-type-pratt-parser@4.1.0: {} - jsdom@20.0.3: dependencies: abab: 2.0.6 @@ -11058,7 +10620,7 @@ snapshots: decimal.js: 10.4.3 domexception: 4.0.0 escodegen: 2.1.0 - form-data: 4.0.2 + form-data: 4.0.4 html-encoding-sniffer: 3.0.0 http-proxy-agent: 5.0.0 https-proxy-agent: 5.0.1 @@ -11159,6 +10721,11 @@ snapshots: locate-path@6.0.0: dependencies: p-locate: 5.0.0 + optional: true + + locate-path@7.2.0: + dependencies: + p-locate: 6.0.0 lodash-es@4.17.21: {} @@ -11183,9 +10750,7 @@ snapshots: dependencies: js-tokens: 4.0.0 - loupe@3.1.2: {} - - loupe@3.1.3: {} + loupe@3.2.0: {} lowlight@1.20.0: dependencies: @@ -11206,11 +10771,7 @@ snapshots: lz-string@1.5.0: {} - magic-string@0.27.0: - dependencies: - '@jridgewell/sourcemap-codec': 1.5.0 - - magic-string@0.30.5: + magic-string@0.30.17: dependencies: '@jridgewell/sourcemap-codec': 1.5.0 @@ -11225,8 +10786,6 @@ snapshots: dependencies: tmpl: 1.0.5 - map-or-similar@1.5.0: {} - markdown-table@3.0.3: {} material-colors@1.2.6: {} @@ -11423,10 +10982,6 @@ snapshots: memoize-one@5.2.1: {} - memoizerific@1.11.3: - dependencies: - map-or-similar: 1.5.0 - merge-descriptors@1.0.3: {} merge-stream@2.0.0: {} @@ -11782,7 +11337,7 @@ snapshots: mock-socket@9.3.1: {} - monaco-editor@0.52.0: {} + monaco-editor@0.52.2: {} moo-color@1.0.3: dependencies: @@ -11922,6 +11477,10 @@ snapshots: dependencies: yocto-queue: 0.1.0 + p-limit@4.0.0: + dependencies: + yocto-queue: 1.2.1 + p-locate@4.1.0: dependencies: p-limit: 2.3.0 @@ -11929,6 +11488,11 @@ snapshots: p-locate@5.0.0: dependencies: p-limit: 3.1.0 + optional: true + + p-locate@6.0.0: + dependencies: + p-limit: 4.0.0 p-try@2.2.0: {} @@ -11976,6 +11540,8 @@ snapshots: path-exists@4.0.0: {} + path-exists@5.0.0: {} + path-is-absolute@1.0.1: {} path-key@3.1.1: {} @@ -12019,10 +11585,6 @@ snapshots: optionalDependencies: fsevents: 2.3.2 - polished@4.3.1: - dependencies: - '@babel/runtime': 7.26.10 - possible-typed-array-names@1.0.0: {} postcss-import@15.1.0(postcss@8.5.1): @@ -12102,8 +11664,6 @@ snapshots: process-nextick-args@2.0.1: {} - process@0.11.10: {} - prompts@2.4.2: dependencies: kleur: 3.0.3 @@ -12201,17 +11761,17 @@ snapshots: dependencies: typescript: 5.6.3 - react-docgen@7.0.3: + react-docgen@8.0.0: dependencies: - '@babel/core': 7.26.0 - '@babel/traverse': 7.25.9 - '@babel/types': 7.26.0 + '@babel/core': 7.27.1 + '@babel/traverse': 7.27.1 + '@babel/types': 7.27.1 '@types/babel__core': 7.20.5 '@types/babel__traverse': 7.20.6 '@types/doctrine': 0.0.9 '@types/resolve': 1.20.4 doctrine: 3.0.0 - resolve: 1.22.8 + resolve: 1.22.10 strip-indent: 4.0.0 transitivePeerDependencies: - supports-color @@ -12304,17 +11864,13 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - react-router-dom@6.26.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + react-router@7.8.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: - '@remix-run/router': 1.19.2 + cookie: 1.0.2 react: 18.3.1 + set-cookie-parser: 2.7.1 + optionalDependencies: react-dom: 18.3.1(react@18.3.1) - react-router: 6.26.2(react@18.3.1) - - react-router@6.26.2(react@18.3.1): - dependencies: - '@remix-run/router': 1.19.2 - react: 18.3.1 react-smooth@4.0.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: @@ -12507,12 +12063,6 @@ snapshots: path-parse: 1.0.7 supports-preserve-symlinks-flag: 1.0.0 - resolve@1.22.8: - dependencies: - is-core-module: 2.13.1 - path-parse: 1.0.7 - supports-preserve-symlinks-flag: 1.0.0 - restore-cursor@3.1.0: dependencies: onetime: 5.1.2 @@ -12572,12 +12122,6 @@ snapshots: safe-buffer@5.2.1: {} - safe-regex-test@1.1.0: - dependencies: - call-bound: 1.0.3 - es-errors: 1.3.0 - is-regex: 1.2.1 - safer-buffer@2.1.2: {} saxes@6.0.0: @@ -12617,6 +12161,8 @@ snapshots: transitivePeerDependencies: - supports-color + set-cookie-parser@2.7.1: {} + set-function-length@1.2.2: dependencies: define-data-property: 1.1.4 @@ -12723,31 +12269,40 @@ snapshots: dependencies: internal-slot: 1.0.6 - storybook-addon-remix-react-router@3.1.0(@storybook/blocks@8.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.5.3(prettier@3.4.1)))(@storybook/channels@8.1.11)(@storybook/components@8.4.6(storybook@8.5.3(prettier@3.4.1)))(@storybook/core-events@8.1.11)(@storybook/manager-api@8.4.6(storybook@8.5.3(prettier@3.4.1)))(@storybook/preview-api@8.5.3(storybook@8.5.3(prettier@3.4.1)))(@storybook/theming@8.4.6(storybook@8.5.3(prettier@3.4.1)))(react-dom@18.3.1(react@18.3.1))(react-router-dom@6.26.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1): + storybook-addon-remix-react-router@5.0.0(react-dom@18.3.1(react@18.3.1))(react-router@7.8.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)(storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0))): dependencies: - '@storybook/blocks': 8.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.5.3(prettier@3.4.1)) - '@storybook/channels': 8.1.11 - '@storybook/components': 8.4.6(storybook@8.5.3(prettier@3.4.1)) - '@storybook/core-events': 8.1.11 - '@storybook/manager-api': 8.4.6(storybook@8.5.3(prettier@3.4.1)) - '@storybook/preview-api': 8.5.3(storybook@8.5.3(prettier@3.4.1)) - '@storybook/theming': 8.4.6(storybook@8.5.3(prettier@3.4.1)) + '@mjackson/form-data-parser': 0.4.0 compare-versions: 6.1.0 react-inspector: 6.0.2(react@18.3.1) - react-router-dom: 6.26.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + react-router: 7.8.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + storybook: 9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) optionalDependencies: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - storybook@8.5.3(prettier@3.4.1): + storybook@9.1.2(@testing-library/dom@10.4.0)(msw@2.4.8(typescript@5.6.3))(prettier@3.4.1)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)): dependencies: - '@storybook/core': 8.5.3(prettier@3.4.1) + '@storybook/global': 5.0.0 + '@testing-library/jest-dom': 6.6.3 + '@testing-library/user-event': 14.6.1(@testing-library/dom@10.4.0) + '@vitest/expect': 3.2.4 + '@vitest/mocker': 3.2.4(msw@2.4.8(typescript@5.6.3))(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)) + '@vitest/spy': 3.2.4 + better-opn: 3.0.2 + esbuild: 0.25.3 + esbuild-register: 3.6.0(esbuild@0.25.3) + recast: 0.23.9 + semver: 7.6.2 + ws: 8.18.0 optionalDependencies: prettier: 3.4.1 transitivePeerDependencies: + - '@testing-library/dom' - bufferutil + - msw - supports-color - utf-8-validate + - vite strict-event-emitter@0.5.1: {} @@ -12874,10 +12429,6 @@ snapshots: tapable@2.2.1: {} - telejson@7.2.0: - dependencies: - memoizerific: 1.11.3 - test-exclude@6.0.0: dependencies: '@istanbuljs/schema': 0.1.3 @@ -12908,9 +12459,9 @@ snapshots: fdir: 6.4.4(picomatch@4.0.2) picomatch: 4.0.2 - tinyrainbow@1.2.0: {} + tinyrainbow@2.0.0: {} - tinyspy@3.0.2: {} + tinyspy@4.0.3: {} tmpl@1.0.5: {} @@ -13026,7 +12577,7 @@ snapshots: typescript@5.6.3: {} - tzdata@1.0.40: {} + tzdata@1.0.44: {} ua-parser-js@1.0.40: {} @@ -13038,6 +12589,8 @@ snapshots: undici@6.21.2: {} + unicorn-magic@0.1.0: {} + unicorn-magic@0.3.0: {} unified@11.0.4: @@ -13093,7 +12646,7 @@ snapshots: unplugin@1.5.0: dependencies: - acorn: 8.14.0 + acorn: 8.14.1 chokidar: 3.6.0 webpack-sources: 3.2.3 webpack-virtual-modules: 0.5.0 @@ -13168,14 +12721,6 @@ snapshots: util-deprecate@1.0.2: {} - util@0.12.5: - dependencies: - inherits: 2.0.4 - is-arguments: 1.2.0 - is-generator-function: 1.1.0 - is-typed-array: 1.1.15 - which-typed-array: 1.1.18 - utils-merge@1.0.1: {} uuid@9.0.1: {} @@ -13218,7 +12763,7 @@ snapshots: d3-time: 3.1.0 d3-timer: 3.0.1 - vite-plugin-checker@0.9.3(@biomejs/biome@1.9.4)(eslint@8.52.0)(optionator@0.9.3)(typescript@5.6.3)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)): + vite-plugin-checker@0.9.3(@biomejs/biome@2.2.0)(eslint@8.52.0)(optionator@0.9.3)(typescript@5.6.3)(vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0)): dependencies: '@babel/code-frame': 7.27.1 chokidar: 4.0.3 @@ -13231,13 +12776,11 @@ snapshots: vite: 6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0) vscode-uri: 3.1.0 optionalDependencies: - '@biomejs/biome': 1.9.4 + '@biomejs/biome': 2.2.0 eslint: 8.52.0 optionator: 0.9.3 typescript: 5.6.3 - vite-plugin-turbosnap@1.0.3: {} - vite@6.3.5(@types/node@20.17.16)(jiti@2.4.2)(yaml@2.7.0): dependencies: esbuild: 0.25.3 @@ -13373,6 +12916,8 @@ snapshots: yocto-queue@0.1.0: {} + yocto-queue@1.2.1: {} + yoctocolors-cjs@2.1.2: {} yup@1.6.1: diff --git a/site/site.go b/site/site.go index 682d21c695a88..e2a0d408e7f8d 100644 --- a/site/site.go +++ b/site/site.go @@ -448,7 +448,6 @@ func (h *Handler) renderHTMLWithState(r *http.Request, filePath string, state ht var user database.User var themePreference string var terminalFont string - var tasksTabVisible bool orgIDs := []uuid.UUID{} eg.Go(func() error { var err error @@ -484,20 +483,6 @@ func (h *Handler) renderHTMLWithState(r *http.Request, filePath string, state ht orgIDs = memberIDs[0].OrganizationIDs return err }) - eg.Go(func() error { - // If HideAITasks is true, force hide the tasks tab - if h.opts.HideAITasks { - tasksTabVisible = false - return nil - } - - hasAITask, err := h.opts.Database.HasTemplateVersionsWithAITask(ctx) - if err != nil { - return err - } - tasksTabVisible = hasAITask - return nil - }) err := eg.Wait() if err == nil { var wg sync.WaitGroup @@ -571,7 +556,7 @@ func (h *Handler) renderHTMLWithState(r *http.Request, filePath string, state ht wg.Add(1) go func() { defer wg.Done() - tasksTabVisible, err := json.Marshal(tasksTabVisible) + tasksTabVisible, err := json.Marshal(!h.opts.HideAITasks) if err == nil { state.TasksTabVisible = html.EscapeString(string(tasksTabVisible)) } diff --git a/site/src/@types/mui.d.ts b/site/src/@types/mui.d.ts index a1b4b61b07eb2..49804d33f8971 100644 --- a/site/src/@types/mui.d.ts +++ b/site/src/@types/mui.d.ts @@ -1,4 +1,4 @@ -// biome-ignore lint/nursery/noRestrictedImports: base theme types +// biome-ignore lint/style/noRestrictedImports: base theme types import type { PaletteColor, PaletteColorOptions } from "@mui/material/styles"; declare module "@mui/material/styles" { diff --git a/site/src/@types/storybook.d.ts b/site/src/@types/storybook.d.ts index 836728d170b9f..599324a291ae4 100644 --- a/site/src/@types/storybook.d.ts +++ b/site/src/@types/storybook.d.ts @@ -8,8 +8,9 @@ import type { } from "api/typesGenerated"; import type { Permissions } from "modules/permissions"; import type { QueryKey } from "react-query"; +import type { ReactRouterAddonStoryParameters } from "storybook-addon-remix-react-router"; -declare module "@storybook/react" { +declare module "@storybook/react-vite" { type WebSocketEvent = | { event: "message"; data: string } | { event: "error" | "close" }; @@ -24,5 +25,6 @@ declare module "@storybook/react" { permissions?: Partial; deploymentValues?: DeploymentValues; deploymentOptions?: SerpentOption[]; + reactRouter?: ReactRouterAddonStoryParameters; } } diff --git a/site/src/App.tsx b/site/src/App.tsx index e4e6d4a665996..2db41214a0423 100644 --- a/site/src/App.tsx +++ b/site/src/App.tsx @@ -9,10 +9,10 @@ import { } from "react"; import { HelmetProvider } from "react-helmet-async"; import { QueryClient, QueryClientProvider } from "react-query"; -import { RouterProvider } from "react-router-dom"; +import { RouterProvider } from "react-router"; import { GlobalSnackbar } from "./components/GlobalSnackbar/GlobalSnackbar"; -import { ThemeProvider } from "./contexts/ThemeProvider"; import { AuthProvider } from "./contexts/auth/AuthProvider"; +import { ThemeProvider } from "./contexts/ThemeProvider"; import { router } from "./router"; const defaultQueryClient = new QueryClient({ diff --git a/site/src/api/api.test.ts b/site/src/api/api.test.ts index 04536675f8943..8c4c8556d4423 100644 --- a/site/src/api/api.test.ts +++ b/site/src/api/api.test.ts @@ -8,7 +8,7 @@ import { MockWorkspaceBuild, MockWorkspaceBuildParameter1, } from "testHelpers/entities"; -import { API, MissingBuildParameters, getURLWithSearchParams } from "./api"; +import { API, getURLWithSearchParams, MissingBuildParameters } from "./api"; import type * as TypesGen from "./typesGenerated"; const axiosInstance = API.getAxiosInstance(); diff --git a/site/src/api/api.ts b/site/src/api/api.ts index cd70bfaf00600..d95d644ef7678 100644 --- a/site/src/api/api.ts +++ b/site/src/api/api.ts @@ -21,9 +21,10 @@ */ import globalAxios, { type AxiosInstance, isAxiosError } from "axios"; import type dayjs from "dayjs"; +import type { Task } from "modules/tasks/tasks"; import userAgentParser from "ua-parser-js"; -import { OneWayWebSocket } from "../utils/OneWayWebSocket"; import { delay } from "../utils/delay"; +import { OneWayWebSocket } from "../utils/OneWayWebSocket"; import { type FieldError, isApiError } from "./errors"; import type { DynamicParametersRequest, @@ -420,6 +421,12 @@ export type GetProvisionerDaemonsParams = { // Stringified JSON Object tags?: string; limit?: number; + // Include offline provisioner daemons? + offline?: boolean; +}; + +export type TasksFilter = { + username?: string; }; /** @@ -1187,9 +1194,9 @@ class ApiMethods { }; getWorkspaces = async ( - options: TypesGen.WorkspacesRequest, + req: TypesGen.WorkspacesRequest, ): Promise => { - const url = getURLWithSearchParams("/api/v2/workspaces", options); + const url = getURLWithSearchParams("/api/v2/workspaces", req); const response = await this.axios.get(url); return response.data; }; @@ -1222,7 +1229,7 @@ class ApiMethods { waitForBuild = (build: TypesGen.WorkspaceBuild) => { return new Promise((res, reject) => { void (async () => { - let latestJobInfo: TypesGen.ProvisionerJob | undefined = undefined; + let latestJobInfo: TypesGen.ProvisionerJob | undefined; while ( !["succeeded", "canceled"].some((status) => @@ -1896,6 +1903,13 @@ class ApiMethods { return response.data; }; + updateWorkspaceACL = async ( + workspaceId: string, + data: TypesGen.UpdateWorkspaceACL, + ): Promise => { + await this.axios.patch(`/api/v2/workspaces/${workspaceId}/acl`, data); + }; + getApplicationsHost = async (): Promise => { const response = await this.axios.get("/api/v2/applications/host"); return response.data; @@ -2015,6 +2029,16 @@ class ApiMethods { return response.data; }; + getWorkspaceAgentCredentials = async ( + workspaceID: string, + agentName: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/workspaces/${workspaceID}/external-agent/${agentName}/credentials`, + ); + return response.data; + }; + upsertWorkspaceAgentSharedPort = async ( workspaceID: string, req: TypesGen.UpsertWorkspaceAgentPortShareRequest, @@ -2658,6 +2682,38 @@ class ExperimentalApiMethods { return response.data; }; + + createTask = async ( + user: string, + req: TypesGen.CreateTaskRequest, + ): Promise => { + const response = await this.axios.post( + `/api/experimental/tasks/${user}`, + req, + ); + + return response.data; + }; + + getTasks = async (filter: TasksFilter): Promise => { + const queryExpressions = ["has-ai-task:true"]; + + if (filter.username) { + queryExpressions.push(`owner:${filter.username}`); + } + + const workspaces = await API.getWorkspaces({ + q: queryExpressions.join(" "), + }); + const prompts = await API.experimental.getAITasksPrompts( + workspaces.workspaces.map((workspace) => workspace.latest_build.id), + ); + + return workspaces.workspaces.map((workspace) => ({ + workspace, + prompt: prompts.prompts[workspace.latest_build.id], + })); + }; } // This is a hard coded CSRF token/cookie pair for local development. In prod, diff --git a/site/src/api/queries/templates.ts b/site/src/api/queries/templates.ts index 5135f2304426e..8c3b294f7fad8 100644 --- a/site/src/api/queries/templates.ts +++ b/site/src/api/queries/templates.ts @@ -48,21 +48,6 @@ export const templates = ( }; }; -const getTemplatesByOrganizationQueryKey = ( - organization: string, - options?: GetTemplatesOptions, -) => [organization, "templates", options?.deprecated]; - -const templatesByOrganization = ( - organization: string, - options: GetTemplatesOptions = {}, -) => { - return { - queryKey: getTemplatesByOrganizationQueryKey(organization, options), - queryFn: () => API.getTemplatesByOrganization(organization, options), - }; -}; - export const templateACL = (templateId: string) => { return { queryKey: ["templateAcl", templateId], @@ -121,9 +106,11 @@ export const templateExamples = () => { }; }; +export const templateVersionRoot: string = "templateVersion"; + export const templateVersion = (versionId: string) => { return { - queryKey: ["templateVersion", versionId], + queryKey: [templateVersionRoot, versionId], queryFn: () => API.getTemplateVersion(versionId), }; }; @@ -134,7 +121,7 @@ export const templateVersionByName = ( versionName: string, ) => { return { - queryKey: ["templateVersion", organizationId, templateName, versionName], + queryKey: [templateVersionRoot, organizationId, templateName, versionName], queryFn: () => API.getTemplateVersionByName(organizationId, templateName, versionName), }; @@ -153,7 +140,7 @@ export const templateVersions = (templateId: string) => { }; export const templateVersionVariablesKey = (versionId: string) => [ - "templateVersion", + templateVersionRoot, versionId, "variables", ]; @@ -216,7 +203,7 @@ export const templaceACLAvailable = ( }; const templateVersionExternalAuthKey = (versionId: string) => [ - "templateVersion", + templateVersionRoot, versionId, "externalAuth", ]; @@ -257,21 +244,21 @@ const createTemplateFn = async (options: CreateTemplateOptions) => { export const templateVersionLogs = (versionId: string) => { return { - queryKey: ["templateVersion", versionId, "logs"], + queryKey: [templateVersionRoot, versionId, "logs"], queryFn: () => API.getTemplateVersionLogs(versionId), }; }; export const richParameters = (versionId: string) => { return { - queryKey: ["templateVersion", versionId, "richParameters"], + queryKey: [templateVersionRoot, versionId, "richParameters"], queryFn: () => API.getTemplateVersionRichParameters(versionId), }; }; export const resources = (versionId: string) => { return { - queryKey: ["templateVersion", versionId, "resources"], + queryKey: [templateVersionRoot, versionId, "resources"], queryFn: () => API.getTemplateVersionResources(versionId), }; }; @@ -293,7 +280,7 @@ export const previousTemplateVersion = ( ) => { return { queryKey: [ - "templateVersion", + templateVersionRoot, organizationId, templateName, versionName, @@ -313,7 +300,7 @@ export const previousTemplateVersion = ( export const templateVersionPresets = (versionId: string) => { return { - queryKey: ["templateVersion", versionId, "presets"], + queryKey: [templateVersionRoot, versionId, "presets"], queryFn: () => API.getTemplateVersionPresets(versionId), }; }; @@ -323,7 +310,7 @@ const waitBuildToBeFinished = async ( onRequest?: (data: TemplateVersion) => void, ) => { let data: TemplateVersion; - let jobStatus: ProvisionerJobStatus | undefined = undefined; + let jobStatus: ProvisionerJobStatus | undefined; do { // When pending we want to poll more frequently await delay(jobStatus === "pending" ? 250 : 1000); diff --git a/site/src/api/queries/users.ts b/site/src/api/queries/users.ts index 4d87232ee698c..31a0302c94653 100644 --- a/site/src/api/queries/users.ts +++ b/site/src/api/queries/users.ts @@ -12,11 +12,12 @@ import type { UsersRequest, } from "api/typesGenerated"; import { - type MetadataState, defaultMetadataManager, + type MetadataState, } from "hooks/useEmbeddedMetadata"; import type { UsePaginatedQueryOptions } from "hooks/usePaginatedQuery"; import type { + MutationOptions, QueryClient, UseMutationOptions, UseQueryOptions, @@ -192,10 +193,15 @@ const loginFn = async ({ }; }; -export const logout = (queryClient: QueryClient) => { +export const logout = (queryClient: QueryClient): MutationOptions => { return { mutationFn: API.logout, - onSuccess: () => { + // We're doing this cleanup in `onSettled` instead of `onSuccess` because in the case where an oAuth refresh token has expired this endpoint will return a 401 instead of 200. + onSettled: (_, error) => { + if (error) { + console.error(error); + } + /** * 2024-05-02 - If we persist any form of user data after the user logs * out, that will continue to seed the React Query cache, creating @@ -210,6 +216,14 @@ export const logout = (queryClient: QueryClient) => { * Deleting the user data will mean that all future requests have to take * a full roundtrip, but this still felt like the best way to ensure that * manually logging out doesn't blow the entire app up. + * + * 2025-08-20 - Since this endpoint is for performing a post logout clean up + * on the backend we should move this local clean up outside of the mutation + * so that it can be explicitly performed even in cases where we don't want + * run the clean up (e.g. when a user is unauthorized). Unfortunately our + * auth logic is too tangled up with some obscured React Query behaviors to + * be able to move right now. After `AuthProvider.tsx` is refactored this + * should be moved. */ defaultMetadataManager.clearMetadataByKey("user"); queryClient.removeQueries(); diff --git a/site/src/api/queries/workspaces.ts b/site/src/api/queries/workspaces.ts index 05fb09314d741..1c3e82a8816c2 100644 --- a/site/src/api/queries/workspaces.ts +++ b/site/src/api/queries/workspaces.ts @@ -3,6 +3,7 @@ import { DetailedError, isApiValidationError } from "api/errors"; import type { CreateWorkspaceRequest, ProvisionerLogLevel, + UpdateWorkspaceACL, UsageAppName, Workspace, WorkspaceAgentLog, @@ -138,15 +139,14 @@ async function findMatchWorkspace(q: string): Promise { } } -function workspacesKey(config: WorkspacesRequest = {}) { - const { q, limit } = config; - return ["workspaces", { q, limit }] as const; +function workspacesKey(req: WorkspacesRequest = {}) { + return ["workspaces", req] as const; } -export function workspaces(config: WorkspacesRequest = {}) { +export function workspaces(req: WorkspacesRequest = {}) { return { - queryKey: workspacesKey(config), - queryFn: () => API.getWorkspaces(config), + queryKey: workspacesKey(req), + queryFn: () => API.getWorkspaces(req), } as const satisfies QueryOptions; } @@ -421,3 +421,21 @@ export const workspacePermissions = (workspace?: Workspace) => { staleTime: Number.POSITIVE_INFINITY, }; }; + +export const updateWorkspaceACL = (workspaceId: string) => { + return { + mutationFn: async (patch: UpdateWorkspaceACL) => { + await API.updateWorkspaceACL(workspaceId, patch); + }, + }; +}; + +export const workspaceAgentCredentials = ( + workspaceId: string, + agentName: string, +) => { + return { + queryKey: ["workspaces", workspaceId, "agents", agentName, "credentials"], + queryFn: () => API.getWorkspaceAgentCredentials(workspaceId, agentName), + }; +}; diff --git a/site/src/api/rbacresourcesGenerated.ts b/site/src/api/rbacresourcesGenerated.ts index 5d632d57fad95..145b9ff9f8d7f 100644 --- a/site/src/api/rbacresourcesGenerated.ts +++ b/site/src/api/rbacresourcesGenerated.ts @@ -159,6 +159,11 @@ export const RBACResourceActions: Partial< use: "use the template to initially create a workspace, then workspace lifecycle permissions take over", view_insights: "view insights", }, + usage_event: { + create: "create a usage event", + read: "read usage events", + update: "update usage events", + }, user: { create: "create a new user", delete: "delete an existing user", @@ -167,6 +172,12 @@ export const RBACResourceActions: Partial< update: "update an existing user", update_personal: "update personal data", }, + user_secret: { + create: "create a user secret", + delete: "delete a user secret", + read: "read user secret metadata and value", + update: "update user secret metadata and value", + }, webpush_subscription: { create: "create webpush subscriptions", delete: "delete webpush subscriptions", diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index 412fb1e7f0a8c..58167d7d27df0 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -307,6 +307,11 @@ export const BypassRatelimitHeader = "X-Coder-Bypass-Ratelimit"; // From codersdk/client.go export const CLITelemetryHeader = "Coder-CLI-Telemetry"; +// From codersdk/cors_behavior.go +export type CORSBehavior = "passthru" | "simple"; + +export const CORSBehaviors: CORSBehavior[] = ["passthru", "simple"]; + // From codersdk/workspacebuilds.go export interface CancelWorkspaceBuildParams { readonly expect_status?: CancelWorkspaceBuildStatus; @@ -471,6 +476,13 @@ export interface CreateProvisionerKeyResponse { readonly key: string; } +// From codersdk/aitasks.go +export interface CreateTaskRequest { + readonly template_version_id: string; + readonly template_version_preset_id?: string; + readonly prompt: string; +} + // From codersdk/organizations.go export interface CreateTemplateRequest { readonly name: string; @@ -492,6 +504,7 @@ export interface CreateTemplateRequest { readonly require_active_version: boolean; readonly max_port_share_level: WorkspaceAgentPortShareLevel | null; readonly template_use_classic_parameter_flow?: boolean; + readonly cors_behavior: CORSBehavior | null; } // From codersdk/templateversions.go @@ -911,6 +924,7 @@ export type Experiment = | "notifications" | "oauth2" | "web-push" + | "workspace-sharing" | "workspace-usage"; export const Experiments: Experiment[] = [ @@ -920,9 +934,16 @@ export const Experiments: Experiment[] = [ "notifications", "oauth2", "web-push", + "workspace-sharing", "workspace-usage", ]; +// From codersdk/workspaces.go +export interface ExternalAgentCredentials { + readonly command: string; + readonly agent_token: string; +} + // From codersdk/externalauth.go export interface ExternalAuth { readonly authenticated: boolean; @@ -1036,6 +1057,7 @@ export type FeatureName = | "user_limit" | "user_role_management" | "workspace_batch_actions" + | "workspace_external_agent" | "workspace_prebuilds" | "workspace_proxy"; @@ -1059,6 +1081,7 @@ export const FeatureNames: FeatureName[] = [ "user_limit", "user_role_management", "workspace_batch_actions", + "workspace_external_agent", "workspace_prebuilds", "workspace_proxy", ]; @@ -1817,6 +1840,9 @@ export interface OrganizationMemberWithUserData extends OrganizationMember { // From codersdk/organizations.go export interface OrganizationProvisionerDaemonsOptions { readonly Limit: number; + readonly Offline: boolean; + readonly Status: readonly ProvisionerDaemonStatus[]; + readonly MaxAge: number; readonly IDs: readonly string[]; readonly Tags: Record; } @@ -1998,6 +2024,8 @@ export interface Preset { readonly Parameters: readonly PresetParameter[]; readonly Default: boolean; readonly DesiredPrebuildInstances: number | null; + readonly Description: string; + readonly Icon: string; } // From codersdk/presets.go @@ -2148,6 +2176,7 @@ export interface ProvisionerJob { readonly type: ProvisionerJobType; readonly available_workers?: readonly string[]; readonly metadata: ProvisionerJobMetadata; + readonly logs_overflowed: boolean; } // From codersdk/provisionerdaemons.go @@ -2374,7 +2403,9 @@ export type RBACResource = | "system" | "tailnet_coordinator" | "template" + | "usage_event" | "user" + | "user_secret" | "webpush_subscription" | "*" | "workspace" @@ -2414,7 +2445,9 @@ export const RBACResources: RBACResource[] = [ "system", "tailnet_coordinator", "template", + "usage_event", "user", + "user_secret", "webpush_subscription", "*", "workspace", @@ -2774,6 +2807,44 @@ export interface TailDERPRegion { readonly Nodes: readonly TailDERPNode[]; } +// From codersdk/aitasks.go +export interface Task { + readonly id: string; + readonly organization_id: string; + readonly owner_id: string; + readonly name: string; + readonly template_id: string; + readonly workspace_id: string | null; + readonly initial_prompt: string; + readonly status: WorkspaceStatus; + readonly current_state: TaskStateEntry | null; + readonly created_at: string; + readonly updated_at: string; +} + +// From codersdk/aitasks.go +export type TaskState = "completed" | "failed" | "idle" | "working"; + +// From codersdk/aitasks.go +export interface TaskStateEntry { + readonly timestamp: string; + readonly state: TaskState; + readonly message: string; + readonly uri: string; +} + +export const TaskStates: TaskState[] = [ + "completed", + "failed", + "idle", + "working", +]; + +// From codersdk/aitasks.go +export interface TasksFilter { + readonly owner?: string; +} + // From codersdk/deployment.go export interface TelemetryConfig { readonly enable: boolean; @@ -2814,6 +2885,7 @@ export interface Template { readonly time_til_dormant_autodelete_ms: number; readonly require_active_version: boolean; readonly max_port_share_level: WorkspaceAgentPortShareLevel; + readonly cors_behavior: CORSBehavior; readonly use_classic_parameter_flow: boolean; } @@ -2976,6 +3048,7 @@ export interface TemplateVersion { readonly archived: boolean; readonly warnings?: readonly TemplateVersionWarning[]; readonly matched_provisioners?: MatchedProvisioners; + readonly has_external_agent: boolean; } // From codersdk/templateversions.go @@ -3186,6 +3259,7 @@ export interface UpdateTemplateMeta { readonly deprecation_message?: string; readonly disable_everyone_group_access: boolean; readonly max_port_share_level?: WorkspaceAgentPortShareLevel; + readonly cors_behavior?: CORSBehavior; readonly use_classic_parameter_flow?: boolean; } @@ -3217,6 +3291,12 @@ export interface UpdateUserQuietHoursScheduleRequest { readonly schedule: string; } +// From codersdk/workspaces.go +export interface UpdateWorkspaceACL { + readonly user_roles?: Record; + readonly group_roles?: Record; +} + // From codersdk/workspaces.go export interface UpdateWorkspaceAutomaticUpdatesRequest { readonly automatic_updates: AutomaticUpdates; @@ -3845,6 +3925,7 @@ export interface WorkspaceBuild { readonly template_version_preset_id: string | null; readonly has_ai_task?: boolean; readonly ai_task_sidebar_app_id?: string; + readonly has_external_agent?: boolean; } // From codersdk/workspacebuilds.go @@ -3957,6 +4038,11 @@ export interface WorkspaceResourceMetadata { readonly sensitive: boolean; } +// From codersdk/workspaces.go +export type WorkspaceRole = "admin" | "" | "use"; + +export const WorkspaceRoles: WorkspaceRole[] = ["admin", "", "use"]; + // From codersdk/workspacebuilds.go export type WorkspaceStatus = | "canceled" diff --git a/site/src/components/Abbr/Abbr.stories.tsx b/site/src/components/Abbr/Abbr.stories.tsx index 6720b90fffda5..7d079e4ac7416 100644 --- a/site/src/components/Abbr/Abbr.stories.tsx +++ b/site/src/components/Abbr/Abbr.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { Abbr } from "./Abbr"; const meta: Meta = { @@ -6,10 +6,10 @@ const meta: Meta = { component: Abbr, decorators: [ (Story) => ( - <> +

Try the following text out in a screen reader!

- +
), ], }; @@ -25,10 +25,10 @@ export const InlinedShorthand: Story = { }, decorators: [ (Story) => ( -

+

The physical pain of getting bonked on the head with a cartoon mallet - lasts precisely 593{" "} - + lasts precisely 593 + . The emotional turmoil and complete embarrassment lasts forever. @@ -45,7 +45,7 @@ export const Acronym: Story = { }, decorators: [ (Story) => ( - + ), @@ -60,16 +60,9 @@ export const Initialism: Story = { }, decorators: [ (Story) => ( - + ), ], }; - -const styles = { - // Just here to make the abbreviated part more obvious in the component library - underlined: { - textDecoration: "underline dotted", - }, -}; diff --git a/site/src/components/Abbr/Abbr.test.tsx b/site/src/components/Abbr/Abbr.test.tsx index 3ae76f071bdfb..b67406299685a 100644 --- a/site/src/components/Abbr/Abbr.test.tsx +++ b/site/src/components/Abbr/Abbr.test.tsx @@ -1,5 +1,5 @@ import { render, screen } from "@testing-library/react"; -import { Abbr, type Pronunciation } from "./Abbr"; +import { Abbr } from "./Abbr"; type AbbreviationData = { abbreviation: string; @@ -7,28 +7,8 @@ type AbbreviationData = { expectedLabel: string; }; -type AssertionInput = AbbreviationData & { - pronunciation: Pronunciation; -}; - -function assertAccessibleLabel({ - abbreviation, - title, - expectedLabel, - pronunciation, -}: AssertionInput) { - const { unmount } = render( - - {abbreviation} - , - ); - - screen.getByLabelText(expectedLabel, { selector: "abbr" }); - unmount(); -} - describe(Abbr.name, () => { - it("Has an aria-label that equals the title if the abbreviation is shorthand", () => { + it("Omits abbreviation from screen-reader output if it is shorthand", () => { const sampleShorthands: AbbreviationData[] = [ { abbreviation: "ms", @@ -43,11 +23,22 @@ describe(Abbr.name, () => { ]; for (const shorthand of sampleShorthands) { - assertAccessibleLabel({ ...shorthand, pronunciation: "shorthand" }); + const { unmount } = render( + + {shorthand.abbreviation} + , + ); + + // The element doesn't have any ARIA role semantics baked in, + // so we have to get a little bit more creative with making sure the + // expected content is on screen in an accessible way + const element = screen.getByTitle(shorthand.title); + expect(element).toHaveTextContent(shorthand.expectedLabel); + unmount(); } }); - it("Has an aria label with title and 'flattened' pronunciation if abbreviation is acronym", () => { + it("Adds title and 'flattened' pronunciation if abbreviation is acronym", () => { const sampleAcronyms: AbbreviationData[] = [ { abbreviation: "NASA", @@ -67,11 +58,19 @@ describe(Abbr.name, () => { ]; for (const acronym of sampleAcronyms) { - assertAccessibleLabel({ ...acronym, pronunciation: "acronym" }); + const { unmount } = render( + + {acronym.abbreviation} + , + ); + + const element = screen.getByTitle(acronym.title); + expect(element).toHaveTextContent(acronym.expectedLabel); + unmount(); } }); - it("Has an aria label with title and initialized pronunciation if abbreviation is initialism", () => { + it("Adds title and initialized pronunciation if abbreviation is initialism", () => { const sampleInitialisms: AbbreviationData[] = [ { abbreviation: "FBI", @@ -91,7 +90,15 @@ describe(Abbr.name, () => { ]; for (const initialism of sampleInitialisms) { - assertAccessibleLabel({ ...initialism, pronunciation: "initialism" }); + const { unmount } = render( + + {initialism.abbreviation} + , + ); + + const element = screen.getByTitle(initialism.title); + expect(element).toHaveTextContent(initialism.expectedLabel); + unmount(); } }); }); diff --git a/site/src/components/Abbr/Abbr.tsx b/site/src/components/Abbr/Abbr.tsx index c41f68e08117f..0c08c33e111ce 100644 --- a/site/src/components/Abbr/Abbr.tsx +++ b/site/src/components/Abbr/Abbr.tsx @@ -1,11 +1,13 @@ import type { FC, HTMLAttributes } from "react"; +import { cn } from "utils/cn"; -export type Pronunciation = "shorthand" | "acronym" | "initialism"; +type Pronunciation = "shorthand" | "acronym" | "initialism"; type AbbrProps = HTMLAttributes & { children: string; title: string; pronunciation?: Pronunciation; + className?: string; }; /** @@ -21,21 +23,26 @@ export const Abbr: FC = ({ children, title, pronunciation = "shorthand", + className, ...delegatedProps }) => { return ( {children} + + {getAccessibleLabel(children, title, pronunciation)} + ); }; diff --git a/site/src/components/ActiveUserChart/ActiveUserChart.stories.tsx b/site/src/components/ActiveUserChart/ActiveUserChart.stories.tsx index f4961f0cedba8..c63f867eb0de8 100644 --- a/site/src/components/ActiveUserChart/ActiveUserChart.stories.tsx +++ b/site/src/components/ActiveUserChart/ActiveUserChart.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { ActiveUserChart } from "./ActiveUserChart"; const meta: Meta = { @@ -6,13 +6,13 @@ const meta: Meta = { component: ActiveUserChart, args: { data: [ - { date: "2024-01-01", amount: 5 }, - { date: "2024-01-02", amount: 6 }, - { date: "2024-01-03", amount: 7 }, - { date: "2024-01-04", amount: 8 }, - { date: "2024-01-05", amount: 9 }, - { date: "2024-01-06", amount: 10 }, - { date: "2024-01-07", amount: 11 }, + { date: "2024-01-01", amount: 12 }, + { date: "2024-01-02", amount: 8 }, + { date: "2024-01-03", amount: 15 }, + { date: "2024-01-04", amount: 3 }, + { date: "2024-01-05", amount: 22 }, + { date: "2024-01-06", amount: 7 }, + { date: "2024-01-07", amount: 18 }, ], }, decorators: [ @@ -31,12 +31,31 @@ export const Example: Story = {}; export const ManyDataPoints: Story = { args: { - data: Array.from({ length: 30 }).map((_, i) => { - const date = new Date(2024, 0, i + 1); - return { - date: date.toISOString().split("T")[0], - amount: 5 + Math.floor(Math.random() * 15), - }; - }), + data: [ + { date: "2024-01-01", amount: 12 }, + { date: "2024-01-02", amount: 8 }, + { date: "2024-01-03", amount: 15 }, + { date: "2024-01-04", amount: 3 }, + { date: "2024-01-05", amount: 22 }, + { date: "2024-01-06", amount: 7 }, + { date: "2024-01-07", amount: 18 }, + { date: "2024-01-08", amount: 31 }, + { date: "2024-01-09", amount: 5 }, + { date: "2024-01-10", amount: 27 }, + { date: "2024-01-11", amount: 14 }, + { date: "2024-01-12", amount: 9 }, + { date: "2024-01-13", amount: 35 }, + { date: "2024-01-14", amount: 21 }, + { date: "2024-01-15", amount: 6 }, + { date: "2024-01-16", amount: 29 }, + { date: "2024-01-17", amount: 11 }, + { date: "2024-01-18", amount: 17 }, + { date: "2024-01-19", amount: 4 }, + { date: "2024-01-20", amount: 25 }, + { date: "2024-01-21", amount: 13 }, + { date: "2024-01-22", amount: 33 }, + { date: "2024-01-23", amount: 19 }, + { date: "2024-01-24", amount: 26 }, + ], }, }; diff --git a/site/src/components/ActiveUserChart/ActiveUserChart.tsx b/site/src/components/ActiveUserChart/ActiveUserChart.tsx index 084ed7b16559f..ef55e06d568a4 100644 --- a/site/src/components/ActiveUserChart/ActiveUserChart.tsx +++ b/site/src/components/ActiveUserChart/ActiveUserChart.tsx @@ -68,7 +68,7 @@ export const ActiveUserChart: FC = ({ data }) => { const item = p[0]; return `${item.value} active users`; }} - formatter={(v, n, item) => { + formatter={(_v, _n, item) => { const date = new Date(item.payload.date); return date.toLocaleString(undefined, { month: "long", @@ -113,7 +113,7 @@ type ActiveUsersTitleProps = { export const ActiveUsersTitle: FC = ({ interval }) => { return ( -

+
{interval === "day" ? "Daily" : "Weekly"} Active Users diff --git a/site/src/components/Alert/Alert.stories.tsx b/site/src/components/Alert/Alert.stories.tsx index a170f0b29d244..e122c0c07c5a6 100644 --- a/site/src/components/Alert/Alert.stories.tsx +++ b/site/src/components/Alert/Alert.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { Button } from "components/Button/Button"; import { Alert } from "./Alert"; diff --git a/site/src/components/Alert/Alert.tsx b/site/src/components/Alert/Alert.tsx index e97b690f82833..1cbf36b2df1d2 100644 --- a/site/src/components/Alert/Alert.tsx +++ b/site/src/components/Alert/Alert.tsx @@ -1,7 +1,7 @@ import MuiAlert, { type AlertColor as MuiAlertColor, type AlertProps as MuiAlertProps, - // biome-ignore lint/nursery/noRestrictedImports: Used as base component + // biome-ignore lint/style/noRestrictedImports: Used as base component } from "@mui/material/Alert"; import Collapse from "@mui/material/Collapse"; import { Button } from "components/Button/Button"; diff --git a/site/src/components/Alert/ErrorAlert.stories.tsx b/site/src/components/Alert/ErrorAlert.stories.tsx index e62314c622cc6..28120dd1054d1 100644 --- a/site/src/components/Alert/ErrorAlert.stories.tsx +++ b/site/src/components/Alert/ErrorAlert.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { Button } from "components/Button/Button"; import { mockApiError } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { Button } from "components/Button/Button"; import { ErrorAlert } from "./ErrorAlert"; const mockError = mockApiError({ diff --git a/site/src/components/Avatar/Avatar.stories.tsx b/site/src/components/Avatar/Avatar.stories.tsx index 55deeb9073dbe..256da41bfd645 100644 --- a/site/src/components/Avatar/Avatar.stories.tsx +++ b/site/src/components/Avatar/Avatar.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { Avatar } from "./Avatar"; const meta: Meta = { diff --git a/site/src/components/Avatar/Avatar.tsx b/site/src/components/Avatar/Avatar.tsx index 8661dceda0f6a..3b9de3657d623 100644 --- a/site/src/components/Avatar/Avatar.tsx +++ b/site/src/components/Avatar/Avatar.tsx @@ -12,7 +12,7 @@ import { useTheme } from "@emotion/react"; import * as AvatarPrimitive from "@radix-ui/react-avatar"; -import { type VariantProps, cva } from "class-variance-authority"; +import { cva, type VariantProps } from "class-variance-authority"; import * as React from "react"; import { getExternalImageStylesFromUrl } from "theme/externalImages"; import { cn } from "utils/cn"; diff --git a/site/src/components/Avatar/AvatarCard.stories.tsx b/site/src/components/Avatar/AvatarCard.stories.tsx index cc8fb56e16c05..b067877e3c8dc 100644 --- a/site/src/components/Avatar/AvatarCard.stories.tsx +++ b/site/src/components/Avatar/AvatarCard.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { AvatarCard } from "./AvatarCard"; const meta: Meta = { diff --git a/site/src/components/Avatar/AvatarData.stories.tsx b/site/src/components/Avatar/AvatarData.stories.tsx index 53fc4d8f17555..22f8cb45d7699 100644 --- a/site/src/components/Avatar/AvatarData.stories.tsx +++ b/site/src/components/Avatar/AvatarData.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { AvatarData } from "./AvatarData"; const meta: Meta = { diff --git a/site/src/components/Avatar/AvatarDataSkeleton.stories.tsx b/site/src/components/Avatar/AvatarDataSkeleton.stories.tsx index 0df5ca083b98b..99b19a47657d6 100644 --- a/site/src/components/Avatar/AvatarDataSkeleton.stories.tsx +++ b/site/src/components/Avatar/AvatarDataSkeleton.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { AvatarDataSkeleton } from "./AvatarDataSkeleton"; const meta: Meta = { diff --git a/site/src/components/Badge/Badge.stories.tsx b/site/src/components/Badge/Badge.stories.tsx index 7d900b49ff6f6..524d0e3642588 100644 --- a/site/src/components/Badge/Badge.stories.tsx +++ b/site/src/components/Badge/Badge.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { Settings, TriangleAlert } from "lucide-react"; import { Badge } from "./Badge"; diff --git a/site/src/components/Badge/Badge.tsx b/site/src/components/Badge/Badge.tsx index 0d11c96d30433..c3d0b27475bf2 100644 --- a/site/src/components/Badge/Badge.tsx +++ b/site/src/components/Badge/Badge.tsx @@ -3,7 +3,7 @@ * @see {@link https://ui.shadcn.com/docs/components/badge} */ import { Slot } from "@radix-ui/react-slot"; -import { type VariantProps, cva } from "class-variance-authority"; +import { cva, type VariantProps } from "class-variance-authority"; import { forwardRef } from "react"; import { cn } from "utils/cn"; @@ -24,6 +24,7 @@ const badgeVariants = cva( "border border-solid border-border-destructive bg-surface-red text-highlight-red shadow", green: "border border-solid border-surface-green bg-surface-green text-highlight-green shadow", + info: "border border-solid border-surface-sky bg-surface-sky text-highlight-sky shadow", }, size: { xs: "text-2xs font-regular h-5 [&_svg]:hidden rounded px-1.5", diff --git a/site/src/components/Badges/Badges.stories.tsx b/site/src/components/Badges/Badges.stories.tsx index 2a0a60498f487..36c8fddb37ea9 100644 --- a/site/src/components/Badges/Badges.stories.tsx +++ b/site/src/components/Badges/Badges.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { AlphaBadge, Badges, diff --git a/site/src/components/Badges/Badges.tsx b/site/src/components/Badges/Badges.tsx index 278eb890cd2ee..f0db2fb0e9fbc 100644 --- a/site/src/components/Badges/Badges.tsx +++ b/site/src/components/Badges/Badges.tsx @@ -3,9 +3,9 @@ import Tooltip from "@mui/material/Tooltip"; import { Stack } from "components/Stack/Stack"; import { type FC, + forwardRef, type HTMLAttributes, type PropsWithChildren, - forwardRef, } from "react"; const styles = { diff --git a/site/src/components/Breadcrumb/Breadcrumb.stories.tsx b/site/src/components/Breadcrumb/Breadcrumb.stories.tsx index 0b02b2ebb9939..bc14950462d9a 100644 --- a/site/src/components/Breadcrumb/Breadcrumb.stories.tsx +++ b/site/src/components/Breadcrumb/Breadcrumb.stories.tsx @@ -1,4 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import { MockOrganization } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { Breadcrumb, BreadcrumbEllipsis, @@ -8,7 +9,6 @@ import { BreadcrumbPage, BreadcrumbSeparator, } from "components/Breadcrumb/Breadcrumb"; -import { MockOrganization } from "testHelpers/entities"; const meta: Meta = { title: "components/Breadcrumb", diff --git a/site/src/components/Breadcrumb/Breadcrumb.tsx b/site/src/components/Breadcrumb/Breadcrumb.tsx index 35f90d30a5d7b..61d06c3755542 100644 --- a/site/src/components/Breadcrumb/Breadcrumb.tsx +++ b/site/src/components/Breadcrumb/Breadcrumb.tsx @@ -8,8 +8,8 @@ import { type ComponentProps, type ComponentPropsWithoutRef, type FC, - type ReactNode, forwardRef, + type ReactNode, } from "react"; import { cn } from "utils/cn"; diff --git a/site/src/components/BuildIcon/BuildIcon.stories.tsx b/site/src/components/BuildIcon/BuildIcon.stories.tsx index b2f01ad5ae38b..22481719bb4b8 100644 --- a/site/src/components/BuildIcon/BuildIcon.stories.tsx +++ b/site/src/components/BuildIcon/BuildIcon.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { BuildIcon } from "./BuildIcon"; const meta: Meta = { diff --git a/site/src/components/Button/Button.stories.tsx b/site/src/components/Button/Button.stories.tsx index ceeb395cf8006..0cfd4707f5f85 100644 --- a/site/src/components/Button/Button.stories.tsx +++ b/site/src/components/Button/Button.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { PlusIcon } from "lucide-react"; import { Button } from "./Button"; diff --git a/site/src/components/Button/Button.tsx b/site/src/components/Button/Button.tsx index 1f2c6b3b3416b..ff5200edb5883 100644 --- a/site/src/components/Button/Button.tsx +++ b/site/src/components/Button/Button.tsx @@ -3,7 +3,7 @@ * @see {@link https://ui.shadcn.com/docs/components/button} */ import { Slot } from "@radix-ui/react-slot"; -import { type VariantProps, cva } from "class-variance-authority"; +import { cva, type VariantProps } from "class-variance-authority"; import { forwardRef } from "react"; import { cn } from "utils/cn"; diff --git a/site/src/components/Chart/Chart.stories.tsx b/site/src/components/Chart/Chart.stories.tsx index 74fded80d2b4d..a351ba3f24ed6 100644 --- a/site/src/components/Chart/Chart.stories.tsx +++ b/site/src/components/Chart/Chart.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { Area, AreaChart, CartesianGrid, XAxis, YAxis } from "recharts"; import { type ChartConfig, diff --git a/site/src/components/Chart/Chart.tsx b/site/src/components/Chart/Chart.tsx index c68967afe6e91..dd418bfef76c3 100644 --- a/site/src/components/Chart/Chart.tsx +++ b/site/src/components/Chart/Chart.tsx @@ -271,7 +271,7 @@ export const ChartTooltipContent = React.forwardRef< ); ChartTooltipContent.displayName = "ChartTooltip"; -const ChartLegend = RechartsPrimitive.Legend; +const _ChartLegend = RechartsPrimitive.Legend; const ChartLegendContent = React.forwardRef< HTMLDivElement, diff --git a/site/src/components/Checkbox/Checkbox.stories.tsx b/site/src/components/Checkbox/Checkbox.stories.tsx index 2c7582dcfe901..b4cceb5ea535d 100644 --- a/site/src/components/Checkbox/Checkbox.stories.tsx +++ b/site/src/components/Checkbox/Checkbox.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import React from "react"; import { Checkbox } from "./Checkbox"; diff --git a/site/src/components/CodeExample/CodeExample.stories.tsx b/site/src/components/CodeExample/CodeExample.stories.tsx index 93283e4df74a3..61f129f448a73 100644 --- a/site/src/components/CodeExample/CodeExample.stories.tsx +++ b/site/src/components/CodeExample/CodeExample.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { CodeExample } from "./CodeExample"; const meta: Meta = { @@ -31,3 +31,12 @@ export const LongCode: Story = { code: "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAICnKzATuWwmmt5+CKTPuRGN0R1PBemA+6/SStpLiyX+L", }, }; + +export const Redact: Story = { + args: { + secret: false, + redactPattern: /CODER_AGENT_TOKEN="([^"]+)"/g, + redactReplacement: `CODER_AGENT_TOKEN="********"`, + showRevealButton: true, + }, +}; diff --git a/site/src/components/CodeExample/CodeExample.tsx b/site/src/components/CodeExample/CodeExample.tsx index 474dcb1fac225..b69a220550958 100644 --- a/site/src/components/CodeExample/CodeExample.tsx +++ b/site/src/components/CodeExample/CodeExample.tsx @@ -1,11 +1,26 @@ import type { Interpolation, Theme } from "@emotion/react"; -import type { FC } from "react"; +import { Button } from "components/Button/Button"; +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from "components/Tooltip/Tooltip"; +import { EyeIcon, EyeOffIcon } from "lucide-react"; +import { type FC, useState } from "react"; import { MONOSPACE_FONT_FAMILY } from "theme/constants"; import { CopyButton } from "../CopyButton/CopyButton"; interface CodeExampleProps { code: string; + /** Defaulting to true to be on the safe side; you should have to opt out of the secure option, not remember to opt in */ secret?: boolean; + /** Redact parts of the code if the user doesn't want to obfuscate the whole code */ + redactPattern?: RegExp; + /** Replacement text for redacted content */ + redactReplacement?: string; + /** Show a button to reveal the redacted parts of the code */ + showRevealButton?: boolean; className?: string; } @@ -15,11 +30,28 @@ interface CodeExampleProps { export const CodeExample: FC = ({ code, className, - - // Defaulting to true to be on the safe side; you should have to opt out of - // the secure option, not remember to opt in secret = true, + redactPattern, + redactReplacement = "********", + showRevealButton, }) => { + const [showFullValue, setShowFullValue] = useState(false); + + const displayValue = secret + ? obfuscateText(code) + : redactPattern && !showFullValue + ? code.replace(redactPattern, redactReplacement) + : code; + + const showButtonLabel = showFullValue + ? "Hide sensitive data" + : "Show sensitive data"; + const icon = showFullValue ? ( + + ) : ( + + ); + return (
@@ -33,17 +65,36 @@ export const CodeExample: FC = ({ * 2. Even with it turned on and supported, the plaintext is still * readily available in the HTML itself */} - {obfuscateText(code)} + {displayValue} Encrypted text. Please access via the copy button. ) : ( - code + displayValue )} - +
+ {showRevealButton && redactPattern && !secret && ( + + + + + + {showButtonLabel} + + + )} + +
); }; diff --git a/site/src/components/Collapsible/Collapsible.stories.tsx b/site/src/components/Collapsible/Collapsible.stories.tsx index cb391c4d83135..ad099b03c23f0 100644 --- a/site/src/components/Collapsible/Collapsible.stories.tsx +++ b/site/src/components/Collapsible/Collapsible.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { Button } from "components/Button/Button"; import { ChevronsUpDown } from "lucide-react"; import { diff --git a/site/src/components/CollapsibleSummary/CollapsibleSummary.stories.tsx b/site/src/components/CollapsibleSummary/CollapsibleSummary.stories.tsx index 98f63c24ccbc7..c33a151774532 100644 --- a/site/src/components/CollapsibleSummary/CollapsibleSummary.stories.tsx +++ b/site/src/components/CollapsibleSummary/CollapsibleSummary.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { Button } from "../Button/Button"; import { CollapsibleSummary } from "./CollapsibleSummary"; diff --git a/site/src/components/CollapsibleSummary/CollapsibleSummary.tsx b/site/src/components/CollapsibleSummary/CollapsibleSummary.tsx index ef68d1816dbcf..9cf45dc9d445b 100644 --- a/site/src/components/CollapsibleSummary/CollapsibleSummary.tsx +++ b/site/src/components/CollapsibleSummary/CollapsibleSummary.tsx @@ -1,4 +1,4 @@ -import { type VariantProps, cva } from "class-variance-authority"; +import { cva, type VariantProps } from "class-variance-authority"; import { ChevronRightIcon } from "lucide-react"; import { type FC, type ReactNode, useState } from "react"; import { cn } from "utils/cn"; diff --git a/site/src/components/Combobox/Combobox.stories.tsx b/site/src/components/Combobox/Combobox.stories.tsx index 2207f4e64686f..49fafd9ab3c79 100644 --- a/site/src/components/Combobox/Combobox.stories.tsx +++ b/site/src/components/Combobox/Combobox.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, screen, userEvent, waitFor, within } from "@storybook/test"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { useState } from "react"; +import { expect, screen, userEvent, waitFor, within } from "storybook/test"; import { Combobox } from "./Combobox"; const simpleOptions = ["Go", "Gleam", "Kotlin", "Rust"]; @@ -31,7 +31,9 @@ const advancedOptions = [ const ComboboxWithHooks = ({ options = advancedOptions, -}: { options?: React.ComponentProps["options"] }) => { +}: { + options?: React.ComponentProps["options"]; +}) => { const [value, setValue] = useState(""); const [open, setOpen] = useState(false); const [inputValue, setInputValue] = useState(""); diff --git a/site/src/components/Combobox/Combobox.tsx b/site/src/components/Combobox/Combobox.tsx index bc0fa73eb9653..2d522d6e6397c 100644 --- a/site/src/components/Combobox/Combobox.tsx +++ b/site/src/components/Combobox/Combobox.tsx @@ -1,4 +1,3 @@ -import { Avatar } from "components/Avatar/Avatar"; import { Button } from "components/Button/Button"; import { Command, @@ -19,10 +18,10 @@ import { TooltipProvider, TooltipTrigger, } from "components/Tooltip/Tooltip"; -import { Check, ChevronDown, CornerDownLeft } from "lucide-react"; -import { Info } from "lucide-react"; +import { Check, ChevronDown, CornerDownLeft, Info } from "lucide-react"; import { type FC, type KeyboardEventHandler, useState } from "react"; import { cn } from "utils/cn"; +import { ExternalImage } from "../ExternalImage/ExternalImage"; interface ComboboxProps { value: string; @@ -34,6 +33,7 @@ interface ComboboxProps { onInputChange?: (value: string) => void; onKeyDown?: KeyboardEventHandler; onSelect: (value: string) => void; + id?: string; } type ComboboxOption = { @@ -53,6 +53,7 @@ export const Combobox: FC = ({ onInputChange, onKeyDown, onSelect, + id, }) => { const [managedOpen, setManagedOpen] = useState(false); const [managedInputValue, setManagedInputValue] = useState(""); @@ -69,19 +70,19 @@ export const Combobox: FC = ({ const isOpen = open ?? managedOpen; + const handleOpenChange = (newOpen: boolean) => { + setManagedOpen(newOpen); + onOpenChange?.(newOpen); + }; + return ( - { - setManagedOpen(newOpen); - onOpenChange?.(newOpen); - }} - > + - + = ({ keywords={[option.displayName]} onSelect={(currentValue) => { onSelect(currentValue === value ? "" : currentValue); + // Close the popover after selection + handleOpenChange(false); }} > - {showIcons && ( - - )} + {showIcons && + (option.icon ? ( + + ) : ( + /* Placeholder for missing icon to maintain layout consistency */ +
+ ))} {option.displayName}
{value === option.value && ( @@ -134,7 +141,12 @@ export const Combobox: FC = ({ - + e.stopPropagation()} + > + + {option.description} diff --git a/site/src/components/Command/Command.tsx b/site/src/components/Command/Command.tsx index 88451d13b72ee..8973154f1d5c2 100644 --- a/site/src/components/Command/Command.tsx +++ b/site/src/components/Command/Command.tsx @@ -23,7 +23,7 @@ export const Command = forwardRef< /> )); -const CommandDialog: FC = ({ children, ...props }) => { +const _CommandDialog: FC = ({ children, ...props }) => { return ( @@ -132,7 +132,7 @@ export const CommandItem = forwardRef< /> )); -const CommandShortcut = ({ +const _CommandShortcut = ({ className, ...props }: React.HTMLAttributes) => { diff --git a/site/src/components/Conditionals/ChooseOne.stories.tsx b/site/src/components/Conditionals/ChooseOne.stories.tsx index 4650b8a1ec1fa..8d228a3178eda 100644 --- a/site/src/components/Conditionals/ChooseOne.stories.tsx +++ b/site/src/components/Conditionals/ChooseOne.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { ChooseOne, Cond } from "./ChooseOne"; const meta: Meta = { diff --git a/site/src/components/CopyButton/CopyButton.stories.tsx b/site/src/components/CopyButton/CopyButton.stories.tsx index c9c2de328f718..fc52fac242a97 100644 --- a/site/src/components/CopyButton/CopyButton.stories.tsx +++ b/site/src/components/CopyButton/CopyButton.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { CopyButton } from "./CopyButton"; const meta: Meta = { diff --git a/site/src/components/CopyableValue/CopyableValue.stories.tsx b/site/src/components/CopyableValue/CopyableValue.stories.tsx index 05cb09d57fffb..cc673e0e505ee 100644 --- a/site/src/components/CopyableValue/CopyableValue.stories.tsx +++ b/site/src/components/CopyableValue/CopyableValue.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { CopyableValue } from "./CopyableValue"; const meta: Meta = { diff --git a/site/src/components/Dialog/Dialog.stories.tsx b/site/src/components/Dialog/Dialog.stories.tsx index f0b555055d111..3385ad2774bb8 100644 --- a/site/src/components/Dialog/Dialog.stories.tsx +++ b/site/src/components/Dialog/Dialog.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { userEvent, within } from "@storybook/test"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { Button } from "components/Button/Button"; +import { userEvent, within } from "storybook/test"; import { Dialog, DialogContent, diff --git a/site/src/components/Dialog/Dialog.tsx b/site/src/components/Dialog/Dialog.tsx index 2ec5fa4dae212..13484f1840f69 100644 --- a/site/src/components/Dialog/Dialog.tsx +++ b/site/src/components/Dialog/Dialog.tsx @@ -3,13 +3,13 @@ * @see {@link https://ui.shadcn.com/docs/components/dialog} */ import * as DialogPrimitive from "@radix-ui/react-dialog"; -import { type VariantProps, cva } from "class-variance-authority"; +import { cva, type VariantProps } from "class-variance-authority"; import { type ComponentPropsWithoutRef, type ElementRef, type FC, - type HTMLAttributes, forwardRef, + type HTMLAttributes, } from "react"; import { cn } from "utils/cn"; @@ -19,7 +19,7 @@ export const DialogTrigger = DialogPrimitive.Trigger; const DialogPortal = DialogPrimitive.Portal; -const DialogClose = DialogPrimitive.Close; +const _DialogClose = DialogPrimitive.Close; const DialogOverlay = forwardRef< ElementRef, diff --git a/site/src/components/Dialogs/ConfirmDialog/ConfirmDialog.stories.tsx b/site/src/components/Dialogs/ConfirmDialog/ConfirmDialog.stories.tsx index dc257e7250b52..99895cb5b9567 100644 --- a/site/src/components/Dialogs/ConfirmDialog/ConfirmDialog.stories.tsx +++ b/site/src/components/Dialogs/ConfirmDialog/ConfirmDialog.stories.tsx @@ -1,5 +1,5 @@ -import { action } from "@storybook/addon-actions"; -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { action } from "storybook/actions"; import { ConfirmDialog } from "./ConfirmDialog"; const meta: Meta = { diff --git a/site/src/components/Dialogs/ConfirmDialog/ConfirmDialog.test.tsx b/site/src/components/Dialogs/ConfirmDialog/ConfirmDialog.test.tsx index b8a790dc8c167..72ce09290dfd1 100644 --- a/site/src/components/Dialogs/ConfirmDialog/ConfirmDialog.test.tsx +++ b/site/src/components/Dialogs/ConfirmDialog/ConfirmDialog.test.tsx @@ -1,5 +1,5 @@ -import { fireEvent, screen } from "@testing-library/react"; import { renderComponent } from "testHelpers/renderHelpers"; +import { fireEvent, screen } from "@testing-library/react"; import { ConfirmDialog } from "./ConfirmDialog"; describe("ConfirmDialog", () => { diff --git a/site/src/components/Dialogs/DeleteDialog/DeleteDialog.stories.tsx b/site/src/components/Dialogs/DeleteDialog/DeleteDialog.stories.tsx index 68f00eaa5c7e0..a86eee62b95ed 100644 --- a/site/src/components/Dialogs/DeleteDialog/DeleteDialog.stories.tsx +++ b/site/src/components/Dialogs/DeleteDialog/DeleteDialog.stories.tsx @@ -1,7 +1,7 @@ -import { action } from "@storybook/addon-actions"; -import type { Meta, StoryObj } from "@storybook/react"; -import { userEvent } from "@storybook/test"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { within } from "@testing-library/react"; +import { action } from "storybook/actions"; +import { userEvent } from "storybook/test"; import { DeleteDialog } from "./DeleteDialog"; const meta: Meta = { diff --git a/site/src/components/Dialogs/DeleteDialog/DeleteDialog.test.tsx b/site/src/components/Dialogs/DeleteDialog/DeleteDialog.test.tsx index 7dc27f977b109..ec2635ee191ce 100644 --- a/site/src/components/Dialogs/DeleteDialog/DeleteDialog.test.tsx +++ b/site/src/components/Dialogs/DeleteDialog/DeleteDialog.test.tsx @@ -1,7 +1,7 @@ +import { renderComponent } from "testHelpers/renderHelpers"; import { screen } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; import { act } from "react-dom/test-utils"; -import { renderComponent } from "testHelpers/renderHelpers"; import { DeleteDialog } from "./DeleteDialog"; const inputTestId = "delete-dialog-name-confirmation"; diff --git a/site/src/components/DropdownArrow/DropdownArrow.stories.tsx b/site/src/components/DropdownArrow/DropdownArrow.stories.tsx index a6a0f182427a3..7413bbc70fe39 100644 --- a/site/src/components/DropdownArrow/DropdownArrow.stories.tsx +++ b/site/src/components/DropdownArrow/DropdownArrow.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { chromatic } from "testHelpers/chromatic"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { DropdownArrow } from "./DropdownArrow"; const meta: Meta = { diff --git a/site/src/components/DropdownMenu/DropdownMenu.stories.tsx b/site/src/components/DropdownMenu/DropdownMenu.stories.tsx index f9ba8cd290902..3276a5fbed97a 100644 --- a/site/src/components/DropdownMenu/DropdownMenu.stories.tsx +++ b/site/src/components/DropdownMenu/DropdownMenu.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { userEvent, within } from "@storybook/test"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { Button } from "components/Button/Button"; +import { userEvent, within } from "storybook/test"; import { DropdownMenu, DropdownMenuContent, diff --git a/site/src/components/DropdownMenu/DropdownMenu.tsx b/site/src/components/DropdownMenu/DropdownMenu.tsx index 01547c30b17a6..8e0e1fb628dcc 100644 --- a/site/src/components/DropdownMenu/DropdownMenu.tsx +++ b/site/src/components/DropdownMenu/DropdownMenu.tsx @@ -11,8 +11,8 @@ import { Check, ChevronRight, Circle } from "lucide-react"; import { type ComponentPropsWithoutRef, type ElementRef, - type HTMLAttributes, forwardRef, + type HTMLAttributes, } from "react"; import { cn } from "utils/cn"; @@ -20,13 +20,13 @@ export const DropdownMenu = DropdownMenuPrimitive.Root; export const DropdownMenuTrigger = DropdownMenuPrimitive.Trigger; -const DropdownMenuGroup = DropdownMenuPrimitive.Group; +const _DropdownMenuGroup = DropdownMenuPrimitive.Group; -const DropdownMenuPortal = DropdownMenuPrimitive.Portal; +const _DropdownMenuPortal = DropdownMenuPrimitive.Portal; -const DropdownMenuSub = DropdownMenuPrimitive.Sub; +const _DropdownMenuSub = DropdownMenuPrimitive.Sub; -const DropdownMenuRadioGroup = DropdownMenuPrimitive.RadioGroup; +const _DropdownMenuRadioGroup = DropdownMenuPrimitive.RadioGroup; const DropdownMenuSubTrigger = forwardRef< ElementRef, diff --git a/site/src/components/DurationField/DurationField.stories.tsx b/site/src/components/DurationField/DurationField.stories.tsx index 60c441aa85d79..a68f3454ff838 100644 --- a/site/src/components/DurationField/DurationField.stories.tsx +++ b/site/src/components/DurationField/DurationField.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, userEvent, within } from "@storybook/test"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { useState } from "react"; +import { expect, userEvent, within } from "storybook/test"; import { DurationField } from "./DurationField"; const meta: Meta = { diff --git a/site/src/components/DurationField/DurationField.tsx b/site/src/components/DurationField/DurationField.tsx index 7ee5153964164..9f6a0fb5436a7 100644 --- a/site/src/components/DurationField/DurationField.tsx +++ b/site/src/components/DurationField/DurationField.tsx @@ -5,10 +5,10 @@ import TextField, { type TextFieldProps } from "@mui/material/TextField"; import { ChevronDownIcon } from "lucide-react"; import { type FC, useEffect, useReducer } from "react"; import { - type TimeUnit, durationInDays, durationInHours, suggestedTimeUnit, + type TimeUnit, } from "utils/time"; type DurationFieldProps = Omit & { diff --git a/site/src/components/EmptyState/EmptyState.stories.tsx b/site/src/components/EmptyState/EmptyState.stories.tsx index 8b9780bb44fca..5497ab8cbad01 100644 --- a/site/src/components/EmptyState/EmptyState.stories.tsx +++ b/site/src/components/EmptyState/EmptyState.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { Button } from "components/Button/Button"; import { EmptyState } from "./EmptyState"; diff --git a/site/src/components/EmptyState/EmptyState.tsx b/site/src/components/EmptyState/EmptyState.tsx index 1371d7e9fa56e..3faede44dd4a2 100644 --- a/site/src/components/EmptyState/EmptyState.tsx +++ b/site/src/components/EmptyState/EmptyState.tsx @@ -1,4 +1,5 @@ import type { FC, HTMLAttributes, ReactNode } from "react"; +import { cn } from "utils/cn"; export interface EmptyStateProps extends HTMLAttributes { /** Text Message to display, placed inside Typography component */ @@ -21,44 +22,25 @@ export const EmptyState: FC = ({ cta, image, isCompact, + className, ...attrs }) => { return (
-
{message}
+
{message}
{description && ( -

({ - marginTop: 16, - fontSize: 16, - lineHeight: "140%", - maxWidth: 480, - color: theme.palette.text.secondary, - })} - > +

{description}

)} - {cta &&
{cta}
} + {cta &&
{cta}
} {image}
); diff --git a/site/src/components/ErrorBoundary/GlobalErrorBoundary.stories.tsx b/site/src/components/ErrorBoundary/GlobalErrorBoundary.stories.tsx index 9c6deed539c21..c013b1cfa543e 100644 --- a/site/src/components/ErrorBoundary/GlobalErrorBoundary.stories.tsx +++ b/site/src/components/ErrorBoundary/GlobalErrorBoundary.stories.tsx @@ -1,7 +1,7 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, userEvent } from "@storybook/test"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { within } from "@testing-library/react"; -import type { ErrorResponse } from "react-router-dom"; +import type { ErrorResponse } from "react-router"; +import { expect, userEvent } from "storybook/test"; import { GlobalErrorBoundaryInner } from "./GlobalErrorBoundary"; /** diff --git a/site/src/components/ErrorBoundary/GlobalErrorBoundary.tsx b/site/src/components/ErrorBoundary/GlobalErrorBoundary.tsx index 009a87ba254e0..e9042eefb7d6b 100644 --- a/site/src/components/ErrorBoundary/GlobalErrorBoundary.tsx +++ b/site/src/components/ErrorBoundary/GlobalErrorBoundary.tsx @@ -9,7 +9,7 @@ import { isRouteErrorResponse, useLocation, useRouteError, -} from "react-router-dom"; +} from "react-router"; const errorPageTitle = "Something went wrong"; diff --git a/site/src/components/Expander/Expander.stories.tsx b/site/src/components/Expander/Expander.stories.tsx index 0f3e8f26e7029..b4f768e14df6b 100644 --- a/site/src/components/Expander/Expander.stories.tsx +++ b/site/src/components/Expander/Expander.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { Expander } from "./Expander"; const meta: Meta = { diff --git a/site/src/components/ExternalImage/ExternalImage.tsx b/site/src/components/ExternalImage/ExternalImage.tsx index d85b227b999b4..537ad11cfb8a4 100644 --- a/site/src/components/ExternalImage/ExternalImage.tsx +++ b/site/src/components/ExternalImage/ExternalImage.tsx @@ -1,5 +1,5 @@ import { useTheme } from "@emotion/react"; -import { type ImgHTMLAttributes, forwardRef } from "react"; +import { forwardRef, type ImgHTMLAttributes } from "react"; import { getExternalImageStylesFromUrl } from "theme/externalImages"; export const ExternalImage = forwardRef< diff --git a/site/src/components/FeatureStageBadge/FeatureStageBadge.stories.tsx b/site/src/components/FeatureStageBadge/FeatureStageBadge.stories.tsx index c0f3aad774473..7804dcd77433f 100644 --- a/site/src/components/FeatureStageBadge/FeatureStageBadge.stories.tsx +++ b/site/src/components/FeatureStageBadge/FeatureStageBadge.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { FeatureStageBadge } from "./FeatureStageBadge"; const meta: Meta = { diff --git a/site/src/components/FileUpload/FileUpload.stories.tsx b/site/src/components/FileUpload/FileUpload.stories.tsx index ab40e794bf76b..286a345b29f5f 100644 --- a/site/src/components/FileUpload/FileUpload.stories.tsx +++ b/site/src/components/FileUpload/FileUpload.stories.tsx @@ -1,5 +1,5 @@ import Link from "@mui/material/Link"; -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { FileUpload } from "./FileUpload"; const meta: Meta = { diff --git a/site/src/components/FileUpload/FileUpload.test.tsx b/site/src/components/FileUpload/FileUpload.test.tsx index 6292bc200a517..e3ebce085ce50 100644 --- a/site/src/components/FileUpload/FileUpload.test.tsx +++ b/site/src/components/FileUpload/FileUpload.test.tsx @@ -1,5 +1,5 @@ -import { fireEvent, screen } from "@testing-library/react"; import { renderComponent } from "testHelpers/renderHelpers"; +import { fireEvent, screen } from "@testing-library/react"; import { FileUpload } from "./FileUpload"; test("accepts files with the correct extension", async () => { diff --git a/site/src/components/FileUpload/FileUpload.tsx b/site/src/components/FileUpload/FileUpload.tsx index 67ec27054ade4..95c7baa816032 100644 --- a/site/src/components/FileUpload/FileUpload.tsx +++ b/site/src/components/FileUpload/FileUpload.tsx @@ -1,4 +1,4 @@ -import { type Interpolation, type Theme, css } from "@emotion/react"; +import { css, type Interpolation, type Theme } from "@emotion/react"; import CircularProgress from "@mui/material/CircularProgress"; import IconButton from "@mui/material/IconButton"; import { Stack } from "components/Stack/Stack"; diff --git a/site/src/components/Filter/Filter.tsx b/site/src/components/Filter/Filter.tsx index 736592116730d..1ee162acccf99 100644 --- a/site/src/components/Filter/Filter.tsx +++ b/site/src/components/Filter/Filter.tsx @@ -13,10 +13,8 @@ import { Button } from "components/Button/Button"; import { InputGroup } from "components/InputGroup/InputGroup"; import { SearchField } from "components/SearchField/SearchField"; import { useDebouncedFunction } from "hooks/debounce"; -import { ExternalLinkIcon } from "lucide-react"; -import { ChevronDownIcon } from "lucide-react"; +import { ChevronDownIcon, ExternalLinkIcon } from "lucide-react"; import { type FC, type ReactNode, useEffect, useRef, useState } from "react"; -import type { useSearchParams } from "react-router-dom"; type PresetFilter = { name: string; @@ -27,35 +25,55 @@ type FilterValues = Record; type UseFilterConfig = { /** - * The fallback value to use in the event that no filter params can be parsed - * from the search params object. This value is allowed to change on - * re-renders. + * The fallback value to use in the event that no filter params can be + * parsed from the search params object. */ fallbackFilter?: string; - searchParamsResult: ReturnType; + searchParams: URLSearchParams; + onSearchParamsChange: (newParams: URLSearchParams) => void; onUpdate?: (newValue: string) => void; }; +export type UseFilterResult = Readonly<{ + query: string; + values: FilterValues; + used: boolean; + update: (newValues: string | FilterValues) => void; + debounceUpdate: (newValues: string | FilterValues) => void; + cancelDebounce: () => void; +}>; + export const useFilterParamsKey = "filter"; export const useFilter = ({ fallbackFilter = "", - searchParamsResult, + searchParams, + onSearchParamsChange, onUpdate, -}: UseFilterConfig) => { - const [searchParams, setSearchParams] = searchParamsResult; +}: UseFilterConfig): UseFilterResult => { const query = searchParams.get(useFilterParamsKey) ?? fallbackFilter; const update = (newValues: string | FilterValues) => { const serialized = typeof newValues === "string" ? newValues : stringifyFilter(newValues); + const noUpdateNeeded = query === serialized; + if (noUpdateNeeded) { + return; + } + /** + * @todo 2025-07-15 - We have a slightly nasty bug here, where trying to + * update state via immutable state updates causes our code to break. + * + * In theory, it would be better to make a copy of the search params. We + * can then mutate and dispatch the copy instead of the original. Doing + * that causes other parts of our existing logic to break, though. + * That's a sign that our other code is slightly broken, and only just + * happens to work by chance right now. + */ searchParams.set(useFilterParamsKey, serialized); - setSearchParams(searchParams); - - if (onUpdate !== undefined) { - onUpdate(serialized); - } + onSearchParamsChange(searchParams); + onUpdate?.(serialized); }; const { debounced: debounceUpdate, cancelDebounce } = useDebouncedFunction( @@ -73,8 +91,6 @@ export const useFilter = ({ }; }; -export type UseFilterResult = ReturnType; - const parseFilterQuery = (filterQuery: string): FilterValues => { if (filterQuery === "") { return {}; diff --git a/site/src/components/Filter/SelectFilter.stories.tsx b/site/src/components/Filter/SelectFilter.stories.tsx index fcb187c1c098c..136332ccfa883 100644 --- a/site/src/components/Filter/SelectFilter.stories.tsx +++ b/site/src/components/Filter/SelectFilter.stories.tsx @@ -1,9 +1,9 @@ -import { action } from "@storybook/addon-actions"; -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, userEvent, within } from "@storybook/test"; +import { withDesktopViewport } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { Avatar } from "components/Avatar/Avatar"; import { useState } from "react"; -import { withDesktopViewport } from "testHelpers/storybook"; +import { action } from "storybook/actions"; +import { expect, userEvent, within } from "storybook/test"; import { SelectFilter, type SelectFilterOption, diff --git a/site/src/components/Filter/storyHelpers.ts b/site/src/components/Filter/storyHelpers.ts index 9ee1bfaef96ac..a499fe2072521 100644 --- a/site/src/components/Filter/storyHelpers.ts +++ b/site/src/components/Filter/storyHelpers.ts @@ -1,4 +1,4 @@ -import { action } from "@storybook/addon-actions"; +import { action } from "storybook/actions"; import type { UseFilterResult } from "./Filter"; import type { UseFilterMenuResult } from "./menu"; diff --git a/site/src/components/Form/Form.stories.tsx b/site/src/components/Form/Form.stories.tsx index 46c783347b374..7ba89fa440747 100644 --- a/site/src/components/Form/Form.stories.tsx +++ b/site/src/components/Form/Form.stories.tsx @@ -1,5 +1,5 @@ import TextField from "@mui/material/TextField"; -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { Form, FormFields, FormSection } from "./Form"; const meta: Meta = { diff --git a/site/src/components/Form/Form.tsx b/site/src/components/Form/Form.tsx index faf900fb4f344..d535a63642324 100644 --- a/site/src/components/Form/Form.tsx +++ b/site/src/components/Form/Form.tsx @@ -3,11 +3,11 @@ import { AlphaBadge, DeprecatedBadge } from "components/Badges/Badges"; import { Stack } from "components/Stack/Stack"; import { type ComponentProps, + createContext, type FC, + forwardRef, type HTMLProps, type ReactNode, - createContext, - forwardRef, useContext, } from "react"; import { cn } from "utils/cn"; @@ -159,10 +159,13 @@ const styles = { position: "initial" as const, }, }), - formSectionInfoHorizontal: { + formSectionInfoHorizontal: (theme) => ({ maxWidth: 312, - position: "sticky", - }, + + [theme.breakpoints.up("lg")]: { + position: "sticky", + }, + }), formSectionInfoTitle: (theme) => ({ fontSize: 20, color: theme.palette.text.primary, diff --git a/site/src/components/FullPageForm/FullPageForm.stories.tsx b/site/src/components/FullPageForm/FullPageForm.stories.tsx index cb15173f07e46..5ef859d4c6a33 100644 --- a/site/src/components/FullPageForm/FullPageForm.stories.tsx +++ b/site/src/components/FullPageForm/FullPageForm.stories.tsx @@ -1,5 +1,5 @@ import TextField from "@mui/material/TextField"; -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { Button } from "components/Button/Button"; import { FormFooter } from "components/Form/Form"; import type { FC } from "react"; diff --git a/site/src/components/FullPageForm/FullPageForm.tsx b/site/src/components/FullPageForm/FullPageForm.tsx index 571cc56ea9b0f..b4825731bcd43 100644 --- a/site/src/components/FullPageForm/FullPageForm.tsx +++ b/site/src/components/FullPageForm/FullPageForm.tsx @@ -19,7 +19,7 @@ export const FullPageForm: FC = ({ }) => { return ( - + {title} {detail && {detail}} diff --git a/site/src/components/FullPageLayout/Sidebar.tsx b/site/src/components/FullPageLayout/Sidebar.tsx index 8852d796abaa0..f58e97ac607c2 100644 --- a/site/src/components/FullPageLayout/Sidebar.tsx +++ b/site/src/components/FullPageLayout/Sidebar.tsx @@ -1,30 +1,28 @@ -import { type Interpolation, type Theme, useTheme } from "@emotion/react"; import type { ComponentProps, FC, HTMLAttributes } from "react"; -import { Link, type LinkProps } from "react-router-dom"; +import { Link, type LinkProps } from "react-router"; +import { cn } from "utils/cn"; import { TopbarIconButton } from "./Topbar"; export const Sidebar: FC> = (props) => { - const theme = useTheme(); return (
); }; -export const SidebarLink: FC = (props) => { - return ; +export const SidebarLink: FC = ({ className, ...props }) => { + return ( + + ); }; interface SidebarItemProps extends HTMLAttributes { @@ -33,21 +31,16 @@ interface SidebarItemProps extends HTMLAttributes { export const SidebarItem: FC = ({ isActive, + className, ...buttonProps }) => { - const theme = useTheme(); - return (
@@ -335,7 +343,7 @@ export const AgentRow: FC = ({ Logs - + )} diff --git a/site/src/modules/resources/AgentRowPreview.stories.tsx b/site/src/modules/resources/AgentRowPreview.stories.tsx index fdcd84093bddb..8cd4f6bd7b78d 100644 --- a/site/src/modules/resources/AgentRowPreview.stories.tsx +++ b/site/src/modules/resources/AgentRowPreview.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockWorkspaceAgent, MockWorkspaceApp } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { AgentRowPreview } from "./AgentRowPreview"; const meta: Meta = { diff --git a/site/src/modules/resources/AgentRowPreview.test.tsx b/site/src/modules/resources/AgentRowPreview.test.tsx index c1b876b72ef3b..23b34dc8c7fc8 100644 --- a/site/src/modules/resources/AgentRowPreview.test.tsx +++ b/site/src/modules/resources/AgentRowPreview.test.tsx @@ -1,11 +1,11 @@ +import { MockWorkspaceAgent } from "testHelpers/entities"; +import { renderComponent } from "testHelpers/renderHelpers"; import { screen } from "@testing-library/react"; import { type DisplayApp, DisplayApps, type WorkspaceAgent, } from "api/typesGenerated"; -import { MockWorkspaceAgent } from "testHelpers/entities"; -import { renderComponent } from "testHelpers/renderHelpers"; import { AgentRowPreview } from "./AgentRowPreview"; import { DisplayAppNameMap } from "./AppLink/AppLink"; diff --git a/site/src/modules/resources/AgentRowPreview.tsx b/site/src/modules/resources/AgentRowPreview.tsx index 70de1450322da..2df8b3a4d5460 100644 --- a/site/src/modules/resources/AgentRowPreview.tsx +++ b/site/src/modules/resources/AgentRowPreview.tsx @@ -93,10 +93,8 @@ export const AgentRowPreview: FC = ({ {/* We display all modules returned in agent.apps */} {agent.apps.map((app) => ( - <> - - {app.display_name} - + + {app.display_name} ))} {/* Additionally, we display any apps that are visible, e.g. diff --git a/site/src/modules/resources/AgentStatus.tsx b/site/src/modules/resources/AgentStatus.tsx index 7eb165d19f8c2..8f6b923e70d68 100644 --- a/site/src/modules/resources/AgentStatus.tsx +++ b/site/src/modules/resources/AgentStatus.tsx @@ -6,13 +6,13 @@ import type { WorkspaceAgentDevcontainer, } from "api/typesGenerated"; import { ChooseOne, Cond } from "components/Conditionals/ChooseOne"; +import { PopoverTrigger } from "components/deprecated/Popover/Popover"; import { HelpTooltip, HelpTooltipContent, HelpTooltipText, HelpTooltipTitle, } from "components/HelpTooltip/HelpTooltip"; -import { PopoverTrigger } from "components/deprecated/Popover/Popover"; import { TriangleAlertIcon } from "lucide-react"; import type { FC } from "react"; diff --git a/site/src/modules/resources/AppLink/AppLink.stories.tsx b/site/src/modules/resources/AppLink/AppLink.stories.tsx index 891ddd3c2af7d..32e3ee47ebe40 100644 --- a/site/src/modules/resources/AppLink/AppLink.stories.tsx +++ b/site/src/modules/resources/AppLink/AppLink.stories.tsx @@ -1,5 +1,3 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { getPreferredProxy } from "contexts/ProxyContext"; import { MockPrimaryWorkspaceProxy, MockWorkspace, @@ -8,6 +6,8 @@ import { MockWorkspaceProxies, } from "testHelpers/entities"; import { withGlobalSnackbar, withProxyProvider } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { getPreferredProxy } from "contexts/ProxyContext"; import { AppLink } from "./AppLink"; const meta: Meta = { diff --git a/site/src/modules/resources/AppLink/AppLink.tsx b/site/src/modules/resources/AppLink/AppLink.tsx index 637f0287a4088..5d27eae8a9630 100644 --- a/site/src/modules/resources/AppLink/AppLink.tsx +++ b/site/src/modules/resources/AppLink/AppLink.tsx @@ -1,4 +1,3 @@ -import { useTheme } from "@emotion/react"; import type * as TypesGen from "api/typesGenerated"; import { DropdownMenuItem } from "components/DropdownMenu/DropdownMenu"; import { Spinner } from "components/Spinner/Spinner"; @@ -41,7 +40,6 @@ export const AppLink: FC = ({ const { proxy } = useProxy(); const host = proxy.preferredWildcardHostname; const [iconError, setIconError] = useState(false); - const theme = useTheme(); const link = useAppLink(app, { agent, workspace }); // canClick is ONLY false when it's a subdomain app and the admin hasn't @@ -64,8 +62,7 @@ export const AppLink: FC = ({ icon = (
+
- + Run the following commands to connect with SSH:
    - + = ({ helpText, codeExample }) => (
  1. - {helpText} + {helpText}
  2. ); - -const classNames = { - paper: (css, theme) => css` - padding: 16px 24px 24px; - width: 304px; - color: ${theme.palette.text.secondary}; - margin-top: 2px; - `, -} satisfies Record; - -const styles = { - codeExamples: { - marginTop: 12, - }, - - codeExampleLabel: { - fontSize: 12, - }, -} satisfies Record>; diff --git a/site/src/modules/resources/SensitiveValue.tsx b/site/src/modules/resources/SensitiveValue.tsx index 626c7a8623291..b1ec1b4410e3e 100644 --- a/site/src/modules/resources/SensitiveValue.tsx +++ b/site/src/modules/resources/SensitiveValue.tsx @@ -1,4 +1,4 @@ -import { type Interpolation, type Theme, css } from "@emotion/react"; +import { css, type Interpolation, type Theme } from "@emotion/react"; import IconButton from "@mui/material/IconButton"; import Tooltip from "@mui/material/Tooltip"; import { CopyableValue } from "components/CopyableValue/CopyableValue"; diff --git a/site/src/modules/resources/TerminalLink/TerminalLink.stories.tsx b/site/src/modules/resources/TerminalLink/TerminalLink.stories.tsx index 0073a732e6228..baef99efcb07e 100644 --- a/site/src/modules/resources/TerminalLink/TerminalLink.stories.tsx +++ b/site/src/modules/resources/TerminalLink/TerminalLink.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockWorkspace } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { TerminalLink } from "./TerminalLink"; const meta: Meta = { diff --git a/site/src/modules/resources/VSCodeDesktopButton/VSCodeDesktopButton.stories.tsx b/site/src/modules/resources/VSCodeDesktopButton/VSCodeDesktopButton.stories.tsx index fe3f274b17d24..477a40d106242 100644 --- a/site/src/modules/resources/VSCodeDesktopButton/VSCodeDesktopButton.stories.tsx +++ b/site/src/modules/resources/VSCodeDesktopButton/VSCodeDesktopButton.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockWorkspace, MockWorkspaceAgent } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { VSCodeDesktopButton } from "./VSCodeDesktopButton"; const meta: Meta = { diff --git a/site/src/modules/resources/VSCodeDevContainerButton/VSCodeDevContainerButton.stories.tsx b/site/src/modules/resources/VSCodeDevContainerButton/VSCodeDevContainerButton.stories.tsx index 0a3838e4251c0..f6cfd0850d7ed 100644 --- a/site/src/modules/resources/VSCodeDevContainerButton/VSCodeDevContainerButton.stories.tsx +++ b/site/src/modules/resources/VSCodeDevContainerButton/VSCodeDevContainerButton.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockWorkspace, MockWorkspaceAgent } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { VSCodeDevContainerButton } from "./VSCodeDevContainerButton"; const meta: Meta = { diff --git a/site/src/modules/resources/useAgentContainers.test.tsx b/site/src/modules/resources/useAgentContainers.test.tsx index 363f8d93223c8..f00f7b242b6e3 100644 --- a/site/src/modules/resources/useAgentContainers.test.tsx +++ b/site/src/modules/resources/useAgentContainers.test.tsx @@ -1,17 +1,17 @@ +import { + MockWorkspaceAgent, + MockWorkspaceAgentDevcontainer, +} from "testHelpers/entities"; +import { createTestQueryClient } from "testHelpers/renderHelpers"; +import { server } from "testHelpers/server"; import { renderHook, waitFor } from "@testing-library/react"; import * as API from "api/api"; import type { WorkspaceAgentListContainersResponse } from "api/typesGenerated"; import * as GlobalSnackbar from "components/GlobalSnackbar/utils"; -import { http, HttpResponse } from "msw"; +import { HttpResponse, http } from "msw"; import type { FC, PropsWithChildren } from "react"; import { act } from "react"; import { QueryClientProvider } from "react-query"; -import { - MockWorkspaceAgent, - MockWorkspaceAgentDevcontainer, -} from "testHelpers/entities"; -import { createTestQueryClient } from "testHelpers/renderHelpers"; -import { server } from "testHelpers/server"; import type { OneWayWebSocket } from "utils/OneWayWebSocket"; import { useAgentContainers } from "./useAgentContainers"; diff --git a/site/src/modules/resources/useAgentLogs.test.ts b/site/src/modules/resources/useAgentLogs.test.ts index 186087c871299..c4943c6f9d50f 100644 --- a/site/src/modules/resources/useAgentLogs.test.ts +++ b/site/src/modules/resources/useAgentLogs.test.ts @@ -1,7 +1,7 @@ +import { MockWorkspaceAgent } from "testHelpers/entities"; import { renderHook, waitFor } from "@testing-library/react"; import type { WorkspaceAgentLog } from "api/typesGenerated"; import WS from "jest-websocket-mock"; -import { MockWorkspaceAgent } from "testHelpers/entities"; import { useAgentLogs } from "./useAgentLogs"; /** diff --git a/site/src/modules/tableFiltering/options.tsx b/site/src/modules/tableFiltering/options.tsx index 9bc55744edb54..7b6964b5cd851 100644 --- a/site/src/modules/tableFiltering/options.tsx +++ b/site/src/modules/tableFiltering/options.tsx @@ -9,15 +9,15 @@ */ import { API } from "api/api"; import { Avatar } from "components/Avatar/Avatar"; +import { + type UseFilterMenuOptions, + useFilterMenu, +} from "components/Filter/menu"; import { SelectFilter, type SelectFilterOption, SelectFilterSearch, } from "components/Filter/SelectFilter"; -import { - type UseFilterMenuOptions, - useFilterMenu, -} from "components/Filter/menu"; import type { FC } from "react"; // Organization helpers //////////////////////////////////////////////////////// diff --git a/site/src/modules/templates/TemplateExampleCard/TemplateExampleCard.stories.tsx b/site/src/modules/templates/TemplateExampleCard/TemplateExampleCard.stories.tsx index 248da641b0c29..775c82f806c37 100644 --- a/site/src/modules/templates/TemplateExampleCard/TemplateExampleCard.stories.tsx +++ b/site/src/modules/templates/TemplateExampleCard/TemplateExampleCard.stories.tsx @@ -1,9 +1,9 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { chromatic } from "testHelpers/chromatic"; import { MockTemplateExample, MockTemplateExample2, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { TemplateExampleCard } from "./TemplateExampleCard"; const meta: Meta = { diff --git a/site/src/modules/templates/TemplateExampleCard/TemplateExampleCard.tsx b/site/src/modules/templates/TemplateExampleCard/TemplateExampleCard.tsx index bf5c03f96bd2d..6ecdc11ed84d9 100644 --- a/site/src/modules/templates/TemplateExampleCard/TemplateExampleCard.tsx +++ b/site/src/modules/templates/TemplateExampleCard/TemplateExampleCard.tsx @@ -5,7 +5,7 @@ import { Button } from "components/Button/Button"; import { ExternalImage } from "components/ExternalImage/ExternalImage"; import { Pill } from "components/Pill/Pill"; import type { FC, HTMLAttributes } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; type TemplateExampleCardProps = HTMLAttributes & { example: TemplateExample; diff --git a/site/src/modules/templates/TemplateFiles/TemplateFileTree.stories.tsx b/site/src/modules/templates/TemplateFiles/TemplateFileTree.stories.tsx index 3f3fcc4badfff..45191a2320a3f 100644 --- a/site/src/modules/templates/TemplateFiles/TemplateFileTree.stories.tsx +++ b/site/src/modules/templates/TemplateFiles/TemplateFileTree.stories.tsx @@ -1,6 +1,6 @@ -import { useTheme } from "@emotion/react"; -import type { Meta, StoryObj } from "@storybook/react"; import { chromatic } from "testHelpers/chromatic"; +import { useTheme } from "@emotion/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import type { FileTree } from "utils/filetree"; import { TemplateFileTree } from "./TemplateFileTree"; diff --git a/site/src/modules/templates/TemplateFiles/TemplateFiles.stories.tsx b/site/src/modules/templates/TemplateFiles/TemplateFiles.stories.tsx index 71361d7ac03f0..3a7adcfacf52c 100644 --- a/site/src/modules/templates/TemplateFiles/TemplateFiles.stories.tsx +++ b/site/src/modules/templates/TemplateFiles/TemplateFiles.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { chromatic } from "testHelpers/chromatic"; import { withDashboardProvider } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { TemplateFiles } from "./TemplateFiles"; const exampleFiles = { diff --git a/site/src/modules/templates/TemplateFiles/TemplateFiles.tsx b/site/src/modules/templates/TemplateFiles/TemplateFiles.tsx index 95716e38827aa..a7abe1c7bc3e8 100644 --- a/site/src/modules/templates/TemplateFiles/TemplateFiles.tsx +++ b/site/src/modules/templates/TemplateFiles/TemplateFiles.tsx @@ -5,7 +5,7 @@ import { SyntaxHighlighter } from "components/SyntaxHighlighter/SyntaxHighlighte import set from "lodash/set"; import { linkToTemplate, useLinks } from "modules/navigation"; import { type FC, useCallback, useMemo } from "react"; -import { Link } from "react-router-dom"; +import { Link } from "react-router"; import type { FileTree } from "utils/filetree"; import type { TemplateVersionFiles } from "utils/templateVersion"; import { TemplateFileTree } from "./TemplateFileTree"; diff --git a/site/src/modules/templates/TemplateResourcesTable/TemplateResourcesTable.stories.tsx b/site/src/modules/templates/TemplateResourcesTable/TemplateResourcesTable.stories.tsx index 5e75547b1bdc2..9a0d1556e97ea 100644 --- a/site/src/modules/templates/TemplateResourcesTable/TemplateResourcesTable.stories.tsx +++ b/site/src/modules/templates/TemplateResourcesTable/TemplateResourcesTable.stories.tsx @@ -1,4 +1,3 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockWorkspaceAgent, MockWorkspaceAgentConnecting, @@ -6,6 +5,7 @@ import { MockWorkspaceResource, MockWorkspaceVolumeResource, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { TemplateResourcesTable } from "./TemplateResourcesTable"; const meta: Meta = { diff --git a/site/src/modules/templates/TemplateUpdateMessage.stories.tsx b/site/src/modules/templates/TemplateUpdateMessage.stories.tsx index 22484a5200e70..2a299385338fd 100644 --- a/site/src/modules/templates/TemplateUpdateMessage.stories.tsx +++ b/site/src/modules/templates/TemplateUpdateMessage.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { TemplateUpdateMessage } from "./TemplateUpdateMessage"; const meta: Meta = { diff --git a/site/src/modules/workspaces/ClassicParameterFlowDeprecationWarning/ClassicParameterFlowDeprecationWarning.test.tsx b/site/src/modules/workspaces/ClassicParameterFlowDeprecationWarning/ClassicParameterFlowDeprecationWarning.test.tsx new file mode 100644 index 0000000000000..f68bb273f26a0 --- /dev/null +++ b/site/src/modules/workspaces/ClassicParameterFlowDeprecationWarning/ClassicParameterFlowDeprecationWarning.test.tsx @@ -0,0 +1,39 @@ +import { render, screen } from "@testing-library/react"; +import { ClassicParameterFlowDeprecationWarning } from "./ClassicParameterFlowDeprecationWarning"; + +jest.mock("modules/navigation", () => ({ + useLinks: () => () => "/mock-link", + linkToTemplate: () => "/mock-template-link", +})); + +describe("ClassicParameterFlowDeprecationWarning", () => { + const defaultProps = { + organizationName: "test-org", + templateName: "test-template", + }; + + it("renders warning when enabled and user has template update permissions", () => { + render( + , + ); + + expect(screen.getByText("deprecated")).toBeInTheDocument(); + expect(screen.getByText("Go to Template Settings")).toBeInTheDocument(); + }); + + it("does not render when enabled is false", () => { + const { container } = render( + , + ); + + expect(container.firstChild).toBeNull(); + }); +}); diff --git a/site/src/modules/workspaces/ClassicParameterFlowDeprecationWarning/ClassicParameterFlowDeprecationWarning.tsx b/site/src/modules/workspaces/ClassicParameterFlowDeprecationWarning/ClassicParameterFlowDeprecationWarning.tsx new file mode 100644 index 0000000000000..d6afd3be464bf --- /dev/null +++ b/site/src/modules/workspaces/ClassicParameterFlowDeprecationWarning/ClassicParameterFlowDeprecationWarning.tsx @@ -0,0 +1,38 @@ +import { Alert } from "components/Alert/Alert"; +import { Link } from "components/Link/Link"; +import type { FC } from "react"; +import { docs } from "utils/docs"; + +interface ClassicParameterFlowDeprecationWarningProps { + templateSettingsLink: string; + isEnabled: boolean; +} + +export const ClassicParameterFlowDeprecationWarning: FC< + ClassicParameterFlowDeprecationWarningProps +> = ({ templateSettingsLink, isEnabled }) => { + if (!isEnabled) { + return null; + } + + return ( + +
    + + + Go to Template Settings + + + ); +}; diff --git a/site/src/modules/workspaces/DynamicParameter/DynamicParameter.stories.tsx b/site/src/modules/workspaces/DynamicParameter/DynamicParameter.stories.tsx index 5e077df642855..ac627c6130565 100644 --- a/site/src/modules/workspaces/DynamicParameter/DynamicParameter.stories.tsx +++ b/site/src/modules/workspaces/DynamicParameter/DynamicParameter.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockPreviewParameter } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { DynamicParameter } from "./DynamicParameter"; const meta: Meta = { diff --git a/site/src/modules/workspaces/DynamicParameter/DynamicParameter.test.tsx b/site/src/modules/workspaces/DynamicParameter/DynamicParameter.test.tsx index e3bfd8dc80635..716fd26df4474 100644 --- a/site/src/modules/workspaces/DynamicParameter/DynamicParameter.test.tsx +++ b/site/src/modules/workspaces/DynamicParameter/DynamicParameter.test.tsx @@ -1,7 +1,7 @@ +import { render } from "testHelpers/renderHelpers"; import { act, fireEvent, screen, waitFor } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; import type { PreviewParameter } from "api/typesGenerated"; -import { render } from "testHelpers/renderHelpers"; import { DynamicParameter } from "./DynamicParameter"; const createMockParameter = ( diff --git a/site/src/modules/workspaces/DynamicParameter/DynamicParameter.tsx b/site/src/modules/workspaces/DynamicParameter/DynamicParameter.tsx index fa8eab193b53a..f6d9862dd75db 100644 --- a/site/src/modules/workspaces/DynamicParameter/DynamicParameter.tsx +++ b/site/src/modules/workspaces/DynamicParameter/DynamicParameter.tsx @@ -453,6 +453,7 @@ const ParameterField: FC = ({ case "dropdown": { return ( onChange(value)} options={parameter.options.map((option) => ({ @@ -497,7 +498,10 @@ const ParameterField: FC = ({ return ( { diff --git a/site/src/modules/workspaces/EphemeralParametersDialog/EphemeralParametersDialog.tsx b/site/src/modules/workspaces/EphemeralParametersDialog/EphemeralParametersDialog.tsx index d1713d920f4a9..39fc52de730f4 100644 --- a/site/src/modules/workspaces/EphemeralParametersDialog/EphemeralParametersDialog.tsx +++ b/site/src/modules/workspaces/EphemeralParametersDialog/EphemeralParametersDialog.tsx @@ -9,7 +9,7 @@ import { DialogTitle, } from "components/Dialog/Dialog"; import type { FC } from "react"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; interface EphemeralParametersDialogProps { open: boolean; diff --git a/site/src/modules/workspaces/ErrorDialog/WorkspaceErrorDialog.tsx b/site/src/modules/workspaces/ErrorDialog/WorkspaceErrorDialog.tsx index 6d0390fbf902b..deeecb19ac4eb 100644 --- a/site/src/modules/workspaces/ErrorDialog/WorkspaceErrorDialog.tsx +++ b/site/src/modules/workspaces/ErrorDialog/WorkspaceErrorDialog.tsx @@ -9,7 +9,7 @@ import { DialogTitle, } from "components/Dialog/Dialog"; import type { FC } from "react"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; interface WorkspaceErrorDialogProps { open: boolean; diff --git a/site/src/modules/workspaces/WorkspaceAppStatus/WorkspaceAppStatus.stories.tsx b/site/src/modules/workspaces/WorkspaceAppStatus/WorkspaceAppStatus.stories.tsx index 9327ff6b46e98..3fbfc819b4f0a 100644 --- a/site/src/modules/workspaces/WorkspaceAppStatus/WorkspaceAppStatus.stories.tsx +++ b/site/src/modules/workspaces/WorkspaceAppStatus/WorkspaceAppStatus.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockWorkspaceAppStatus } from "testHelpers/entities"; import { withProxyProvider } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { WorkspaceAppStatus } from "./WorkspaceAppStatus"; const meta: Meta = { diff --git a/site/src/modules/workspaces/WorkspaceBuildData/WorkspaceBuildData.stories.tsx b/site/src/modules/workspaces/WorkspaceBuildData/WorkspaceBuildData.stories.tsx index 442cb9b6da5aa..ec984417e9140 100644 --- a/site/src/modules/workspaces/WorkspaceBuildData/WorkspaceBuildData.stories.tsx +++ b/site/src/modules/workspaces/WorkspaceBuildData/WorkspaceBuildData.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockWorkspaceBuild } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { WorkspaceBuildData } from "./WorkspaceBuildData"; const meta: Meta = { diff --git a/site/src/modules/workspaces/WorkspaceBuildLogs/WorkspaceBuildLogs.stories.tsx b/site/src/modules/workspaces/WorkspaceBuildLogs/WorkspaceBuildLogs.stories.tsx index c7b0138fb4c1b..55ccc6ff6e6d7 100644 --- a/site/src/modules/workspaces/WorkspaceBuildLogs/WorkspaceBuildLogs.stories.tsx +++ b/site/src/modules/workspaces/WorkspaceBuildLogs/WorkspaceBuildLogs.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { chromatic } from "testHelpers/chromatic"; import { MockWorkspaceBuildLogs } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { WorkspaceBuildLogs } from "./WorkspaceBuildLogs"; const meta: Meta = { diff --git a/site/src/modules/workspaces/WorkspaceBuildLogs/WorkspaceBuildLogs.tsx b/site/src/modules/workspaces/WorkspaceBuildLogs/WorkspaceBuildLogs.tsx index fcf6f0dbee549..161efe260ed96 100644 --- a/site/src/modules/workspaces/WorkspaceBuildLogs/WorkspaceBuildLogs.tsx +++ b/site/src/modules/workspaces/WorkspaceBuildLogs/WorkspaceBuildLogs.tsx @@ -1,9 +1,9 @@ import { type Interpolation, type Theme, useTheme } from "@emotion/react"; -import type { ProvisionerJobLog } from "api/typesGenerated"; +import type { ProvisionerJobLog, WorkspaceBuild } from "api/typesGenerated"; import type { Line } from "components/Logs/LogLine"; import { DEFAULT_LOG_LINE_SIDE_PADDING, Logs } from "components/Logs/Logs"; import dayjs from "dayjs"; -import { type FC, Fragment, type HTMLAttributes } from "react"; +import { type FC, Fragment, type HTMLAttributes, useMemo } from "react"; import { BODY_FONT_FAMILY, MONOSPACE_FONT_FAMILY } from "theme/constants"; const Language = { @@ -42,15 +42,37 @@ interface WorkspaceBuildLogsProps extends HTMLAttributes { hideTimestamps?: boolean; sticky?: boolean; logs: ProvisionerJobLog[]; + build?: WorkspaceBuild; } export const WorkspaceBuildLogs: FC = ({ hideTimestamps, sticky, logs, + build, ...attrs }) => { const theme = useTheme(); + + const _processedLogs = useMemo(() => { + const allLogs = logs || []; + + // Add synthetic overflow message if needed + if (build?.job?.logs_overflowed) { + allLogs.push({ + id: -1, + created_at: new Date().toISOString(), + log_level: "error", + log_source: "provisioner", + output: + "Provisioner logs exceeded the max size of 1MB. Will not continue to write provisioner logs for workspace build.", + stage: "overflow", + }); + } + + return allLogs; + }, [logs, build?.job?.logs_overflowed]); + const groupedLogsByStage = groupLogsByStage(logs); return ( diff --git a/site/src/modules/workspaces/WorkspaceDormantBadge/WorkspaceDormantBadge.stories.tsx b/site/src/modules/workspaces/WorkspaceDormantBadge/WorkspaceDormantBadge.stories.tsx index 94c380b3d1e23..c7a159c0ce1f7 100644 --- a/site/src/modules/workspaces/WorkspaceDormantBadge/WorkspaceDormantBadge.stories.tsx +++ b/site/src/modules/workspaces/WorkspaceDormantBadge/WorkspaceDormantBadge.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { userEvent, within } from "@storybook/test"; import { MockDormantWorkspace } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { userEvent, within } from "storybook/test"; import { WorkspaceDormantBadge } from "./WorkspaceDormantBadge"; const meta: Meta = { diff --git a/site/src/modules/workspaces/WorkspaceMoreActions/ChangeWorkspaceVersionDialog.stories.tsx b/site/src/modules/workspaces/WorkspaceMoreActions/ChangeWorkspaceVersionDialog.stories.tsx index 45e85c3288292..7ff35868d0e75 100644 --- a/site/src/modules/workspaces/WorkspaceMoreActions/ChangeWorkspaceVersionDialog.stories.tsx +++ b/site/src/modules/workspaces/WorkspaceMoreActions/ChangeWorkspaceVersionDialog.stories.tsx @@ -1,10 +1,10 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { templateVersionsQueryKey } from "api/queries/templates"; import { MockTemplateVersion, MockTemplateVersionWithMarkdownMessage, MockWorkspace, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { templateVersionsQueryKey } from "api/queries/templates"; import { ChangeWorkspaceVersionDialog } from "./ChangeWorkspaceVersionDialog"; const noMessage = { diff --git a/site/src/modules/workspaces/WorkspaceMoreActions/DownloadLogsDialog.stories.tsx b/site/src/modules/workspaces/WorkspaceMoreActions/DownloadLogsDialog.stories.tsx index c8eab563c58ef..447a812399024 100644 --- a/site/src/modules/workspaces/WorkspaceMoreActions/DownloadLogsDialog.stories.tsx +++ b/site/src/modules/workspaces/WorkspaceMoreActions/DownloadLogsDialog.stories.tsx @@ -1,8 +1,8 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, fn, userEvent, waitFor, within } from "@storybook/test"; -import { agentLogsKey, buildLogsKey } from "api/queries/workspaces"; import { MockWorkspace, MockWorkspaceAgent } from "testHelpers/entities"; import { withDesktopViewport } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { agentLogsKey, buildLogsKey } from "api/queries/workspaces"; +import { expect, fn, userEvent, waitFor, within } from "storybook/test"; import { DownloadLogsDialog } from "./DownloadLogsDialog"; const meta: Meta = { diff --git a/site/src/modules/workspaces/WorkspaceMoreActions/UpdateBuildParametersDialogExperimental.tsx b/site/src/modules/workspaces/WorkspaceMoreActions/UpdateBuildParametersDialogExperimental.tsx index 850f31185af2c..042379ca888f9 100644 --- a/site/src/modules/workspaces/WorkspaceMoreActions/UpdateBuildParametersDialogExperimental.tsx +++ b/site/src/modules/workspaces/WorkspaceMoreActions/UpdateBuildParametersDialogExperimental.tsx @@ -9,7 +9,7 @@ import { DialogTitle, } from "components/Dialog/Dialog"; import type { FC } from "react"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; type UpdateBuildParametersDialogExperimentalProps = { open: boolean; diff --git a/site/src/modules/workspaces/WorkspaceMoreActions/WorkspaceDeleteDialog.stories.tsx b/site/src/modules/workspaces/WorkspaceMoreActions/WorkspaceDeleteDialog.stories.tsx index e7b168e57e973..b5fcd44b7c9c8 100644 --- a/site/src/modules/workspaces/WorkspaceMoreActions/WorkspaceDeleteDialog.stories.tsx +++ b/site/src/modules/workspaces/WorkspaceMoreActions/WorkspaceDeleteDialog.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockFailedWorkspace, MockWorkspace } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { daysAgo } from "utils/time"; import { WorkspaceDeleteDialog } from "./WorkspaceDeleteDialog"; diff --git a/site/src/modules/workspaces/WorkspaceMoreActions/WorkspaceMoreActions.tsx b/site/src/modules/workspaces/WorkspaceMoreActions/WorkspaceMoreActions.tsx index 19d12ab2a394e..ca0e9803336e2 100644 --- a/site/src/modules/workspaces/WorkspaceMoreActions/WorkspaceMoreActions.tsx +++ b/site/src/modules/workspaces/WorkspaceMoreActions/WorkspaceMoreActions.tsx @@ -1,6 +1,5 @@ import { MissingBuildParameters, ParameterValidationError } from "api/api"; -import { isApiError } from "api/errors"; -import { type ApiError, getErrorMessage } from "api/errors"; +import { type ApiError, getErrorMessage, isApiError } from "api/errors"; import { changeVersion, deleteWorkspace, @@ -26,14 +25,14 @@ import { } from "lucide-react"; import { type FC, useEffect, useState } from "react"; import { useMutation, useQuery, useQueryClient } from "react-query"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { WorkspaceErrorDialog } from "../ErrorDialog/WorkspaceErrorDialog"; import { ChangeWorkspaceVersionDialog } from "./ChangeWorkspaceVersionDialog"; import { DownloadLogsDialog } from "./DownloadLogsDialog"; import { UpdateBuildParametersDialog } from "./UpdateBuildParametersDialog"; import { UpdateBuildParametersDialogExperimental } from "./UpdateBuildParametersDialogExperimental"; -import { WorkspaceDeleteDialog } from "./WorkspaceDeleteDialog"; import { useWorkspaceDuplication } from "./useWorkspaceDuplication"; +import { WorkspaceDeleteDialog } from "./WorkspaceDeleteDialog"; type WorkspaceMoreActionsProps = { workspace: Workspace; @@ -205,7 +204,7 @@ export const WorkspaceMoreActions: FC = ({ workspaceName={workspace.name} templateVersionId={ changeVersionMutation.error instanceof ParameterValidationError - ? changeVersionMutation.error?.versionId + ? changeVersionMutation.error.versionId : undefined } /> diff --git a/site/src/modules/workspaces/WorkspaceMoreActions/useWorkspaceDuplication.test.tsx b/site/src/modules/workspaces/WorkspaceMoreActions/useWorkspaceDuplication.test.tsx index 8e06e10136f92..d0e7af6d1aafd 100644 --- a/site/src/modules/workspaces/WorkspaceMoreActions/useWorkspaceDuplication.test.tsx +++ b/site/src/modules/workspaces/WorkspaceMoreActions/useWorkspaceDuplication.test.tsx @@ -1,10 +1,10 @@ -import { act, waitFor } from "@testing-library/react"; -import type { Workspace } from "api/typesGenerated"; import * as M from "testHelpers/entities"; import { type GetLocationSnapshot, renderHookWithAuth, } from "testHelpers/hooks"; +import { act, waitFor } from "@testing-library/react"; +import type { Workspace } from "api/typesGenerated"; import CreateWorkspacePage from "../../../pages/CreateWorkspacePage/CreateWorkspacePage"; import { useWorkspaceDuplication } from "./useWorkspaceDuplication"; diff --git a/site/src/modules/workspaces/WorkspaceMoreActions/useWorkspaceDuplication.ts b/site/src/modules/workspaces/WorkspaceMoreActions/useWorkspaceDuplication.ts index abb34a44e5ad1..b2d104decebfc 100644 --- a/site/src/modules/workspaces/WorkspaceMoreActions/useWorkspaceDuplication.ts +++ b/site/src/modules/workspaces/WorkspaceMoreActions/useWorkspaceDuplication.ts @@ -3,7 +3,7 @@ import type { Workspace, WorkspaceBuildParameter } from "api/typesGenerated"; import { linkToTemplate, useLinks } from "modules/navigation"; import { useCallback } from "react"; import { useQuery } from "react-query"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; import type { CreateWorkspaceMode } from "../../../pages/CreateWorkspacePage/CreateWorkspacePage"; function getDuplicationUrlParams( diff --git a/site/src/modules/workspaces/WorkspaceOutdatedTooltip/WorkspaceOutdatedTooltip.stories.tsx b/site/src/modules/workspaces/WorkspaceOutdatedTooltip/WorkspaceOutdatedTooltip.stories.tsx index 843f8131b793f..fc0a28815752b 100644 --- a/site/src/modules/workspaces/WorkspaceOutdatedTooltip/WorkspaceOutdatedTooltip.stories.tsx +++ b/site/src/modules/workspaces/WorkspaceOutdatedTooltip/WorkspaceOutdatedTooltip.stories.tsx @@ -1,11 +1,11 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, userEvent, waitFor, within } from "@storybook/test"; import { MockTemplate, MockTemplateVersion, MockWorkspace, } from "testHelpers/entities"; import { withDashboardProvider } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { expect, userEvent, waitFor, within } from "storybook/test"; import { WorkspaceOutdatedTooltip } from "./WorkspaceOutdatedTooltip"; const meta: Meta = { diff --git a/site/src/modules/workspaces/WorkspaceOutdatedTooltip/WorkspaceOutdatedTooltip.tsx b/site/src/modules/workspaces/WorkspaceOutdatedTooltip/WorkspaceOutdatedTooltip.tsx index b79183acd7471..e1e83d502781a 100644 --- a/site/src/modules/workspaces/WorkspaceOutdatedTooltip/WorkspaceOutdatedTooltip.tsx +++ b/site/src/modules/workspaces/WorkspaceOutdatedTooltip/WorkspaceOutdatedTooltip.tsx @@ -4,6 +4,7 @@ import Skeleton from "@mui/material/Skeleton"; import { getErrorDetail, getErrorMessage } from "api/errors"; import { templateVersion } from "api/queries/templates"; import type { Workspace } from "api/typesGenerated"; +import { usePopover } from "components/deprecated/Popover/Popover"; import { displayError } from "components/GlobalSnackbar/utils"; import { HelpTooltip, @@ -14,15 +15,13 @@ import { HelpTooltipTitle, HelpTooltipTrigger, } from "components/HelpTooltip/HelpTooltip"; -import { usePopover } from "components/deprecated/Popover/Popover"; -import { InfoIcon } from "lucide-react"; -import { RotateCcwIcon } from "lucide-react"; +import { InfoIcon, RotateCcwIcon } from "lucide-react"; import { linkToTemplate, useLinks } from "modules/navigation"; import type { FC } from "react"; import { useQuery } from "react-query"; import { - WorkspaceUpdateDialogs, useWorkspaceUpdate, + WorkspaceUpdateDialogs, } from "../WorkspaceUpdateDialogs"; interface TooltipProps { diff --git a/site/src/modules/workspaces/WorkspaceStatusIndicator/WorkspaceStatusIndicator.stories.tsx b/site/src/modules/workspaces/WorkspaceStatusIndicator/WorkspaceStatusIndicator.stories.tsx index 5bdb870708447..61dd592886c9d 100644 --- a/site/src/modules/workspaces/WorkspaceStatusIndicator/WorkspaceStatusIndicator.stories.tsx +++ b/site/src/modules/workspaces/WorkspaceStatusIndicator/WorkspaceStatusIndicator.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import type { Workspace, WorkspaceStatus } from "api/typesGenerated"; import { MockWorkspace } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import type { Workspace, WorkspaceStatus } from "api/typesGenerated"; import { WorkspaceStatusIndicator } from "./WorkspaceStatusIndicator"; const meta: Meta = { diff --git a/site/src/modules/workspaces/WorkspaceStatusIndicator/WorkspaceStatusIndicator.tsx b/site/src/modules/workspaces/WorkspaceStatusIndicator/WorkspaceStatusIndicator.tsx index 972096314e1ee..c7e9e53ba8ff5 100644 --- a/site/src/modules/workspaces/WorkspaceStatusIndicator/WorkspaceStatusIndicator.tsx +++ b/site/src/modules/workspaces/WorkspaceStatusIndicator/WorkspaceStatusIndicator.tsx @@ -10,8 +10,8 @@ import { TooltipProvider, TooltipTrigger, } from "components/Tooltip/Tooltip"; -import type { FC } from "react"; import type React from "react"; +import type { FC } from "react"; import { type DisplayWorkspaceStatusType, getDisplayWorkspaceStatus, diff --git a/site/src/modules/workspaces/WorkspaceTiming/Chart/Bar.tsx b/site/src/modules/workspaces/WorkspaceTiming/Chart/Bar.tsx index 2c3a1bf28b152..b76aa11a9883a 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/Chart/Bar.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/Chart/Bar.tsx @@ -1,5 +1,5 @@ import type { Interpolation, Theme } from "@emotion/react"; -import { type ButtonHTMLAttributes, type HTMLProps, forwardRef } from "react"; +import { type ButtonHTMLAttributes, forwardRef, type HTMLProps } from "react"; export type BarColors = { stroke: string; diff --git a/site/src/modules/workspaces/WorkspaceTiming/Chart/Tooltip.tsx b/site/src/modules/workspaces/WorkspaceTiming/Chart/Tooltip.tsx index 85b556c786a07..ad86ce9d59ce0 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/Chart/Tooltip.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/Chart/Tooltip.tsx @@ -5,7 +5,7 @@ import MUITooltip, { } from "@mui/material/Tooltip"; import { ExternalLinkIcon } from "lucide-react"; import type { FC, HTMLProps } from "react"; -import { Link, type LinkProps } from "react-router-dom"; +import { Link, type LinkProps } from "react-router"; export type TooltipProps = MUITooltipProps; diff --git a/site/src/modules/workspaces/WorkspaceTiming/Chart/XAxis.tsx b/site/src/modules/workspaces/WorkspaceTiming/Chart/XAxis.tsx index 82c385e533802..c7409f5238522 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/Chart/XAxis.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/Chart/XAxis.tsx @@ -93,7 +93,7 @@ export const XAxisRow: FC = ({ yAxisLabelId, ...htmlProps }) => { }; return ( -
    { for (const s of scales) { diff --git a/site/src/modules/workspaces/WorkspaceTiming/ResourcesChart.tsx b/site/src/modules/workspaces/WorkspaceTiming/ResourcesChart.tsx index 8384d8c60857b..f2757ee48e5c0 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/ResourcesChart.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/ResourcesChart.tsx @@ -11,6 +11,14 @@ import { ChartToolbar, } from "./Chart/Chart"; import { Tooltip, TooltipLink, TooltipTitle } from "./Chart/Tooltip"; +import { + calcDuration, + calcOffset, + formatTime, + makeTicks, + mergeTimeRanges, + type TimeRange, +} from "./Chart/utils"; import { XAxis, XAxisRow, XAxisSection } from "./Chart/XAxis"; import { YAxis, @@ -19,14 +27,6 @@ import { YAxisLabels, YAxisSection, } from "./Chart/YAxis"; -import { - type TimeRange, - calcDuration, - calcOffset, - formatTime, - makeTicks, - mergeTimeRanges, -} from "./Chart/utils"; import type { Stage } from "./StagesChart"; type ResourceTiming = { diff --git a/site/src/modules/workspaces/WorkspaceTiming/ScriptsChart.tsx b/site/src/modules/workspaces/WorkspaceTiming/ScriptsChart.tsx index 3756589a8056a..d0f6ac6045383 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/ScriptsChart.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/ScriptsChart.tsx @@ -11,6 +11,14 @@ import { ChartToolbar, } from "./Chart/Chart"; import { Tooltip, TooltipTitle } from "./Chart/Tooltip"; +import { + calcDuration, + calcOffset, + formatTime, + makeTicks, + mergeTimeRanges, + type TimeRange, +} from "./Chart/utils"; import { XAxis, XAxisRow, XAxisSection } from "./Chart/XAxis"; import { YAxis, @@ -19,14 +27,6 @@ import { YAxisLabels, YAxisSection, } from "./Chart/YAxis"; -import { - type TimeRange, - calcDuration, - calcOffset, - formatTime, - makeTicks, - mergeTimeRanges, -} from "./Chart/utils"; import type { Stage } from "./StagesChart"; type ScriptTiming = { diff --git a/site/src/modules/workspaces/WorkspaceTiming/StagesChart.tsx b/site/src/modules/workspaces/WorkspaceTiming/StagesChart.tsx index c9e9f8d3a71b2..103d4717f20c6 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/StagesChart.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/StagesChart.tsx @@ -1,7 +1,6 @@ import type { Interpolation, Theme } from "@emotion/react"; import type { TimingStage } from "api/typesGenerated"; -import { CircleAlertIcon } from "lucide-react"; -import { InfoIcon } from "lucide-react"; +import { CircleAlertIcon, InfoIcon } from "lucide-react"; import type { FC } from "react"; import { Bar, ClickableBar } from "./Chart/Bar"; import { Blocks } from "./Chart/Blocks"; @@ -12,6 +11,14 @@ import { TooltipShortDescription, TooltipTitle, } from "./Chart/Tooltip"; +import { + calcDuration, + calcOffset, + formatTime, + makeTicks, + mergeTimeRanges, + type TimeRange, +} from "./Chart/utils"; import { XAxis, XAxisRow, XAxisSection } from "./Chart/XAxis"; import { YAxis, @@ -20,14 +27,6 @@ import { YAxisLabels, YAxisSection, } from "./Chart/YAxis"; -import { - type TimeRange, - calcDuration, - calcOffset, - formatTime, - makeTicks, - mergeTimeRanges, -} from "./Chart/utils"; export type Stage = { /** diff --git a/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.stories.tsx b/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.stories.tsx index 36f08b36c0ca0..9c8ce12168631 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.stories.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.stories.tsx @@ -1,8 +1,8 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, userEvent, waitFor, within } from "@storybook/test"; import { chromatic } from "testHelpers/chromatic"; -import { WorkspaceTimings } from "./WorkspaceTimings"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { expect, userEvent, waitFor, within } from "storybook/test"; import { WorkspaceTimingsResponse } from "./storybookData"; +import { WorkspaceTimings } from "./WorkspaceTimings"; const meta: Meta = { title: "modules/workspaces/WorkspaceTimings", diff --git a/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.tsx b/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.tsx index 8b3f42c7b93e3..847b531949c00 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/WorkspaceTimings.tsx @@ -12,18 +12,18 @@ import uniqBy from "lodash/uniqBy"; import { ChevronDownIcon, ChevronUpIcon } from "lucide-react"; import { type FC, useState } from "react"; import { - type TimeRange, calcDuration, formatTime, mergeTimeRanges, + type TimeRange, } from "./Chart/utils"; -import { ResourcesChart, isCoderResource } from "./ResourcesChart"; +import { isCoderResource, ResourcesChart } from "./ResourcesChart"; import { ScriptsChart } from "./ScriptsChart"; import { - type Stage, - StagesChart, agentStages, provisioningStages, + type Stage, + StagesChart, } from "./StagesChart"; type TimingView = @@ -218,7 +218,7 @@ const toTimeRange = (timing: { }; }; -const humanizeDuration = (durationMs: number): string => { +const _humanizeDuration = (durationMs: number): string => { const seconds = Math.floor(durationMs / 1000); const minutes = Math.floor(seconds / 60); const hours = Math.floor(minutes / 60); diff --git a/site/src/modules/workspaces/actions.ts b/site/src/modules/workspaces/actions.ts index 8b17d3e937c74..533cf981ed6d8 100644 --- a/site/src/modules/workspaces/actions.ts +++ b/site/src/modules/workspaces/actions.ts @@ -63,6 +63,14 @@ export const abilitiesByWorkspaceStatus = ( }; } + if (workspace.latest_build.has_external_agent) { + return { + actions: [], + canCancel: false, + canAcceptJobs: true, + }; + } + const status = workspace.latest_build.status; switch (status) { diff --git a/site/src/modules/workspaces/generateWorkspaceName.ts b/site/src/modules/workspaces/generateWorkspaceName.ts index 00a6542180963..9dff54a59b4f5 100644 --- a/site/src/modules/workspaces/generateWorkspaceName.ts +++ b/site/src/modules/workspaces/generateWorkspaceName.ts @@ -1,11 +1,15 @@ +import isChromatic from "chromatic/isChromatic"; import { - NumberDictionary, animals, colors, + NumberDictionary, uniqueNamesGenerator, } from "unique-names-generator"; export const generateWorkspaceName = () => { + if (isChromatic()) { + return "yellow-bird-23"; + } const numberDictionary = NumberDictionary.generate({ min: 0, max: 99 }); return uniqueNamesGenerator({ dictionaries: [colors, animals, numberDictionary], diff --git a/site/src/pages/404Page/404Page.stories.tsx b/site/src/pages/404Page/404Page.stories.tsx index 8273db09c4da7..8f1b52ab7b629 100644 --- a/site/src/pages/404Page/404Page.stories.tsx +++ b/site/src/pages/404Page/404Page.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import NotFoundPage from "./404Page"; const meta: Meta = { diff --git a/site/src/pages/AuditPage/AuditFilter.tsx b/site/src/pages/AuditPage/AuditFilter.tsx index c625a7d60797e..973d2d7a8e7ba 100644 --- a/site/src/pages/AuditPage/AuditFilter.tsx +++ b/site/src/pages/AuditPage/AuditFilter.tsx @@ -1,14 +1,14 @@ import { AuditActions, ResourceTypes } from "api/typesGenerated"; import { Filter, MenuSkeleton, type useFilter } from "components/Filter/Filter"; +import { + type UseFilterMenuOptions, + useFilterMenu, +} from "components/Filter/menu"; import { SelectFilter, type SelectFilterOption, } from "components/Filter/SelectFilter"; import { type UserFilterMenu, UserMenu } from "components/Filter/UserFilter"; -import { - type UseFilterMenuOptions, - useFilterMenu, -} from "components/Filter/menu"; import capitalize from "lodash/capitalize"; import { type OrganizationsFilterMenu, diff --git a/site/src/pages/AuditPage/AuditLogRow/AuditLogDescription/AuditLogDescription.stories.tsx b/site/src/pages/AuditPage/AuditLogRow/AuditLogDescription/AuditLogDescription.stories.tsx index 99d4f900ca0d6..8abf5442eb9cf 100644 --- a/site/src/pages/AuditPage/AuditLogRow/AuditLogDescription/AuditLogDescription.stories.tsx +++ b/site/src/pages/AuditPage/AuditLogRow/AuditLogDescription/AuditLogDescription.stories.tsx @@ -1,4 +1,3 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockAuditLog, MockAuditLogRequestPasswordReset, @@ -7,6 +6,7 @@ import { MockAuditLogWithWorkspaceBuild, MockWorkspaceCreateAuditLogForDifferentOwner, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { AuditLogDescription } from "./AuditLogDescription"; const meta: Meta = { diff --git a/site/src/pages/AuditPage/AuditLogRow/AuditLogDescription/AuditLogDescription.tsx b/site/src/pages/AuditPage/AuditLogRow/AuditLogDescription/AuditLogDescription.tsx index ed105989f1f02..81f4be980634e 100644 --- a/site/src/pages/AuditPage/AuditLogRow/AuditLogDescription/AuditLogDescription.tsx +++ b/site/src/pages/AuditPage/AuditLogRow/AuditLogDescription/AuditLogDescription.tsx @@ -1,7 +1,7 @@ import Link from "@mui/material/Link"; import type { AuditLog } from "api/typesGenerated"; import type { FC } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { BuildAuditDescription } from "./BuildAuditDescription"; interface AuditLogDescriptionProps { diff --git a/site/src/pages/AuditPage/AuditLogRow/AuditLogDescription/BuildAuditDescription.tsx b/site/src/pages/AuditPage/AuditLogRow/AuditLogDescription/BuildAuditDescription.tsx index 354eb59713174..1451177e4c14d 100644 --- a/site/src/pages/AuditPage/AuditLogRow/AuditLogDescription/BuildAuditDescription.tsx +++ b/site/src/pages/AuditPage/AuditLogRow/AuditLogDescription/BuildAuditDescription.tsx @@ -1,7 +1,7 @@ import Link from "@mui/material/Link"; import type { AuditLog } from "api/typesGenerated"; import { type FC, useMemo } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { systemBuildReasons } from "utils/workspace"; interface BuildAuditDescriptionProps { diff --git a/site/src/pages/AuditPage/AuditLogRow/AuditLogRow.stories.tsx b/site/src/pages/AuditPage/AuditLogRow/AuditLogRow.stories.tsx index ab5e55f8bbd84..03ccfcf38dbae 100644 --- a/site/src/pages/AuditPage/AuditLogRow/AuditLogRow.stories.tsx +++ b/site/src/pages/AuditPage/AuditLogRow/AuditLogRow.stories.tsx @@ -1,10 +1,3 @@ -import Table from "@mui/material/Table"; -import TableBody from "@mui/material/TableBody"; -import TableCell from "@mui/material/TableCell"; -import TableContainer from "@mui/material/TableContainer"; -import TableHead from "@mui/material/TableHead"; -import TableRow from "@mui/material/TableRow"; -import type { Meta, StoryObj } from "@storybook/react"; import { chromatic } from "testHelpers/chromatic"; import { MockAuditLog, @@ -15,6 +8,13 @@ import { MockAuditLogWithWorkspaceBuild, MockUserOwner, } from "testHelpers/entities"; +import Table from "@mui/material/Table"; +import TableBody from "@mui/material/TableBody"; +import TableCell from "@mui/material/TableCell"; +import TableContainer from "@mui/material/TableContainer"; +import TableHead from "@mui/material/TableHead"; +import TableRow from "@mui/material/TableRow"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { AuditLogRow } from "./AuditLogRow"; const meta: Meta = { diff --git a/site/src/pages/AuditPage/AuditLogRow/AuditLogRow.tsx b/site/src/pages/AuditPage/AuditLogRow/AuditLogRow.tsx index cccdcdf5e6e49..9661fbab59e75 100644 --- a/site/src/pages/AuditPage/AuditLogRow/AuditLogRow.tsx +++ b/site/src/pages/AuditPage/AuditLogRow/AuditLogRow.tsx @@ -9,10 +9,9 @@ import { DropdownArrow } from "components/DropdownArrow/DropdownArrow"; import { Stack } from "components/Stack/Stack"; import { StatusPill } from "components/StatusPill/StatusPill"; import { TimelineEntry } from "components/Timeline/TimelineEntry"; -import { InfoIcon } from "lucide-react"; -import { NetworkIcon } from "lucide-react"; +import { InfoIcon, NetworkIcon } from "lucide-react"; import { type FC, useState } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import userAgentParser from "ua-parser-js"; import { buildReasonLabels } from "utils/workspace"; import { AuditLogDescription } from "./AuditLogDescription/AuditLogDescription"; diff --git a/site/src/pages/AuditPage/AuditPage.test.tsx b/site/src/pages/AuditPage/AuditPage.test.tsx index bcbc40da8af5c..ea7e5d9c44f06 100644 --- a/site/src/pages/AuditPage/AuditPage.test.tsx +++ b/site/src/pages/AuditPage/AuditPage.test.tsx @@ -1,8 +1,3 @@ -import { screen, waitFor } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; -import { API } from "api/api"; -import { DEFAULT_RECORDS_PER_PAGE } from "components/PaginationWidget/utils"; -import { http, HttpResponse } from "msw"; import { MockAuditLog, MockAuditLog2, @@ -13,6 +8,12 @@ import { waitForLoaderToBeRemoved, } from "testHelpers/renderHelpers"; import { server } from "testHelpers/server"; +import { screen, waitFor } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { API } from "api/api"; +import type { AuditLogsRequest } from "api/typesGenerated"; +import { DEFAULT_RECORDS_PER_PAGE } from "components/PaginationWidget/utils"; +import { HttpResponse, http } from "msw"; import * as CreateDayString from "utils/createDayString"; import AuditPage from "./AuditPage"; @@ -106,7 +107,7 @@ describe("AuditPage", () => { await userEvent.type(filterField, query); await waitFor(() => - expect(getAuditLogsSpy).toBeCalledWith({ + expect(getAuditLogsSpy).toHaveBeenCalledWith<[AuditLogsRequest]>({ limit: DEFAULT_RECORDS_PER_PAGE, offset: 0, q: query, diff --git a/site/src/pages/AuditPage/AuditPage.tsx b/site/src/pages/AuditPage/AuditPage.tsx index f63adbcd4136b..6c8c52a679ada 100644 --- a/site/src/pages/AuditPage/AuditPage.tsx +++ b/site/src/pages/AuditPage/AuditPage.tsx @@ -8,7 +8,7 @@ import { useFeatureVisibility } from "modules/dashboard/useFeatureVisibility"; import { useOrganizationsFilterMenu } from "modules/tableFiltering/options"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; -import { useSearchParams } from "react-router-dom"; +import { useSearchParams } from "react-router"; import { pageTitle } from "utils/page"; import { useActionFilterMenu, useResourceTypeFilterMenu } from "./AuditFilter"; import { AuditPageView } from "./AuditPageView"; @@ -33,7 +33,8 @@ const AuditPage: FC = () => { const [searchParams, setSearchParams] = useSearchParams(); const auditsQuery = usePaginatedQuery(paginatedAudits(searchParams)); const filter = useFilter({ - searchParamsResult: [searchParams, setSearchParams], + searchParams, + onSearchParamsChange: setSearchParams, onUpdate: auditsQuery.goToFirstPage, }); diff --git a/site/src/pages/AuditPage/AuditPageView.stories.tsx b/site/src/pages/AuditPage/AuditPageView.stories.tsx index 323ae6d78bde8..29715db05280b 100644 --- a/site/src/pages/AuditPage/AuditPageView.stories.tsx +++ b/site/src/pages/AuditPage/AuditPageView.stories.tsx @@ -1,7 +1,14 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import { chromaticWithTablet } from "testHelpers/chromatic"; +import { + MockAuditLog, + MockAuditLog2, + MockAuditLog3, + MockUserOwner, +} from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { - MockMenu, getDefaultFilterProps, + MockMenu, } from "components/Filter/storyHelpers"; import { mockInitialRenderResult, @@ -9,13 +16,6 @@ import { } from "components/PaginationWidget/PaginationContainer.mocks"; import type { UsePaginatedQueryResult } from "hooks/usePaginatedQuery"; import type { ComponentProps } from "react"; -import { chromaticWithTablet } from "testHelpers/chromatic"; -import { - MockAuditLog, - MockAuditLog2, - MockAuditLog3, - MockUserOwner, -} from "testHelpers/entities"; import { AuditPageView } from "./AuditPageView"; type FilterProps = ComponentProps["filterProps"]; diff --git a/site/src/pages/CliAuthPage/CliAuthPageView.stories.tsx b/site/src/pages/CliAuthPage/CliAuthPageView.stories.tsx index a38a1de7513f3..d0c17aba91546 100644 --- a/site/src/pages/CliAuthPage/CliAuthPageView.stories.tsx +++ b/site/src/pages/CliAuthPage/CliAuthPageView.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { CliAuthPageView } from "./CliAuthPageView"; const meta: Meta = { diff --git a/site/src/pages/CliAuthPage/CliAuthPageView.tsx b/site/src/pages/CliAuthPage/CliAuthPageView.tsx index 716f97a70c888..e836127f61fc8 100644 --- a/site/src/pages/CliAuthPage/CliAuthPageView.tsx +++ b/site/src/pages/CliAuthPage/CliAuthPageView.tsx @@ -5,7 +5,7 @@ import { Welcome } from "components/Welcome/Welcome"; import { useClipboard } from "hooks"; import { CheckIcon, CopyIcon } from "lucide-react"; import type { FC } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; interface CliAuthPageViewProps { sessionToken?: string; diff --git a/site/src/pages/CliInstallPage/CliInstallPageView.stories.tsx b/site/src/pages/CliInstallPage/CliInstallPageView.stories.tsx index 25acfa457ff78..4dd303ba10c18 100644 --- a/site/src/pages/CliInstallPage/CliInstallPageView.stories.tsx +++ b/site/src/pages/CliInstallPage/CliInstallPageView.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { CliInstallPageView } from "./CliInstallPageView"; const meta: Meta = { diff --git a/site/src/pages/CliInstallPage/CliInstallPageView.tsx b/site/src/pages/CliInstallPage/CliInstallPageView.tsx index db77abcb28f04..0dc7240870759 100644 --- a/site/src/pages/CliInstallPage/CliInstallPageView.tsx +++ b/site/src/pages/CliInstallPage/CliInstallPageView.tsx @@ -2,7 +2,7 @@ import type { Interpolation, Theme } from "@emotion/react"; import { CodeExample } from "components/CodeExample/CodeExample"; import { Welcome } from "components/Welcome/Welcome"; import type { FC } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; type CliInstallPageViewProps = { origin: string; diff --git a/site/src/pages/ConnectionLogPage/ConnectionLogFilter.tsx b/site/src/pages/ConnectionLogPage/ConnectionLogFilter.tsx index 9d049c4e6865b..fcf1efeb7dda0 100644 --- a/site/src/pages/ConnectionLogPage/ConnectionLogFilter.tsx +++ b/site/src/pages/ConnectionLogPage/ConnectionLogFilter.tsx @@ -1,14 +1,14 @@ import { ConnectionLogStatuses, ConnectionTypes } from "api/typesGenerated"; import { Filter, MenuSkeleton, type useFilter } from "components/Filter/Filter"; +import { + type UseFilterMenuOptions, + useFilterMenu, +} from "components/Filter/menu"; import { SelectFilter, type SelectFilterOption, } from "components/Filter/SelectFilter"; import { type UserFilterMenu, UserMenu } from "components/Filter/UserFilter"; -import { - type UseFilterMenuOptions, - useFilterMenu, -} from "components/Filter/menu"; import capitalize from "lodash/capitalize"; import { type OrganizationsFilterMenu, diff --git a/site/src/pages/ConnectionLogPage/ConnectionLogPage.test.tsx b/site/src/pages/ConnectionLogPage/ConnectionLogPage.test.tsx index 7beea3f033e30..2ce25e5a33369 100644 --- a/site/src/pages/ConnectionLogPage/ConnectionLogPage.test.tsx +++ b/site/src/pages/ConnectionLogPage/ConnectionLogPage.test.tsx @@ -1,8 +1,3 @@ -import { screen, waitFor } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; -import { API } from "api/api"; -import { DEFAULT_RECORDS_PER_PAGE } from "components/PaginationWidget/utils"; -import { http, HttpResponse } from "msw"; import { MockConnectedSSHConnectionLog, MockDisconnectedSSHConnectionLog, @@ -13,6 +8,11 @@ import { waitForLoaderToBeRemoved, } from "testHelpers/renderHelpers"; import { server } from "testHelpers/server"; +import { screen, waitFor } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { API } from "api/api"; +import { DEFAULT_RECORDS_PER_PAGE } from "components/PaginationWidget/utils"; +import { HttpResponse, http } from "msw"; import * as CreateDayString from "utils/createDayString"; import ConnectionLogPage from "./ConnectionLogPage"; diff --git a/site/src/pages/ConnectionLogPage/ConnectionLogPage.tsx b/site/src/pages/ConnectionLogPage/ConnectionLogPage.tsx index 9cd27bac95bf4..fd7fc12e38901 100644 --- a/site/src/pages/ConnectionLogPage/ConnectionLogPage.tsx +++ b/site/src/pages/ConnectionLogPage/ConnectionLogPage.tsx @@ -8,7 +8,7 @@ import { useFeatureVisibility } from "modules/dashboard/useFeatureVisibility"; import { useOrganizationsFilterMenu } from "modules/tableFiltering/options"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; -import { useSearchParams } from "react-router-dom"; +import { useSearchParams } from "react-router"; import { pageTitle } from "utils/page"; import { useStatusFilterMenu, useTypeFilterMenu } from "./ConnectionLogFilter"; import { ConnectionLogPageView } from "./ConnectionLogPageView"; @@ -29,7 +29,8 @@ const ConnectionLogPage: FC = () => { paginatedConnectionLogs(searchParams), ); const filter = useFilter({ - searchParamsResult: [searchParams, setSearchParams], + searchParams, + onSearchParamsChange: setSearchParams, onUpdate: connectionlogsQuery.goToFirstPage, }); diff --git a/site/src/pages/ConnectionLogPage/ConnectionLogPageView.stories.tsx b/site/src/pages/ConnectionLogPage/ConnectionLogPageView.stories.tsx index 393127280409b..7376a75daec4a 100644 --- a/site/src/pages/ConnectionLogPage/ConnectionLogPageView.stories.tsx +++ b/site/src/pages/ConnectionLogPage/ConnectionLogPageView.stories.tsx @@ -1,7 +1,13 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import { chromaticWithTablet } from "testHelpers/chromatic"; +import { + MockConnectedSSHConnectionLog, + MockDisconnectedSSHConnectionLog, + MockUserOwner, +} from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { - MockMenu, getDefaultFilterProps, + MockMenu, } from "components/Filter/storyHelpers"; import { mockInitialRenderResult, @@ -9,12 +15,6 @@ import { } from "components/PaginationWidget/PaginationContainer.mocks"; import type { UsePaginatedQueryResult } from "hooks/usePaginatedQuery"; import type { ComponentProps } from "react"; -import { chromaticWithTablet } from "testHelpers/chromatic"; -import { - MockConnectedSSHConnectionLog, - MockDisconnectedSSHConnectionLog, - MockUserOwner, -} from "testHelpers/entities"; import { ConnectionLogPageView } from "./ConnectionLogPageView"; type FilterProps = ComponentProps["filterProps"]; diff --git a/site/src/pages/ConnectionLogPage/ConnectionLogRow/ConnectionLogDescription/ConnectionLogDescription.stories.tsx b/site/src/pages/ConnectionLogPage/ConnectionLogRow/ConnectionLogDescription/ConnectionLogDescription.stories.tsx index 8c8263e7dbc68..1354960c7894f 100644 --- a/site/src/pages/ConnectionLogPage/ConnectionLogRow/ConnectionLogDescription/ConnectionLogDescription.stories.tsx +++ b/site/src/pages/ConnectionLogPage/ConnectionLogRow/ConnectionLogDescription/ConnectionLogDescription.stories.tsx @@ -1,8 +1,8 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockConnectedSSHConnectionLog, MockWebConnectionLog, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { ConnectionLogDescription } from "./ConnectionLogDescription"; const meta: Meta = { diff --git a/site/src/pages/ConnectionLogPage/ConnectionLogRow/ConnectionLogDescription/ConnectionLogDescription.tsx b/site/src/pages/ConnectionLogPage/ConnectionLogRow/ConnectionLogDescription/ConnectionLogDescription.tsx index b862134624189..fba3a9c20cb27 100644 --- a/site/src/pages/ConnectionLogPage/ConnectionLogRow/ConnectionLogDescription/ConnectionLogDescription.tsx +++ b/site/src/pages/ConnectionLogPage/ConnectionLogRow/ConnectionLogDescription/ConnectionLogDescription.tsx @@ -1,7 +1,7 @@ import Link from "@mui/material/Link"; import type { ConnectionLog } from "api/typesGenerated"; import type { FC, ReactNode } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { connectionTypeToFriendlyName } from "utils/connection"; interface ConnectionLogDescriptionProps { diff --git a/site/src/pages/ConnectionLogPage/ConnectionLogRow/ConnectionLogRow.stories.tsx b/site/src/pages/ConnectionLogPage/ConnectionLogRow/ConnectionLogRow.stories.tsx index 4e9dd49ed3edf..03833917e5bf4 100644 --- a/site/src/pages/ConnectionLogPage/ConnectionLogRow/ConnectionLogRow.stories.tsx +++ b/site/src/pages/ConnectionLogPage/ConnectionLogRow/ConnectionLogRow.stories.tsx @@ -1,11 +1,11 @@ -import TableContainer from "@mui/material/TableContainer"; -import type { Meta, StoryObj } from "@storybook/react"; -import { Table, TableBody } from "components/Table/Table"; import { MockConnectedSSHConnectionLog, MockDisconnectedSSHConnectionLog, MockWebConnectionLog, } from "testHelpers/entities"; +import TableContainer from "@mui/material/TableContainer"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { Table, TableBody } from "components/Table/Table"; import { ConnectionLogRow } from "./ConnectionLogRow"; const meta: Meta = { diff --git a/site/src/pages/ConnectionLogPage/ConnectionLogRow/ConnectionLogRow.tsx b/site/src/pages/ConnectionLogPage/ConnectionLogRow/ConnectionLogRow.tsx index ac847cff73b39..f66afde786e5f 100644 --- a/site/src/pages/ConnectionLogPage/ConnectionLogRow/ConnectionLogRow.tsx +++ b/site/src/pages/ConnectionLogPage/ConnectionLogRow/ConnectionLogRow.tsx @@ -7,10 +7,9 @@ import { Avatar } from "components/Avatar/Avatar"; import { Stack } from "components/Stack/Stack"; import { StatusPill } from "components/StatusPill/StatusPill"; import { TimelineEntry } from "components/Timeline/TimelineEntry"; -import { InfoIcon } from "lucide-react"; -import { NetworkIcon } from "lucide-react"; +import { InfoIcon, NetworkIcon } from "lucide-react"; import type { FC } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import userAgentParser from "ua-parser-js"; import { connectionTypeIsWeb } from "utils/connection"; import { ConnectionLogDescription } from "./ConnectionLogDescription/ConnectionLogDescription"; diff --git a/site/src/pages/CreateTemplateGalleryPage/CreateTemplateGalleryPage.test.tsx b/site/src/pages/CreateTemplateGalleryPage/CreateTemplateGalleryPage.test.tsx index 61cf4d353e053..48f545ea1c3f2 100644 --- a/site/src/pages/CreateTemplateGalleryPage/CreateTemplateGalleryPage.test.tsx +++ b/site/src/pages/CreateTemplateGalleryPage/CreateTemplateGalleryPage.test.tsx @@ -1,13 +1,13 @@ -import { render, screen } from "@testing-library/react"; import { AppProviders } from "App"; -import { RequireAuth } from "contexts/auth/RequireAuth"; -import { http, HttpResponse } from "msw"; -import { RouterProvider, createMemoryRouter } from "react-router-dom"; import { MockTemplateExample, MockTemplateExample2, } from "testHelpers/entities"; import { server } from "testHelpers/server"; +import { render, screen } from "@testing-library/react"; +import { RequireAuth } from "contexts/auth/RequireAuth"; +import { HttpResponse, http } from "msw"; +import { createMemoryRouter, RouterProvider } from "react-router"; import CreateTemplateGalleryPage from "./CreateTemplateGalleryPage"; test("displays the scratch template", async () => { diff --git a/site/src/pages/CreateTemplateGalleryPage/CreateTemplateGalleryPageView.stories.tsx b/site/src/pages/CreateTemplateGalleryPage/CreateTemplateGalleryPageView.stories.tsx index 4db1d58e8e20e..b406daeb932d4 100644 --- a/site/src/pages/CreateTemplateGalleryPage/CreateTemplateGalleryPageView.stories.tsx +++ b/site/src/pages/CreateTemplateGalleryPage/CreateTemplateGalleryPageView.stories.tsx @@ -1,10 +1,10 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { chromatic } from "testHelpers/chromatic"; import { MockTemplateExample, MockTemplateExample2, mockApiError, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { getTemplatesByTag } from "utils/starterTemplates"; import { CreateTemplateGalleryPageView } from "./CreateTemplateGalleryPageView"; diff --git a/site/src/pages/CreateTemplateGalleryPage/CreateTemplateGalleryPageView.tsx b/site/src/pages/CreateTemplateGalleryPage/CreateTemplateGalleryPageView.tsx index 25258421eaaf2..0ac220d4bcf67 100644 --- a/site/src/pages/CreateTemplateGalleryPage/CreateTemplateGalleryPageView.tsx +++ b/site/src/pages/CreateTemplateGalleryPage/CreateTemplateGalleryPageView.tsx @@ -11,7 +11,7 @@ import { Margins } from "components/Margins/Margins"; import { PageHeader, PageHeaderTitle } from "components/PageHeader/PageHeader"; import { ExternalLinkIcon } from "lucide-react"; import type { FC } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import type { StarterTemplatesByTag } from "utils/starterTemplates"; import { StarterTemplates } from "./StarterTemplates"; diff --git a/site/src/pages/CreateTemplateGalleryPage/StarterTemplates.tsx b/site/src/pages/CreateTemplateGalleryPage/StarterTemplates.tsx index c293fea854abd..6cc78bf83754d 100644 --- a/site/src/pages/CreateTemplateGalleryPage/StarterTemplates.tsx +++ b/site/src/pages/CreateTemplateGalleryPage/StarterTemplates.tsx @@ -3,7 +3,7 @@ import type { TemplateExample } from "api/typesGenerated"; import { Stack } from "components/Stack/Stack"; import { TemplateExampleCard } from "modules/templates/TemplateExampleCard/TemplateExampleCard"; import type { FC } from "react"; -import { Link, useSearchParams } from "react-router-dom"; +import { Link, useSearchParams } from "react-router"; import type { StarterTemplatesByTag } from "utils/starterTemplates"; const getTagLabel = (tag: string) => { diff --git a/site/src/pages/CreateTemplatePage/BuildLogsDrawer.stories.tsx b/site/src/pages/CreateTemplatePage/BuildLogsDrawer.stories.tsx index f2a773c09c099..3febfa23d9314 100644 --- a/site/src/pages/CreateTemplatePage/BuildLogsDrawer.stories.tsx +++ b/site/src/pages/CreateTemplatePage/BuildLogsDrawer.stories.tsx @@ -1,11 +1,11 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { JobError } from "api/queries/templates"; import { MockProvisionerJob, MockTemplateVersion, MockWorkspaceBuildLogs, } from "testHelpers/entities"; import { withWebSocket } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { JobError } from "api/queries/templates"; import { BuildLogsDrawer } from "./BuildLogsDrawer"; const meta: Meta = { diff --git a/site/src/pages/CreateTemplatePage/CreateTemplateForm.stories.tsx b/site/src/pages/CreateTemplatePage/CreateTemplateForm.stories.tsx index de5bba05bb303..17167ef79fdb7 100644 --- a/site/src/pages/CreateTemplatePage/CreateTemplateForm.stories.tsx +++ b/site/src/pages/CreateTemplatePage/CreateTemplateForm.stories.tsx @@ -1,10 +1,3 @@ -import { action } from "@storybook/addon-actions"; -import type { Meta, StoryObj } from "@storybook/react"; -import { screen, userEvent } from "@storybook/test"; -import { - getProvisionerDaemonsKey, - organizationsKey, -} from "api/queries/organizations"; import { MockDefaultOrganization, MockOrganization2, @@ -16,6 +9,13 @@ import { MockTemplateVersionVariable4, MockTemplateVersionVariable5, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { + getProvisionerDaemonsKey, + organizationsKey, +} from "api/queries/organizations"; +import { action } from "storybook/actions"; +import { screen, userEvent } from "storybook/test"; import { CreateTemplateForm } from "./CreateTemplateForm"; const meta: Meta = { diff --git a/site/src/pages/CreateTemplatePage/CreateTemplateForm.tsx b/site/src/pages/CreateTemplatePage/CreateTemplateForm.tsx index 33cd89286fe75..ddd967554134b 100644 --- a/site/src/pages/CreateTemplatePage/CreateTemplateForm.tsx +++ b/site/src/pages/CreateTemplatePage/CreateTemplateForm.tsx @@ -29,7 +29,7 @@ import { ProvisionerTagsField } from "modules/provisioners/ProvisionerTagsField" import { SelectedTemplate } from "pages/CreateWorkspacePage/SelectedTemplate"; import { type FC, useState } from "react"; import { useQuery } from "react-query"; -import { useSearchParams } from "react-router-dom"; +import { useSearchParams } from "react-router"; import { docs } from "utils/docs"; import { displayNameValidator, @@ -38,9 +38,9 @@ import { onChangeTrimmed, } from "utils/formUtils"; import { + sortedDays, type TemplateAutostartRequirementDaysValue, type TemplateAutostopRequirementDaysValue, - sortedDays, } from "utils/schedule"; import * as Yup from "yup"; import { TemplateUpload, type TemplateUploadProps } from "./TemplateUpload"; diff --git a/site/src/pages/CreateTemplatePage/CreateTemplatePage.test.tsx b/site/src/pages/CreateTemplatePage/CreateTemplatePage.test.tsx index 2677f67d8df10..e403eab8bcb24 100644 --- a/site/src/pages/CreateTemplatePage/CreateTemplatePage.test.tsx +++ b/site/src/pages/CreateTemplatePage/CreateTemplatePage.test.tsx @@ -1,6 +1,3 @@ -import { fireEvent, screen, waitFor, within } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; -import { API } from "api/api"; import { MockTemplate, MockTemplateExample, @@ -11,6 +8,9 @@ import { mockApiError, } from "testHelpers/entities"; import { renderWithAuth } from "testHelpers/renderHelpers"; +import { fireEvent, screen, waitFor, within } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { API } from "api/api"; import CreateTemplatePage from "./CreateTemplatePage"; const renderPage = async (searchParams: URLSearchParams) => { diff --git a/site/src/pages/CreateTemplatePage/CreateTemplatePage.tsx b/site/src/pages/CreateTemplatePage/CreateTemplatePage.tsx index 71d45d2ab148b..af71c1686e40d 100644 --- a/site/src/pages/CreateTemplatePage/CreateTemplatePage.tsx +++ b/site/src/pages/CreateTemplatePage/CreateTemplatePage.tsx @@ -5,13 +5,13 @@ import { linkToTemplate, useLinks } from "modules/navigation"; import { type FC, useRef, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation } from "react-query"; -import { useNavigate, useSearchParams } from "react-router-dom"; +import { useNavigate, useSearchParams } from "react-router"; import { pageTitle } from "utils/page"; import { BuildLogsDrawer } from "./BuildLogsDrawer"; import { DuplicateTemplateView } from "./DuplicateTemplateView"; import { ImportStarterTemplateView } from "./ImportStarterTemplateView"; -import { UploadTemplateView } from "./UploadTemplateView"; import type { CreateTemplatePageViewProps } from "./types"; +import { UploadTemplateView } from "./UploadTemplateView"; const CreateTemplatePage: FC = () => { const navigate = useNavigate(); diff --git a/site/src/pages/CreateTemplatePage/DuplicateTemplateView.tsx b/site/src/pages/CreateTemplatePage/DuplicateTemplateView.tsx index 61410452ea61d..bf9f1c51fe8c1 100644 --- a/site/src/pages/CreateTemplatePage/DuplicateTemplateView.tsx +++ b/site/src/pages/CreateTemplatePage/DuplicateTemplateView.tsx @@ -11,7 +11,7 @@ import { Loader } from "components/Loader/Loader"; import { useDashboard } from "modules/dashboard/useDashboard"; import type { FC } from "react"; import { useQuery } from "react-query"; -import { useNavigate, useSearchParams } from "react-router-dom"; +import { useNavigate, useSearchParams } from "react-router"; import { CreateTemplateForm } from "./CreateTemplateForm"; import type { CreateTemplatePageViewProps } from "./types"; import { firstVersionFromFile, getFormPermissions, newTemplate } from "./utils"; diff --git a/site/src/pages/CreateTemplatePage/ImportStarterTemplateView.tsx b/site/src/pages/CreateTemplatePage/ImportStarterTemplateView.tsx index cfc62e44d0cec..a1c095f6855ac 100644 --- a/site/src/pages/CreateTemplatePage/ImportStarterTemplateView.tsx +++ b/site/src/pages/CreateTemplatePage/ImportStarterTemplateView.tsx @@ -9,7 +9,7 @@ import { Loader } from "components/Loader/Loader"; import { useDashboard } from "modules/dashboard/useDashboard"; import type { FC } from "react"; import { keepPreviousData, useQuery } from "react-query"; -import { useNavigate, useSearchParams } from "react-router-dom"; +import { useNavigate, useSearchParams } from "react-router"; import { CreateTemplateForm } from "./CreateTemplateForm"; import type { CreateTemplatePageViewProps } from "./types"; import { diff --git a/site/src/pages/CreateTemplatePage/TemplateUpload.tsx b/site/src/pages/CreateTemplatePage/TemplateUpload.tsx index 800cab0ce0512..bc0160dca50b9 100644 --- a/site/src/pages/CreateTemplatePage/TemplateUpload.tsx +++ b/site/src/pages/CreateTemplatePage/TemplateUpload.tsx @@ -1,7 +1,7 @@ import Link from "@mui/material/Link"; import { FileUpload } from "components/FileUpload/FileUpload"; import type { FC } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; export interface TemplateUploadProps { isUploading: boolean; diff --git a/site/src/pages/CreateTemplatePage/UploadTemplateView.tsx b/site/src/pages/CreateTemplatePage/UploadTemplateView.tsx index 2b5f673c449d8..ccc44d879c489 100644 --- a/site/src/pages/CreateTemplatePage/UploadTemplateView.tsx +++ b/site/src/pages/CreateTemplatePage/UploadTemplateView.tsx @@ -9,7 +9,7 @@ import { displayError } from "components/GlobalSnackbar/utils"; import { useDashboard } from "modules/dashboard/useDashboard"; import type { FC } from "react"; import { useMutation, useQuery } from "react-query"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; import { CreateTemplateForm } from "./CreateTemplateForm"; import type { CreateTemplatePageViewProps } from "./types"; import { firstVersionFromFile, getFormPermissions, newTemplate } from "./utils"; diff --git a/site/src/pages/CreateTemplatePage/utils.ts b/site/src/pages/CreateTemplatePage/utils.ts index a10c52a70c16a..ab0336ef120e4 100644 --- a/site/src/pages/CreateTemplatePage/utils.ts +++ b/site/src/pages/CreateTemplatePage/utils.ts @@ -18,6 +18,7 @@ export const newTemplate = ( const safeTemplateData = { name: formData.name, max_port_share_level: null, + cors_behavior: null, display_name: formData.display_name, description: formData.description, icon: formData.icon, diff --git a/site/src/pages/CreateTokenPage/CreateTokenForm.tsx b/site/src/pages/CreateTokenPage/CreateTokenForm.tsx index 57d1587e92590..c414adf1672cd 100644 --- a/site/src/pages/CreateTokenPage/CreateTokenForm.tsx +++ b/site/src/pages/CreateTokenPage/CreateTokenForm.tsx @@ -14,14 +14,14 @@ import dayjs from "dayjs"; import utc from "dayjs/plugin/utc"; import type { FormikContextType } from "formik"; import { type FC, useEffect, useState } from "react"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; import { getFormHelpers, onChangeTrimmed } from "utils/formUtils"; import { type CreateTokenData, - NANO_HOUR, customLifetimeDay, determineDefaultLtValue, filterByMaxTokenLifetime, + NANO_HOUR, } from "./utils"; dayjs.extend(utc); @@ -80,15 +80,21 @@ export const CreateTokenForm: FC = ({ + The token will expire on{" "} + + {dayjs() + .add(form.values.lifetime, "days") + .utc() + .format("MMMM DD, YYYY")} + + + ) : ( + "Please set a token expiration." + ) } classes={{ sectionInfo: classNames.sectionInfo }} > diff --git a/site/src/pages/CreateTokenPage/CreateTokenPage.stories.tsx b/site/src/pages/CreateTokenPage/CreateTokenPage.stories.tsx index 2bca00577dac3..8885dc584180e 100644 --- a/site/src/pages/CreateTokenPage/CreateTokenPage.stories.tsx +++ b/site/src/pages/CreateTokenPage/CreateTokenPage.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import CreateTokenPage from "./CreateTokenPage"; const meta: Meta = { diff --git a/site/src/pages/CreateTokenPage/CreateTokenPage.test.tsx b/site/src/pages/CreateTokenPage/CreateTokenPage.test.tsx index 59bda3d458014..042b09bf24dff 100644 --- a/site/src/pages/CreateTokenPage/CreateTokenPage.test.tsx +++ b/site/src/pages/CreateTokenPage/CreateTokenPage.test.tsx @@ -1,10 +1,10 @@ -import { screen, within } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; -import { API } from "api/api"; import { renderWithAuth, waitForLoaderToBeRemoved, } from "testHelpers/renderHelpers"; +import { screen, within } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { API } from "api/api"; import CreateTokenPage from "./CreateTokenPage"; describe("TokenPage", () => { diff --git a/site/src/pages/CreateTokenPage/CreateTokenPage.tsx b/site/src/pages/CreateTokenPage/CreateTokenPage.tsx index 57e68600e0bf8..f80e152a58bbe 100644 --- a/site/src/pages/CreateTokenPage/CreateTokenPage.tsx +++ b/site/src/pages/CreateTokenPage/CreateTokenPage.tsx @@ -9,7 +9,7 @@ import { useFormik } from "formik"; import { type FC, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery } from "react-query"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; import { pageTitle } from "utils/page"; import { CreateTokenForm } from "./CreateTokenForm"; import { type CreateTokenData, NANO_HOUR } from "./utils"; diff --git a/site/src/pages/CreateTokenPage/utils.test.tsx b/site/src/pages/CreateTokenPage/utils.test.tsx index a8cfbbd855e96..b09e72a812f7d 100644 --- a/site/src/pages/CreateTokenPage/utils.test.tsx +++ b/site/src/pages/CreateTokenPage/utils.test.tsx @@ -1,9 +1,9 @@ import { - type LifetimeDay, - NANO_HOUR, determineDefaultLtValue, filterByMaxTokenLifetime, + type LifetimeDay, lifetimeDayPresets, + NANO_HOUR, } from "./utils"; describe("unit/CreateTokenForm", () => { diff --git a/site/src/pages/CreateUserPage/CreateUserForm.stories.tsx b/site/src/pages/CreateUserPage/CreateUserForm.stories.tsx index f836a7bde8fc7..d112fbae47966 100644 --- a/site/src/pages/CreateUserPage/CreateUserForm.stories.tsx +++ b/site/src/pages/CreateUserPage/CreateUserForm.stories.tsx @@ -1,13 +1,13 @@ -import { action } from "@storybook/addon-actions"; -import type { Meta, StoryObj } from "@storybook/react"; -import { userEvent, within } from "@storybook/test"; -import { organizationsKey } from "api/queries/organizations"; -import type { Organization } from "api/typesGenerated"; import { MockOrganization, MockOrganization2, mockApiError, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { organizationsKey } from "api/queries/organizations"; +import type { Organization } from "api/typesGenerated"; +import { action } from "storybook/actions"; +import { userEvent, within } from "storybook/test"; import { CreateUserForm } from "./CreateUserForm"; const meta: Meta = { diff --git a/site/src/pages/CreateUserPage/CreateUserPage.test.tsx b/site/src/pages/CreateUserPage/CreateUserPage.test.tsx index b1044630d798b..271376b3a28a8 100644 --- a/site/src/pages/CreateUserPage/CreateUserPage.test.tsx +++ b/site/src/pages/CreateUserPage/CreateUserPage.test.tsx @@ -1,9 +1,9 @@ -import { fireEvent, screen } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; import { renderWithAuth, waitForLoaderToBeRemoved, } from "testHelpers/renderHelpers"; +import { fireEvent, screen } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; import CreateUserPage from "./CreateUserPage"; import { Language as FormLanguage } from "./Language"; diff --git a/site/src/pages/CreateUserPage/CreateUserPage.tsx b/site/src/pages/CreateUserPage/CreateUserPage.tsx index a90059fea6410..9c47a7c1f0337 100644 --- a/site/src/pages/CreateUserPage/CreateUserPage.tsx +++ b/site/src/pages/CreateUserPage/CreateUserPage.tsx @@ -5,11 +5,11 @@ import { useDashboard } from "modules/dashboard/useDashboard"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; import { pageTitle } from "utils/page"; import { CreateUserForm } from "./CreateUserForm"; -const Language = { +const _Language = { unknownError: "Oops, an unknown error occurred.", }; diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspaceExperimentRouter.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspaceExperimentRouter.tsx index a0dd3dbf715c4..601bf77ca951e 100644 --- a/site/src/pages/CreateWorkspacePage/CreateWorkspaceExperimentRouter.tsx +++ b/site/src/pages/CreateWorkspacePage/CreateWorkspaceExperimentRouter.tsx @@ -3,7 +3,7 @@ import { ErrorAlert } from "components/Alert/ErrorAlert"; import { Loader } from "components/Loader/Loader"; import type { FC } from "react"; import { useQuery } from "react-query"; -import { useParams } from "react-router-dom"; +import { useParams } from "react-router"; import CreateWorkspacePage from "./CreateWorkspacePage"; import CreateWorkspacePageExperimental from "./CreateWorkspacePageExperimental"; diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.test.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.test.tsx index 868aa85c751bd..5199854cface6 100644 --- a/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.test.tsx +++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.test.tsx @@ -1,6 +1,3 @@ -import { fireEvent, screen, waitFor } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; -import { API } from "api/api"; import { MockTemplate, MockTemplateVersionExternalAuthGithub, @@ -18,6 +15,9 @@ import { renderWithAuth, waitForLoaderToBeRemoved, } from "testHelpers/renderHelpers"; +import { fireEvent, screen, waitFor } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { API } from "api/api"; import CreateWorkspacePage from "./CreateWorkspacePage"; import { Language } from "./CreateWorkspacePageView"; diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.tsx index 6d057a73d1a50..c3685f9735cbb 100644 --- a/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.tsx +++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.tsx @@ -22,7 +22,7 @@ import { generateWorkspaceName } from "modules/workspaces/generateWorkspaceName" import { type FC, useCallback, useEffect, useRef, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; -import { useNavigate, useParams, useSearchParams } from "react-router-dom"; +import { useNavigate, useParams, useSearchParams } from "react-router"; import { pageTitle } from "utils/page"; import type { AutofillBuildParameter } from "utils/richParameters"; import { paramsUsedToCreateWorkspace } from "utils/workspace"; @@ -72,6 +72,20 @@ const CreateWorkspacePage: FC = () => { }), enabled: !!templateQuery.data, }); + const templatePermissionsQuery = useQuery({ + ...checkAuthorization({ + checks: { + canUpdateTemplate: { + object: { + resource_type: "template", + resource_id: templateQuery.data?.id ?? "", + }, + action: "update", + }, + }, + }), + enabled: !!templateQuery.data, + }); const realizedVersionId = customVersionId ?? templateQuery.data?.active_version_id; const organizationId = templateQuery.data?.organization_id; @@ -93,9 +107,13 @@ const CreateWorkspacePage: FC = () => { const isLoadingFormData = templateQuery.isLoading || permissionsQuery.isLoading || + templatePermissionsQuery.isLoading || richParametersQuery.isLoading; const loadFormDataError = - templateQuery.error ?? permissionsQuery.error ?? richParametersQuery.error; + templateQuery.error ?? + permissionsQuery.error ?? + templatePermissionsQuery.error ?? + richParametersQuery.error; const title = autoCreateWorkspaceMutation.isPending ? "Creating workspace..." @@ -211,7 +229,9 @@ const CreateWorkspacePage: FC = () => { startPollingExternalAuth={startPollingExternalAuth} hasAllRequiredExternalAuth={hasAllRequiredExternalAuth} permissions={permissionsQuery.data as CreateWorkspacePermissions} - canUpdateTemplate={permissionsQuery.data?.canUpdateTemplate} + templatePermissions={ + templatePermissionsQuery.data as { canUpdateTemplate: boolean } + } parameters={realizedParameters as TemplateVersionParameter[]} presets={templateVersionPresetsQuery.data ?? []} creatingWorkspace={createWorkspaceMutation.isPending} diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageExperimental.test.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageExperimental.test.tsx new file mode 100644 index 0000000000000..b60c3ca3e7c7f --- /dev/null +++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageExperimental.test.tsx @@ -0,0 +1,600 @@ +import { + MockDropdownParameter, + MockDynamicParametersResponse, + MockDynamicParametersResponseWithError, + MockPermissions, + MockSliderParameter, + MockTemplate, + MockTemplateVersionExternalAuthGithub, + MockTemplateVersionExternalAuthGithubAuthenticated, + MockUserOwner, + MockValidationParameter, + MockWorkspace, +} from "testHelpers/entities"; +import { + renderWithAuth, + waitForLoaderToBeRemoved, +} from "testHelpers/renderHelpers"; +import { createMockWebSocket } from "testHelpers/websockets"; +import { screen, waitFor } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { API } from "api/api"; +import type { DynamicParametersResponse } from "api/typesGenerated"; +import CreateWorkspacePageExperimental from "./CreateWorkspacePageExperimental"; + +describe("CreateWorkspacePageExperimental", () => { + const renderCreateWorkspacePageExperimental = ( + route = `/templates/${MockTemplate.name}/workspace`, + ) => { + return renderWithAuth(, { + route, + path: "/templates/:template/workspace", + extraRoutes: [ + { + path: "/:username/:workspace", + element:
    Workspace Page
    , + }, + ], + }); + }; + + beforeEach(() => { + jest.clearAllMocks(); + + jest.spyOn(API, "getTemplate").mockResolvedValue(MockTemplate); + jest.spyOn(API, "getTemplateVersionExternalAuth").mockResolvedValue([]); + jest.spyOn(API, "getTemplateVersionPresets").mockResolvedValue([]); + jest.spyOn(API, "createWorkspace").mockResolvedValue(MockWorkspace); + jest.spyOn(API, "checkAuthorization").mockResolvedValue(MockPermissions); + + jest + .spyOn(API, "templateVersionDynamicParameters") + .mockImplementation((_versionId, _ownerId, callbacks) => { + const [mockWebSocket, publisher] = createMockWebSocket("ws://test"); + + mockWebSocket.addEventListener("message", (event) => { + callbacks.onMessage(JSON.parse(event.data)); + }); + mockWebSocket.addEventListener("error", () => { + callbacks.onError( + new Error("Connection for dynamic parameters failed."), + ); + }); + mockWebSocket.addEventListener("close", () => { + callbacks.onClose(); + }); + + publisher.publishOpen(new Event("open")); + publisher.publishMessage( + new MessageEvent("message", { + data: JSON.stringify(MockDynamicParametersResponse), + }), + ); + + return mockWebSocket; + }); + }); + + afterEach(() => { + jest.restoreAllMocks(); + }); + + describe("WebSocket Integration", () => { + it("establishes WebSocket connection and receives initial parameters", async () => { + renderCreateWorkspacePageExperimental(); + + await waitForLoaderToBeRemoved(); + + expect(API.templateVersionDynamicParameters).toHaveBeenCalledWith( + MockTemplate.active_version_id, + MockUserOwner.id, + expect.objectContaining({ + onMessage: expect.any(Function), + onError: expect.any(Function), + onClose: expect.any(Function), + }), + ); + + await waitFor(() => { + expect(screen.getByText(/instance type/i)).toBeInTheDocument(); + expect(screen.getByText("CPU Count")).toBeInTheDocument(); + expect(screen.getByText("Enable Monitoring")).toBeInTheDocument(); + expect(screen.getByText("Tags")).toBeInTheDocument(); + }); + }); + + it("sends parameter updates via WebSocket when form values change", async () => { + const [mockWebSocket, publisher] = createMockWebSocket("ws://test"); + + jest + .spyOn(API, "templateVersionDynamicParameters") + .mockImplementation((_versionId, _ownerId, callbacks) => { + mockWebSocket.addEventListener("message", (event) => { + callbacks.onMessage(JSON.parse(event.data)); + }); + mockWebSocket.addEventListener("error", () => { + callbacks.onError( + new Error("Connection for dynamic parameters failed."), + ); + }); + mockWebSocket.addEventListener("close", () => { + callbacks.onClose(); + }); + + publisher.publishOpen(new Event("open")); + publisher.publishMessage( + new MessageEvent("message", { + data: JSON.stringify(MockDynamicParametersResponse), + }), + ); + + return mockWebSocket; + }); + + renderCreateWorkspacePageExperimental(); + await waitForLoaderToBeRemoved(); + + expect(screen.getByText(/instance type/i)).toBeInTheDocument(); + + const instanceTypeSelect = screen.getByRole("button", { + name: /instance type/i, + }); + expect(instanceTypeSelect).toBeInTheDocument(); + + await waitFor(async () => { + await userEvent.click(instanceTypeSelect); + }); + + let mediumOption: Element | null = null; + await waitFor(() => { + mediumOption = screen.queryByRole("option", { name: /t3\.medium/i }); + expect(mediumOption).toBeTruthy(); + }); + + await waitFor(async () => { + await userEvent.click(mediumOption!); + }); + + expect(mockWebSocket.send).toHaveBeenCalledWith( + expect.stringContaining('"instance_type":"t3.medium"'), + ); + }); + + it("handles WebSocket error gracefully", async () => { + const [mockWebSocket, mockPublisher] = createMockWebSocket("ws://test"); + + jest + .spyOn(API, "templateVersionDynamicParameters") + .mockImplementation((_versionId, _ownerId, callbacks) => { + mockWebSocket.addEventListener("error", () => { + callbacks.onError(new Error("Connection failed")); + }); + + return mockWebSocket; + }); + + renderCreateWorkspacePageExperimental(); + + await waitFor(() => { + expect(mockPublisher).toBeDefined(); + mockPublisher.publishError(new Event("Connection failed")); + expect(screen.getByText(/connection failed/i)).toBeInTheDocument(); + }); + }); + + it("handles WebSocket close event", async () => { + const [mockWebSocket, mockPublisher] = createMockWebSocket("ws://test"); + + jest + .spyOn(API, "templateVersionDynamicParameters") + .mockImplementation((_versionId, _ownerId, callbacks) => { + mockWebSocket.addEventListener("close", () => { + callbacks.onClose(); + }); + + return mockWebSocket; + }); + + renderCreateWorkspacePageExperimental(); + + await waitFor(() => { + expect(mockPublisher).toBeDefined(); + mockPublisher.publishClose(new Event("close") as CloseEvent); + expect( + screen.getByText(/websocket connection.*unexpectedly closed/i), + ).toBeInTheDocument(); + }); + }); + + it("only parameters from latest response are displayed", async () => { + const [mockWebSocket, mockPublisher] = createMockWebSocket("ws://test"); + jest + .spyOn(API, "templateVersionDynamicParameters") + .mockImplementation((_versionId, _ownerId, callbacks) => { + mockWebSocket.addEventListener("message", (event) => { + callbacks.onMessage(JSON.parse(event.data)); + }); + + mockPublisher.publishOpen(new Event("open")); + mockPublisher.publishMessage( + new MessageEvent("message", { + data: JSON.stringify({ + id: 0, + parameters: [MockDropdownParameter], + diagnostics: [], + }), + }), + ); + + return mockWebSocket; + }); + + renderCreateWorkspacePageExperimental(); + await waitForLoaderToBeRemoved(); + + const response1: DynamicParametersResponse = { + id: 1, + parameters: [MockDropdownParameter], + diagnostics: [], + }; + const response2: DynamicParametersResponse = { + id: 4, + parameters: [MockSliderParameter], + diagnostics: [], + }; + + await waitFor(() => { + mockPublisher.publishMessage( + new MessageEvent("message", { data: JSON.stringify(response1) }), + ); + + mockPublisher.publishMessage( + new MessageEvent("message", { data: JSON.stringify(response2) }), + ); + }); + + expect(screen.queryByText("CPU Count")).toBeInTheDocument(); + expect(screen.queryByText("Instance Type")).not.toBeInTheDocument(); + }); + }); + + describe("Dynamic Parameter Types", () => { + it("displays parameter validation errors", async () => { + jest + .spyOn(API, "templateVersionDynamicParameters") + .mockImplementation((_versionId, _ownerId, callbacks) => { + const [mockWebSocket, publisher] = createMockWebSocket("ws://test"); + + mockWebSocket.addEventListener("message", (event) => { + callbacks.onMessage(JSON.parse(event.data)); + }); + + publisher.publishMessage( + new MessageEvent("message", { + data: JSON.stringify(MockDynamicParametersResponseWithError), + }), + ); + + return mockWebSocket; + }); + + renderCreateWorkspacePageExperimental(); + await waitForLoaderToBeRemoved(); + + await waitFor(() => { + expect(screen.getByText("Validation failed")).toBeInTheDocument(); + expect( + screen.getByText( + "The selected instance type is not available in this region", + ), + ).toBeInTheDocument(); + }); + }); + + it("displays parameter validation errors for min/max constraints", async () => { + const mockResponseInitial: DynamicParametersResponse = { + id: 1, + parameters: [MockValidationParameter], + diagnostics: [], + }; + + const mockResponseWithError: DynamicParametersResponse = { + id: 2, + parameters: [ + { + ...MockValidationParameter, + value: { value: "200", valid: false }, + diagnostics: [ + { + severity: "error", + summary: + "Invalid parameter value according to 'validation' block", + detail: "value 200 is more than the maximum 100", + extra: { + code: "", + }, + }, + ], + }, + ], + diagnostics: [], + }; + + jest + .spyOn(API, "templateVersionDynamicParameters") + .mockImplementation((_versionId, _ownerId, callbacks) => { + const [mockWebSocket, publisher] = createMockWebSocket("ws://test"); + + mockWebSocket.addEventListener("message", (event) => { + callbacks.onMessage(JSON.parse(event.data)); + }); + + publisher.publishOpen(new Event("open")); + + publisher.publishMessage( + new MessageEvent("message", { + data: JSON.stringify(mockResponseInitial), + }), + ); + + const originalSend = mockWebSocket.send; + mockWebSocket.send = jest.fn((data) => { + originalSend.call(mockWebSocket, data); + + if (typeof data === "string" && data.includes('"200"')) { + publisher.publishMessage( + new MessageEvent("message", { + data: JSON.stringify(mockResponseWithError), + }), + ); + } + }); + + return mockWebSocket; + }); + + renderCreateWorkspacePageExperimental(); + await waitForLoaderToBeRemoved(); + + await waitFor(() => { + expect(screen.getByText("Invalid Parameter")).toBeInTheDocument(); + }); + + const numberInput = screen.getByDisplayValue("50"); + expect(numberInput).toBeInTheDocument(); + + await waitFor(async () => { + await userEvent.clear(numberInput); + await userEvent.type(numberInput, "200"); + }); + + await waitFor(() => { + expect(screen.getByDisplayValue("200")).toBeInTheDocument(); + }); + + await waitFor(() => { + expect( + screen.getByText( + "Invalid parameter value according to 'validation' block", + ), + ).toBeInTheDocument(); + }); + + await waitFor(() => { + expect( + screen.getByText("value 200 is more than the maximum 100"), + ).toBeInTheDocument(); + }); + + const errorElement = screen.getByText( + "value 200 is more than the maximum 100", + ); + expect(errorElement.closest("div")).toHaveClass( + "text-content-destructive", + ); + }); + }); + + describe("External Authentication", () => { + it("displays external auth providers", async () => { + jest + .spyOn(API, "getTemplateVersionExternalAuth") + .mockResolvedValue([MockTemplateVersionExternalAuthGithub]); + + renderCreateWorkspacePageExperimental(); + await waitForLoaderToBeRemoved(); + + await waitFor(() => { + expect(screen.getByText("GitHub")).toBeInTheDocument(); + expect( + screen.getByRole("button", { name: /login with github/i }), + ).toBeInTheDocument(); + }); + }); + + it("shows authenticated state for connected providers", async () => { + jest + .spyOn(API, "getTemplateVersionExternalAuth") + .mockResolvedValue([ + MockTemplateVersionExternalAuthGithubAuthenticated, + ]); + + renderCreateWorkspacePageExperimental(); + await waitForLoaderToBeRemoved(); + + await waitFor(() => { + expect(screen.getByText("GitHub")).toBeInTheDocument(); + expect(screen.getByText(/authenticated/i)).toBeInTheDocument(); + }); + }); + + it("prevents auto-creation when required external auth is missing", async () => { + jest + .spyOn(API, "getTemplateVersionExternalAuth") + .mockResolvedValue([MockTemplateVersionExternalAuthGithub]); + + renderCreateWorkspacePageExperimental( + `/templates/${MockTemplate.name}/workspace?mode=auto`, + ); + await waitForLoaderToBeRemoved(); + + await waitFor(() => { + expect( + screen.getByText( + /external authentication providers that are not connected/i, + ), + ).toBeInTheDocument(); + expect( + screen.getByText(/auto-creation has been disabled/i), + ).toBeInTheDocument(); + }); + }); + }); + + describe("Auto-creation Mode", () => { + it("falls back to form mode when auto-creation fails", async () => { + jest + .spyOn(API, "getTemplateVersionExternalAuth") + .mockResolvedValue([ + MockTemplateVersionExternalAuthGithubAuthenticated, + ]); + jest + .spyOn(API, "createWorkspace") + .mockRejectedValue(new Error("Auto-creation failed")); + + renderCreateWorkspacePageExperimental( + `/templates/${MockTemplate.name}/workspace?mode=auto`, + ); + + await waitForLoaderToBeRemoved(); + + expect(screen.getByText(/instance type/i)).toBeInTheDocument(); + + await waitFor(() => { + expect(screen.getByText("Create workspace")).toBeInTheDocument(); + expect( + screen.getByRole("button", { name: /create workspace/i }), + ).toBeInTheDocument(); + }); + }); + }); + + describe("Form Submission", () => { + it("creates workspace with correct parameters", async () => { + renderCreateWorkspacePageExperimental(); + await waitForLoaderToBeRemoved(); + + expect(screen.getByText(/instance type/i)).toBeInTheDocument(); + + const nameInput = screen.getByRole("textbox", { + name: /workspace name/i, + }); + await waitFor(async () => { + await userEvent.clear(nameInput); + await userEvent.type(nameInput, "my-test-workspace"); + }); + + const createButton = screen.getByRole("button", { + name: /create workspace/i, + }); + await waitFor(async () => { + await userEvent.click(createButton); + }); + + await waitFor(() => { + expect(API.createWorkspace).toHaveBeenCalledWith( + "test-user", + expect.objectContaining({ + name: "my-test-workspace", + template_version_id: MockTemplate.active_version_id, + template_id: undefined, + rich_parameter_values: [ + expect.objectContaining({ name: "instance_type", value: "" }), + expect.objectContaining({ name: "cpu_count", value: "2" }), + expect.objectContaining({ + name: "enable_monitoring", + value: "true", + }), + expect.objectContaining({ name: "tags", value: "[]" }), + expect.objectContaining({ name: "ides", value: "[]" }), + ], + }), + ); + }); + }); + }); + + describe("URL Parameters", () => { + it("pre-fills parameters from URL", async () => { + renderCreateWorkspacePageExperimental( + `/templates/${MockTemplate.name}/workspace?param.instance_type=t3.large¶m.cpu_count=4`, + ); + await waitForLoaderToBeRemoved(); + + expect(screen.getByText(/instance type/i)).toBeInTheDocument(); + expect(screen.getByText("CPU Count")).toBeInTheDocument(); + }); + + it("uses custom template version when specified", async () => { + const customVersionId = "custom-version-123"; + + renderCreateWorkspacePageExperimental( + `/templates/${MockTemplate.name}/workspace?version=${customVersionId}`, + ); + + await waitFor(() => { + expect(API.templateVersionDynamicParameters).toHaveBeenCalledWith( + customVersionId, + MockUserOwner.id, + expect.any(Object), + ); + }); + }); + + it("pre-fills workspace name from URL", async () => { + const workspaceName = "my-custom-workspace"; + + renderCreateWorkspacePageExperimental( + `/templates/${MockTemplate.name}/workspace?name=${workspaceName}`, + ); + await waitForLoaderToBeRemoved(); + + await waitFor(() => { + const nameInput = screen.getByRole("textbox", { + name: /workspace name/i, + }); + expect(nameInput).toHaveValue(workspaceName); + }); + }); + }); + + describe("Navigation", () => { + it("navigates to workspace after successful creation", async () => { + const { router } = renderCreateWorkspacePageExperimental(); + await waitForLoaderToBeRemoved(); + + const nameInput = screen.getByRole("textbox", { + name: /workspace name/i, + }); + + await waitFor(async () => { + await userEvent.clear(nameInput); + await userEvent.type(nameInput, "my-test-workspace"); + }); + + // Submit form + const createButton = screen.getByRole("button", { + name: /create workspace/i, + }); + await waitFor(async () => { + await userEvent.click(createButton); + }); + + await waitFor(() => { + expect(router.state.location.pathname).toBe( + `/@${MockWorkspace.owner_name}/${MockWorkspace.name}`, + ); + }); + }); + }); +}); diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageExperimental.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageExperimental.tsx index b69ef084a77f7..588606f527dc4 100644 --- a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageExperimental.tsx +++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageExperimental.tsx @@ -28,7 +28,7 @@ import { } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; -import { useNavigate, useParams, useSearchParams } from "react-router-dom"; +import { useNavigate, useParams, useSearchParams } from "react-router"; import { pageTitle } from "utils/page"; import type { AutofillBuildParameter } from "utils/richParameters"; import { CreateWorkspacePageViewExperimental } from "./CreateWorkspacePageViewExperimental"; @@ -274,16 +274,19 @@ const CreateWorkspacePageExperimental: FC = () => { return [...latestResponse.parameters].sort((a, b) => a.order - b.order); }, [latestResponse?.parameters]); + const shouldShowLoader = + !templateQuery.data || + isLoadingFormData || + isLoadingExternalAuth || + autoCreateReady || + (!latestResponse && !wsError); + return ( <> {pageTitle(title)} - {!latestResponse || - !templateQuery.data || - isLoadingFormData || - isLoadingExternalAuth || - autoCreateReady ? ( + {shouldShowLoader ? ( ) : ( = { @@ -31,7 +33,9 @@ const meta: Meta = { canUpdateTemplate: false, }, onCancel: action("onCancel"), + templatePermissions: { canUpdateTemplate: true }, }, + decorators: [withDashboardProvider], }; export default meta; @@ -126,6 +130,8 @@ export const PresetsButNoneSelected: Story = { { ID: "preset-1", Name: "Preset 1", + Description: "Preset 1 description", + Icon: "/emojis/0031-fe0f-20e3.png", Default: false, Parameters: [ { @@ -138,6 +144,8 @@ export const PresetsButNoneSelected: Story = { { ID: "preset-2", Name: "Preset 2", + Description: "Preset 2 description", + Icon: "/emojis/0032-fe0f-20e3.png", Default: false, Parameters: [ { @@ -157,21 +165,12 @@ export const PresetsButNoneSelected: Story = { }; export const PresetSelected: Story = { - args: PresetsButNoneSelected.args, - play: async ({ canvasElement }) => { - const canvas = within(canvasElement); - await userEvent.click(canvas.getByLabelText("Preset")); - await userEvent.click(canvas.getByText("Preset 1")); - }, -}; - -export const PresetSelectedWithHiddenParameters: Story = { args: PresetsButNoneSelected.args, play: async ({ canvasElement }) => { const canvas = within(canvasElement); // Select a preset - await userEvent.click(canvas.getByLabelText("Preset")); - await userEvent.click(canvas.getByText("Preset 1")); + await userEvent.click(canvas.getByRole("button", { name: "None" })); + await userEvent.click(screen.getByText("Preset 1")); }, }; @@ -180,8 +179,8 @@ export const PresetSelectedWithVisibleParameters: Story = { play: async ({ canvasElement }) => { const canvas = within(canvasElement); // Select a preset - await userEvent.click(canvas.getByLabelText("Preset")); - await userEvent.click(canvas.getByText("Preset 1")); + await userEvent.click(canvas.getByRole("button", { name: "None" })); + await userEvent.click(screen.getByText("Preset 1")); // Toggle off the show preset parameters switch await userEvent.click(canvas.getByLabelText("Show preset parameters")); }, @@ -193,16 +192,12 @@ export const PresetReselected: Story = { const canvas = within(canvasElement); // First selection of Preset 1 - await userEvent.click(canvas.getByLabelText("Preset")); - await userEvent.click( - canvas.getByText("Preset 1", { selector: ".MuiMenuItem-root" }), - ); + await userEvent.click(canvas.getByRole("button", { name: "None" })); + await userEvent.click(screen.getByText("Preset 1")); // Reselect the same preset - await userEvent.click(canvas.getByLabelText("Preset")); - await userEvent.click( - canvas.getByText("Preset 1", { selector: ".MuiMenuItem-root" }), - ); + await userEvent.click(canvas.getByRole("button", { name: "Preset 1" })); + await userEvent.click(canvas.getByText("Preset 1")); }, }; @@ -210,11 +205,6 @@ export const PresetNoneSelected: Story = { args: { ...PresetsButNoneSelected.args, onSubmit: (request, owner) => { - // Assert that template_version_preset_id is not present in the request - console.assert( - !("template_version_preset_id" in request), - 'template_version_preset_id should not be present when "None" is selected', - ); action("onSubmit")(request, owner); }, }, @@ -222,12 +212,11 @@ export const PresetNoneSelected: Story = { const canvas = within(canvasElement); // First select a preset to set the field value - await userEvent.click(canvas.getByLabelText("Preset")); - await userEvent.click(canvas.getByText("Preset 1")); + await userEvent.click(canvas.getByRole("button", { name: "None" })); + await userEvent.click(screen.getByText("Preset 1")); // Then select "None" to unset the field value - await userEvent.click(canvas.getByLabelText("Preset")); - await userEvent.click(canvas.getByText("None")); + await userEvent.click(screen.getByText("None")); // Fill in required fields and submit to test the API call await userEvent.type( @@ -252,6 +241,8 @@ export const PresetsWithDefault: Story = { { ID: "preset-1", Name: "Preset 1", + Description: "Preset 1 description", + Icon: "/emojis/0031-fe0f-20e3.png", Default: false, Parameters: [ { @@ -264,6 +255,8 @@ export const PresetsWithDefault: Story = { { ID: "preset-2", Name: "Preset 2", + Description: "Preset 2 description", + Icon: "/emojis/0032-fe0f-20e3.png", Default: true, Parameters: [ { @@ -282,6 +275,10 @@ export const PresetsWithDefault: Story = { }, play: async ({ canvasElement }) => { const canvas = within(canvasElement); + // Should have the default preset listed first + await waitFor(() => + expect(canvas.getByRole("button", { name: "Preset 2 (Default)" })), + ); // Wait for the switch to be available since preset parameters are populated asynchronously await canvas.findByLabelText("Show preset parameters"); // Toggle off the show preset parameters switch diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageView.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageView.tsx index ceac49988c0a5..2f15f0e097a08 100644 --- a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageView.tsx +++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageView.tsx @@ -6,7 +6,7 @@ import { Alert } from "components/Alert/Alert"; import { ErrorAlert } from "components/Alert/ErrorAlert"; import { Avatar } from "components/Avatar/Avatar"; import { Button } from "components/Button/Button"; -import { SelectFilter } from "components/Filter/SelectFilter"; +import { Combobox } from "components/Combobox/Combobox"; import { FormFields, FormFooter, @@ -28,9 +28,11 @@ import { UserAutocomplete } from "components/UserAutocomplete/UserAutocomplete"; import { type FormikContextType, useFormik } from "formik"; import type { ExternalAuthPollingState } from "hooks/useExternalAuth"; import { ExternalLinkIcon } from "lucide-react"; +import { linkToTemplate, useLinks } from "modules/navigation"; +import { ClassicParameterFlowDeprecationWarning } from "modules/workspaces/ClassicParameterFlowDeprecationWarning/ClassicParameterFlowDeprecationWarning"; import { generateWorkspaceName } from "modules/workspaces/generateWorkspaceName"; import { type FC, useCallback, useEffect, useMemo, useState } from "react"; -import { Link } from "react-router-dom"; +import { Link } from "react-router"; import { getFormHelpers, nameValidator, @@ -68,6 +70,7 @@ interface CreateWorkspacePageViewProps { autofillParameters: AutofillBuildParameter[]; presets: TypesGen.Preset[]; permissions: CreateWorkspacePermissions; + templatePermissions: { canUpdateTemplate: boolean }; creatingWorkspace: boolean; canUpdateTemplate?: boolean; onCancel: () => void; @@ -94,11 +97,13 @@ export const CreateWorkspacePageView: FC = ({ autofillParameters, presets = [], permissions, + templatePermissions, creatingWorkspace, canUpdateTemplate, onSubmit, onCancel, }) => { + const getLink = useLinks(); const [owner, setOwner] = useState(defaultOwner); const [suggestedName, setSuggestedName] = useState(() => generateWorkspaceName(), @@ -153,16 +158,18 @@ export const CreateWorkspacePageView: FC = ({ ); const [presetOptions, setPresetOptions] = useState([ - { label: "None", value: "" }, + { displayName: "None", value: "undefined", icon: "", description: "" }, ]); const [selectedPresetIndex, setSelectedPresetIndex] = useState(0); // Build options and keep default label/value in sync useEffect(() => { const options = [ - { label: "None", value: "" }, - ...presets.map((p) => ({ - label: p.Default ? `${p.Name} (Default)` : p.Name, - value: p.ID, + { displayName: "None", value: "undefined", icon: "", description: "" }, + ...presets.map((preset) => ({ + displayName: preset.Default ? `${preset.Name} (Default)` : preset.Name, + value: preset.ID, + icon: preset.Icon, + description: preset.Description, })), ]; setPresetOptions(options); @@ -261,6 +268,13 @@ export const CreateWorkspacePageView: FC = ({ + + = ({ - { + placeholder="Select a preset" + onSelect={(value) => { const index = presetOptions.findIndex( - (preset) => preset.value === option?.value, + (preset) => preset.value === value, ); if (index === -1) { return; @@ -393,12 +410,13 @@ export const CreateWorkspacePageView: FC = ({ setSelectedPresetIndex(index); form.setFieldValue( "template_version_preset_id", - // Empty string is equivalent to using None - option?.value === "" ? undefined : option?.value, + // "undefined" string is equivalent to using None option + // Combobox requires a value in order to correctly highlight the None option + presetOptions[index].value === "undefined" + ? undefined + : presetOptions[index].value, ); }} - placeholder="Select a preset" - selectedOption={presetOptions[selectedPresetIndex]} /> {/* Only show the preset parameter visibility toggle if preset parameters are actually being modified, otherwise it has no effect. */} diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.stories.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.stories.tsx index 0fcf5d7fbb854..5faf991b876f1 100644 --- a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.stories.tsx +++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.stories.tsx @@ -1,7 +1,7 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { DetailedError } from "api/errors"; import { chromatic } from "testHelpers/chromatic"; import { MockTemplate, MockUserOwner } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { DetailedError } from "api/errors"; import { CreateWorkspacePageViewExperimental } from "./CreateWorkspacePageViewExperimental"; const meta: Meta = { diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx index 117f67e5d931a..cf1fd1746ce44 100644 --- a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx +++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx @@ -5,16 +5,10 @@ import { ErrorAlert } from "components/Alert/ErrorAlert"; import { Avatar } from "components/Avatar/Avatar"; import { Badge } from "components/Badge/Badge"; import { Button } from "components/Button/Button"; +import { Combobox } from "components/Combobox/Combobox"; import { Input } from "components/Input/Input"; import { Label } from "components/Label/Label"; import { Link } from "components/Link/Link"; -import { - Select, - SelectContent, - SelectItem, - SelectTrigger, - SelectValue, -} from "components/Select/Select"; import { Spinner } from "components/Spinner/Spinner"; import { Switch } from "components/Switch/Switch"; import { @@ -43,7 +37,7 @@ import { useRef, useState, } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { docs } from "utils/docs"; import { nameValidator } from "utils/formUtils"; import type { AutofillBuildParameter } from "utils/richParameters"; @@ -110,9 +104,7 @@ export const CreateWorkspacePageViewExperimental: FC< owner, setOwner, }) => { - const [suggestedName, setSuggestedName] = useState(() => - generateWorkspaceName(), - ); + const [suggestedName, setSuggestedName] = useState(generateWorkspaceName); const [showPresetParameters, setShowPresetParameters] = useState(false); const id = useId(); const workspaceNameInputRef = useRef(null); @@ -126,14 +118,8 @@ export const CreateWorkspacePageViewExperimental: FC< // Only touched fields are sent to the websocket // Autofilled parameters are marked as touched since they have been modified - const initialTouched = parameters.reduce( - (touched, parameter) => { - if (autofillByName[parameter.name] !== undefined) { - touched[parameter.name] = true; - } - return touched; - }, - {} as Record, + const initialTouched = Object.fromEntries( + parameters.filter((p) => autofillByName[p.name]).map((p) => [p, true]), ); // The form parameters values hold the working state of the parameters that will be submitted when creating a workspace @@ -186,35 +172,66 @@ export const CreateWorkspacePageViewExperimental: FC< }, [form.submitCount, form.errors]); const [presetOptions, setPresetOptions] = useState([ - { label: "None", value: "None" }, + { displayName: "None", value: "undefined", icon: "", description: "" }, ]); + const [selectedPresetIndex, setSelectedPresetIndex] = useState(0); + // Build options and keep default label/value in sync useEffect(() => { - setPresetOptions([ - { label: "None", value: "None" }, + const options = [ + { displayName: "None", value: "undefined", icon: "", description: "" }, ...presets.map((preset) => ({ - label: preset.Default ? `${preset.Name} (Default)` : preset.Name, + displayName: preset.Default ? `${preset.Name} (Default)` : preset.Name, value: preset.ID, + icon: preset.Icon, + description: preset.Description, })), - ]); - }, [presets]); - - const [selectedPresetIndex, setSelectedPresetIndex] = useState(0); - - // Set default preset when presets are loaded - useEffect(() => { - const defaultPreset = presets.find((preset) => preset.Default); + ]; + setPresetOptions(options); + const defaultPreset = presets.find((p) => p.Default); if (defaultPreset) { - // +1 because "None" is at index 0 - const defaultIndex = - presets.findIndex((preset) => preset.ID === defaultPreset.ID) + 1; - setSelectedPresetIndex(defaultIndex); + const idx = presets.indexOf(defaultPreset) + 1; // +1 for "None" + setSelectedPresetIndex(idx); + form.setFieldValue("template_version_preset_id", defaultPreset.ID); + } else { + setSelectedPresetIndex(0); // Explicitly set to "None" + form.setFieldValue("template_version_preset_id", undefined); } - }, [presets]); + }, [presets, form.setFieldValue]); const [presetParameterNames, setPresetParameterNames] = useState( [], ); + // include any modified parameters and all touched parameters to the websocket request + const sendDynamicParamsRequest = useCallback( + ( + parameters: Array<{ parameter: PreviewParameter; value: string }>, + ownerId?: string, + ) => { + const formInputs: Record = {}; + const formParameters = form.values.rich_parameter_values ?? []; + + for (const { parameter, value } of parameters) { + formInputs[parameter.name] = value; + } + + for (const [fieldName, isTouched] of Object.entries(form.touched)) { + if ( + isTouched && + !parameters.some((p) => p.parameter.name === fieldName) + ) { + const param = formParameters.find((p) => p.name === fieldName); + if (param?.value) { + formInputs[fieldName] = param.value; + } + } + } + + sendMessage(formInputs, ownerId); + }, + [form.touched, form.values.rich_parameter_values, sendMessage], + ); + useEffect(() => { const selectedPresetOption = presetOptions[selectedPresetIndex]; let selectedPreset: TypesGen.Preset | undefined; @@ -287,35 +304,9 @@ export const CreateWorkspacePageViewExperimental: FC< form.setFieldTouched, parameters, form.values.rich_parameter_values, + sendDynamicParamsRequest, ]); - // include any modified parameters and all touched parameters to the websocket request - const sendDynamicParamsRequest = ( - parameters: Array<{ parameter: PreviewParameter; value: string }>, - ownerId?: string, - ) => { - const formInputs: Record = {}; - const formParameters = form.values.rich_parameter_values ?? []; - - for (const { parameter, value } of parameters) { - formInputs[parameter.name] = value; - } - - for (const [fieldName, isTouched] of Object.entries(form.touched)) { - if ( - isTouched && - !parameters.some((p) => p.parameter.name === fieldName) - ) { - const param = formParameters.find((p) => p.name === fieldName); - if (param?.value) { - formInputs[fieldName] = param.value; - } - } - } - - sendMessage(formInputs, ownerId); - }; - const handleOwnerChange = (user: TypesGen.User) => { setOwner(user); sendDynamicParamsRequest([], user.id); @@ -407,7 +398,7 @@ export const CreateWorkspacePageViewExperimental: FC<
    View docs @@ -555,7 +546,7 @@ export const CreateWorkspacePageViewExperimental: FC< parameters cannot be modified once the workspace is created. View docs @@ -572,11 +563,15 @@ export const CreateWorkspacePageViewExperimental: FC<
    - + />
    {/* Only show the preset parameter visibility toggle if preset parameters are actually being modified, otherwise it is ineffectual */} {presetParameterNames.length > 0 && ( diff --git a/site/src/pages/CreateWorkspacePage/ExternalAuthButton.stories.tsx b/site/src/pages/CreateWorkspacePage/ExternalAuthButton.stories.tsx index 0345bc34a2e74..ac0e3a28d988c 100644 --- a/site/src/pages/CreateWorkspacePage/ExternalAuthButton.stories.tsx +++ b/site/src/pages/CreateWorkspacePage/ExternalAuthButton.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import type { TemplateVersionExternalAuth } from "api/typesGenerated"; import { ExternalAuthButton } from "./ExternalAuthButton"; diff --git a/site/src/pages/CreateWorkspacePage/SelectedTemplate.stories.tsx b/site/src/pages/CreateWorkspacePage/SelectedTemplate.stories.tsx index 142a809654b51..b4125ac999b2c 100644 --- a/site/src/pages/CreateWorkspacePage/SelectedTemplate.stories.tsx +++ b/site/src/pages/CreateWorkspacePage/SelectedTemplate.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockTemplate } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { SelectedTemplate } from "./SelectedTemplate"; const meta: Meta = { diff --git a/site/src/pages/DeploymentSettingsPage/AppearanceSettingsPage/AnnouncementBannerDialog.stories.tsx b/site/src/pages/DeploymentSettingsPage/AppearanceSettingsPage/AnnouncementBannerDialog.stories.tsx index 619a20ec9d69c..1c2a3e9de61b1 100644 --- a/site/src/pages/DeploymentSettingsPage/AppearanceSettingsPage/AnnouncementBannerDialog.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/AppearanceSettingsPage/AnnouncementBannerDialog.stories.tsx @@ -1,5 +1,5 @@ -import { action } from "@storybook/addon-actions"; -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { action } from "storybook/actions"; import { AnnouncementBannerDialog } from "./AnnouncementBannerDialog"; const meta: Meta = { diff --git a/site/src/pages/DeploymentSettingsPage/AppearanceSettingsPage/AnnouncementBannerSettings.tsx b/site/src/pages/DeploymentSettingsPage/AppearanceSettingsPage/AnnouncementBannerSettings.tsx index 3eccfb31756fd..d2625973065a6 100644 --- a/site/src/pages/DeploymentSettingsPage/AppearanceSettingsPage/AnnouncementBannerSettings.tsx +++ b/site/src/pages/DeploymentSettingsPage/AppearanceSettingsPage/AnnouncementBannerSettings.tsx @@ -125,7 +125,7 @@ export const AnnouncementBannerSettings: FC< {!isEntitled || banners.length < 1 ? ( @@ -161,7 +161,7 @@ export const AnnouncementBannerSettings: FC< }, ]} > -
    +

    Your license does not include Service Banners.{" "} Contact sales to diff --git a/site/src/pages/DeploymentSettingsPage/AppearanceSettingsPage/AppearanceSettingsPageView.stories.tsx b/site/src/pages/DeploymentSettingsPage/AppearanceSettingsPage/AppearanceSettingsPageView.stories.tsx index 1670006dbf060..72c5bd4f59d2f 100644 --- a/site/src/pages/DeploymentSettingsPage/AppearanceSettingsPage/AppearanceSettingsPageView.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/AppearanceSettingsPage/AppearanceSettingsPageView.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { AppearanceSettingsPageView } from "./AppearanceSettingsPageView"; const meta: Meta = { diff --git a/site/src/pages/DeploymentSettingsPage/AppearanceSettingsPage/AppearanceSettingsPageView.tsx b/site/src/pages/DeploymentSettingsPage/AppearanceSettingsPage/AppearanceSettingsPageView.tsx index 4988f95ea7cc2..010c7a999e98f 100644 --- a/site/src/pages/DeploymentSettingsPage/AppearanceSettingsPage/AppearanceSettingsPageView.tsx +++ b/site/src/pages/DeploymentSettingsPage/AppearanceSettingsPage/AppearanceSettingsPageView.tsx @@ -7,17 +7,17 @@ import { PremiumBadge, } from "components/Badges/Badges"; import { Button } from "components/Button/Button"; +import { + Popover, + PopoverContent, + PopoverTrigger, +} from "components/deprecated/Popover/Popover"; import { PopoverPaywall } from "components/Paywall/PopoverPaywall"; import { SettingsHeader, SettingsHeaderDescription, SettingsHeaderTitle, } from "components/SettingsHeader/SettingsHeader"; -import { - Popover, - PopoverContent, - PopoverTrigger, -} from "components/deprecated/Popover/Popover"; import { useFormik } from "formik"; import type { FC } from "react"; import { getFormHelpers } from "utils/formUtils"; diff --git a/site/src/pages/DeploymentSettingsPage/ExternalAuthSettingsPage/ExternalAuthSettingsPageView.stories.tsx b/site/src/pages/DeploymentSettingsPage/ExternalAuthSettingsPage/ExternalAuthSettingsPageView.stories.tsx index 38a74a8e735f4..5184219b38cca 100644 --- a/site/src/pages/DeploymentSettingsPage/ExternalAuthSettingsPage/ExternalAuthSettingsPageView.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/ExternalAuthSettingsPage/ExternalAuthSettingsPageView.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { ExternalAuthSettingsPageView } from "./ExternalAuthSettingsPageView"; const meta: Meta = { diff --git a/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/ExportPolicyButton.stories.tsx b/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/ExportPolicyButton.stories.tsx index a4206c3b04a1e..8257e658ae8e8 100644 --- a/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/ExportPolicyButton.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/ExportPolicyButton.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, fn, userEvent, waitFor, within } from "@storybook/test"; import { MockOrganizationSyncSettings } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { expect, fn, userEvent, waitFor, within } from "storybook/test"; import { ExportPolicyButton } from "./ExportPolicyButton"; const meta: Meta = { diff --git a/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/IdpOrgSyncPage.tsx b/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/IdpOrgSyncPage.tsx index fcbbedc4f7265..38a76a5f3d43d 100644 --- a/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/IdpOrgSyncPage.tsx +++ b/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/IdpOrgSyncPage.tsx @@ -5,8 +5,7 @@ import { patchOrganizationSyncSettings, } from "api/queries/idpsync"; import { ChooseOne, Cond } from "components/Conditionals/ChooseOne"; -import { displayError } from "components/GlobalSnackbar/utils"; -import { displaySuccess } from "components/GlobalSnackbar/utils"; +import { displayError, displaySuccess } from "components/GlobalSnackbar/utils"; import { Link } from "components/Link/Link"; import { Loader } from "components/Loader/Loader"; import { Paywall } from "components/Paywall/Paywall"; diff --git a/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/IdpOrgSyncPageView.stories.tsx b/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/IdpOrgSyncPageView.stories.tsx index 430fce3a2ee05..148e061028284 100644 --- a/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/IdpOrgSyncPageView.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/IdpOrgSyncPageView.stories.tsx @@ -1,5 +1,3 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, userEvent, within } from "@storybook/test"; import { MockOrganization, MockOrganization2, @@ -7,6 +5,8 @@ import { MockOrganizationSyncSettings2, MockOrganizationSyncSettingsEmpty, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { expect, userEvent, within } from "storybook/test"; import { IdpOrgSyncPageView } from "./IdpOrgSyncPageView"; const meta: Meta = { diff --git a/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/IdpOrgSyncPageView.tsx b/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/IdpOrgSyncPageView.tsx index 3fb267fb9daac..1feb4a8707f9b 100644 --- a/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/IdpOrgSyncPageView.tsx +++ b/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/IdpOrgSyncPageView.tsx @@ -215,7 +215,7 @@ export const IdpOrgSyncPageView: FC = ({ )}

    -
    +
    diff --git a/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/OrganizationPills.tsx b/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/OrganizationPills.tsx index 9e26368e9c2cb..030e3889cac41 100644 --- a/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/OrganizationPills.tsx +++ b/site/src/pages/DeploymentSettingsPage/IdpOrgSyncPage/OrganizationPills.tsx @@ -1,10 +1,10 @@ import { useTheme } from "@emotion/react"; -import { Pill } from "components/Pill/Pill"; import { Popover, PopoverContent, PopoverTrigger, } from "components/deprecated/Popover/Popover"; +import { Pill } from "components/Pill/Pill"; import type { FC } from "react"; import { cn } from "utils/cn"; import { isUUID } from "utils/uuid"; diff --git a/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/AddNewLicensePage.tsx b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/AddNewLicensePage.tsx index bb08c37218e18..aaa1f1706101d 100644 --- a/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/AddNewLicensePage.tsx +++ b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/AddNewLicensePage.tsx @@ -3,7 +3,7 @@ import { displayError, displaySuccess } from "components/GlobalSnackbar/utils"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation } from "react-query"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; import { pageTitle } from "utils/page"; import { AddNewLicensePageView } from "./AddNewLicensePageView"; diff --git a/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/AddNewLicensePageView.tsx b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/AddNewLicensePageView.tsx index e46c43fb7e05f..2815bfe15aa83 100644 --- a/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/AddNewLicensePageView.tsx +++ b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/AddNewLicensePageView.tsx @@ -11,7 +11,7 @@ import { import { Stack } from "components/Stack/Stack"; import { ChevronLeftIcon } from "lucide-react"; import type { FC } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { Fieldset } from "../Fieldset"; import { DividerWithText } from "./DividerWithText"; diff --git a/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicenseCard.test.tsx b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicenseCard.test.tsx index 6a172b701e66d..59f1182ac7c00 100644 --- a/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicenseCard.test.tsx +++ b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicenseCard.test.tsx @@ -1,6 +1,6 @@ -import { screen } from "@testing-library/react"; import { MockLicenseResponse } from "testHelpers/entities"; import { render } from "testHelpers/renderHelpers"; +import { screen } from "@testing-library/react"; import { LicenseCard } from "./LicenseCard"; describe("LicenseCard", () => { diff --git a/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicenseSeatConsumptionChart.stories.tsx b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicenseSeatConsumptionChart.stories.tsx index 4a872d2470b7e..e45bfa0d74cb4 100644 --- a/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicenseSeatConsumptionChart.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicenseSeatConsumptionChart.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { LicenseSeatConsumptionChart } from "./LicenseSeatConsumptionChart"; const meta: Meta = { diff --git a/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicenseSeatConsumptionChart.tsx b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicenseSeatConsumptionChart.tsx index 73fa3508aa58b..3f91f58b8d678 100644 --- a/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicenseSeatConsumptionChart.tsx +++ b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicenseSeatConsumptionChart.tsx @@ -14,7 +14,7 @@ import { Link } from "components/Link/Link"; import { Spinner } from "components/Spinner/Spinner"; import { ChevronRightIcon } from "lucide-react"; import type { FC } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { Area, AreaChart, @@ -81,17 +81,18 @@ export const LicenseSeatConsumptionChart: FC<

    • -
      +
      + + Legend for active users in the chart + +
      The user was active at least once during the last 90 days.
    • -
      +
      + + Legend for license seat limit in the chart +
      Current license seat limit, or the maximum number of allowed @@ -179,7 +180,7 @@ export const LicenseSeatConsumptionChart: FC< const item = p[0]; return `${item.value} seats`; }} - formatter={(v, n, item) => { + formatter={(_v, _n, item) => { const date = new Date(item.payload.date); return date.toLocaleString(undefined, { month: "long", diff --git a/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicensesSettingsPage.tsx b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicensesSettingsPage.tsx index 5f617412a0c04..fe3fe0975e69f 100644 --- a/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicensesSettingsPage.tsx +++ b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicensesSettingsPage.tsx @@ -7,7 +7,7 @@ import { useEmbeddedMetadata } from "hooks/useEmbeddedMetadata"; import { type FC, useEffect, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; -import { useSearchParams } from "react-router-dom"; +import { useSearchParams } from "react-router"; import { pageTitle } from "utils/page"; import LicensesSettingsPageView from "./LicensesSettingsPageView"; @@ -85,6 +85,9 @@ const LicensesSettingsPage: FC = () => { isRemovingLicense={isRemovingLicense} removeLicense={(licenseId: number) => removeLicenseApi(licenseId)} activeUsers={userStatusCount?.active} + managedAgentFeature={ + entitlementsQuery.data?.features.managed_agent_limit + } refreshEntitlements={async () => { try { await refreshEntitlementsMutation.mutateAsync(); diff --git a/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicensesSettingsPageView.tsx b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicensesSettingsPageView.tsx index eb60361883b72..2a0b7f8d39b55 100644 --- a/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicensesSettingsPageView.tsx +++ b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/LicensesSettingsPageView.tsx @@ -4,7 +4,7 @@ import MuiLink from "@mui/material/Link"; import Skeleton from "@mui/material/Skeleton"; import Tooltip from "@mui/material/Tooltip"; import type { GetLicensesResponse } from "api/api"; -import type { UserStatusChangeCount } from "api/typesGenerated"; +import type { Feature, UserStatusChangeCount } from "api/typesGenerated"; import { Button } from "components/Button/Button"; import { SettingsHeader, @@ -17,9 +17,10 @@ import { useWindowSize } from "hooks/useWindowSize"; import { PlusIcon, RotateCwIcon } from "lucide-react"; import type { FC } from "react"; import Confetti from "react-confetti"; -import { Link } from "react-router-dom"; +import { Link } from "react-router"; import { LicenseCard } from "./LicenseCard"; import { LicenseSeatConsumptionChart } from "./LicenseSeatConsumptionChart"; +import { ManagedAgentsConsumption } from "./ManagedAgentsConsumption"; type Props = { showConfetti: boolean; @@ -32,6 +33,7 @@ type Props = { removeLicense: (licenseId: number) => void; refreshEntitlements: () => void; activeUsers: UserStatusChangeCount[] | undefined; + managedAgentFeature?: Feature; }; const LicensesSettingsPageView: FC = ({ @@ -45,6 +47,7 @@ const LicensesSettingsPageView: FC = ({ removeLicense, refreshEntitlements, activeUsers, + managedAgentFeature, }) => { const theme = useTheme(); const { width, height } = useWindowSize(); @@ -151,6 +154,10 @@ const LicensesSettingsPageView: FC = ({ }))} /> )} + + {licenses && licenses.length > 0 && ( + + )}
      ); diff --git a/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/ManagedAgentsConsumption.stories.tsx b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/ManagedAgentsConsumption.stories.tsx new file mode 100644 index 0000000000000..24b65093d384b --- /dev/null +++ b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/ManagedAgentsConsumption.stories.tsx @@ -0,0 +1,213 @@ +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { ManagedAgentsConsumption } from "./ManagedAgentsConsumption"; + +const meta: Meta = { + title: + "pages/DeploymentSettingsPage/LicensesSettingsPage/ManagedAgentsConsumption", + component: ManagedAgentsConsumption, + args: { + managedAgentFeature: { + enabled: true, + actual: 50000, + soft_limit: 60000, + limit: 120000, + usage_period: { + start: "February 27, 2025", + end: "February 27, 2026", + issued_at: "February 27, 2025", + }, + entitlement: "entitled", + }, + }, +}; + +export default meta; +type Story = StoryObj; + +export const Default: Story = {}; + +export const ZeroUsage: Story = { + args: { + managedAgentFeature: { + enabled: true, + actual: 0, + soft_limit: 60000, + limit: 120000, + usage_period: { + start: "February 27, 2025", + end: "February 27, 2026", + issued_at: "February 27, 2025", + }, + entitlement: "entitled", + }, + }, +}; + +export const NearLimit: Story = { + args: { + managedAgentFeature: { + enabled: true, + actual: 115000, + soft_limit: 60000, + limit: 120000, + usage_period: { + start: "February 27, 2025", + end: "February 27, 2026", + issued_at: "February 27, 2025", + }, + entitlement: "entitled", + }, + }, +}; + +export const OverIncluded: Story = { + args: { + managedAgentFeature: { + enabled: true, + actual: 80000, + soft_limit: 60000, + limit: 120000, + usage_period: { + start: "February 27, 2025", + end: "February 27, 2026", + issued_at: "February 27, 2025", + }, + entitlement: "entitled", + }, + }, +}; + +export const LowUsage: Story = { + args: { + managedAgentFeature: { + enabled: true, + actual: 25000, + soft_limit: 60000, + limit: 120000, + usage_period: { + start: "February 27, 2025", + end: "February 27, 2026", + issued_at: "February 27, 2025", + }, + entitlement: "entitled", + }, + }, +}; + +export const IncludedAtLimit: Story = { + args: { + managedAgentFeature: { + enabled: true, + actual: 25000, + soft_limit: 30500, + limit: 30500, + usage_period: { + start: "February 27, 2025", + end: "February 27, 2026", + issued_at: "February 27, 2025", + }, + entitlement: "entitled", + }, + }, +}; + +export const Disabled: Story = { + args: { + managedAgentFeature: { + enabled: false, + actual: undefined, + soft_limit: undefined, + limit: undefined, + usage_period: undefined, + entitlement: "not_entitled", + }, + }, +}; + +export const NoFeature: Story = { + args: { + managedAgentFeature: undefined, + }, +}; + +// Error States for Validation +export const ErrorMissingData: Story = { + args: { + managedAgentFeature: { + enabled: true, + actual: undefined, + soft_limit: undefined, + limit: undefined, + usage_period: undefined, + entitlement: "entitled", + }, + }, +}; + +export const ErrorNegativeValues: Story = { + args: { + managedAgentFeature: { + enabled: true, + actual: -100, + soft_limit: 60000, + limit: 120000, + usage_period: { + start: "February 27, 2025", + end: "February 27, 2026", + issued_at: "February 27, 2025", + }, + entitlement: "entitled", + }, + }, +}; + +export const ErrorSoftLimitExceedsLimit: Story = { + args: { + managedAgentFeature: { + enabled: true, + actual: 50000, + soft_limit: 150000, + limit: 120000, + usage_period: { + start: "February 27, 2025", + end: "February 27, 2026", + issued_at: "February 27, 2025", + }, + entitlement: "entitled", + }, + }, +}; + +export const ErrorInvalidDates: Story = { + args: { + managedAgentFeature: { + enabled: true, + actual: 50000, + soft_limit: 60000, + limit: 120000, + usage_period: { + start: "invalid-date", + end: "February 27, 2026", + issued_at: "February 27, 2025", + }, + entitlement: "entitled", + }, + }, +}; + +export const ErrorEndBeforeStart: Story = { + args: { + managedAgentFeature: { + enabled: true, + actual: 50000, + soft_limit: 60000, + limit: 120000, + usage_period: { + start: "February 27, 2026", + end: "February 27, 2025", + issued_at: "February 27, 2025", + }, + entitlement: "entitled", + }, + }, +}; diff --git a/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/ManagedAgentsConsumption.tsx b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/ManagedAgentsConsumption.tsx new file mode 100644 index 0000000000000..022627c11dc02 --- /dev/null +++ b/site/src/pages/DeploymentSettingsPage/LicensesSettingsPage/ManagedAgentsConsumption.tsx @@ -0,0 +1,205 @@ +import MuiLink from "@mui/material/Link"; +import type { Feature } from "api/typesGenerated"; +import { ErrorAlert } from "components/Alert/ErrorAlert"; +import { Button } from "components/Button/Button"; +import { + Collapsible, + CollapsibleContent, + CollapsibleTrigger, +} from "components/Collapsible/Collapsible"; +import { Stack } from "components/Stack/Stack"; +import dayjs from "dayjs"; +import { ChevronRightIcon } from "lucide-react"; +import type { FC } from "react"; +import { docs } from "utils/docs"; + +interface ManagedAgentsConsumptionProps { + managedAgentFeature?: Feature; +} + +export const ManagedAgentsConsumption: FC = ({ + managedAgentFeature, +}) => { + // If no feature is provided or it's disabled, show disabled state + if (!managedAgentFeature?.enabled) { + return ( +
      + + + Managed AI Agents Disabled + + Managed AI agents are not included in your current license. + Contact sales to + upgrade your license and unlock this feature. + + + +
      + ); + } + + const usage = managedAgentFeature.actual; + const included = managedAgentFeature.soft_limit; + const limit = managedAgentFeature.limit; + const startDate = managedAgentFeature.usage_period?.start; + const endDate = managedAgentFeature.usage_period?.end; + + if (usage === undefined || usage < 0) { + return ; + } + + if ( + included === undefined || + included < 0 || + limit === undefined || + limit < 0 + ) { + return ; + } + + if (!startDate || !endDate) { + return ; + } + + const start = dayjs(startDate); + const end = dayjs(endDate); + if (!start.isValid() || !end.isValid() || !start.isBefore(end)) { + return ; + } + + const usagePercentage = Math.min((usage / limit) * 100, 100); + const includedPercentage = Math.min((included / limit) * 100, 100); + const remainingPercentage = Math.max(100 - includedPercentage, 0); + + return ( +
      +
      + +
      +

      Managed AI Agents Usage

      + + + + +
      + + +

      + + Coder Tasks + {" "} + and upcoming managed AI features are included in Coder Premium + licenses during beta. Usage limits and pricing subject to change. +

      +
        +
      • +
        + Legend for started workspaces +
        + Amount of started workspaces with an AI agent. +
      • +
      • +
        + Legend for included allowance +
        + Included allowance from your current license plan. +
      • +
      • +
        + + Legend for total limit in the chart + +
        +
        + Total limit after which further AI workspace builds will be + blocked. +
      • +
      +
      +
      +
      + +
      +
      + + {startDate ? dayjs(startDate).format("MMMM D, YYYY") : ""} + + {endDate ? dayjs(endDate).format("MMMM D, YYYY") : ""} +
      + +
      +
      + +
      +
      + +
      +
      + Actual: + {usage.toLocaleString()} +
      + +
      + Included: + {included.toLocaleString()} +
      + +
      + Limit: + {limit.toLocaleString()} +
      +
      + +
      +
      +
      + Actual: + {usage.toLocaleString()} +
      +
      + Included: + {included.toLocaleString()} +
      +
      + Limit: + {limit.toLocaleString()} +
      +
      +
      +
      +
      + ); +}; diff --git a/site/src/pages/DeploymentSettingsPage/NetworkSettingsPage/NetworkSettingsPageView.stories.tsx b/site/src/pages/DeploymentSettingsPage/NetworkSettingsPage/NetworkSettingsPageView.stories.tsx index f9d0610a7dfa4..f8b1b1cad2166 100644 --- a/site/src/pages/DeploymentSettingsPage/NetworkSettingsPage/NetworkSettingsPageView.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/NetworkSettingsPage/NetworkSettingsPageView.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import type { SerpentGroup } from "api/typesGenerated"; import { NetworkSettingsPageView } from "./NetworkSettingsPageView"; diff --git a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.stories.tsx b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.stories.tsx index 61a1eddcd1a78..1b1a93605c676 100644 --- a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.stories.tsx @@ -1,9 +1,9 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { spyOn, userEvent, within } from "@storybook/test"; +import { MockNotificationTemplates } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { API } from "api/api"; import { selectTemplatesByGroup } from "api/queries/notifications"; import type { DeploymentValues } from "api/typesGenerated"; -import { MockNotificationTemplates } from "testHelpers/entities"; +import { spyOn, userEvent, within } from "storybook/test"; import { NotificationEvents } from "./NotificationEvents"; import { baseMeta } from "./storybookUtils"; diff --git a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.tsx b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.tsx index 38c36fc52c044..32f4d56ed9909 100644 --- a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.tsx +++ b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationEvents.tsx @@ -18,10 +18,10 @@ import { Alert } from "components/Alert/Alert"; import { displayError, displaySuccess } from "components/GlobalSnackbar/utils"; import { Stack } from "components/Stack/Stack"; import { - type NotificationMethod, castNotificationMethod, methodIcons, methodLabels, + type NotificationMethod, } from "modules/notifications/utils"; import { type FC, Fragment } from "react"; import { useMutation, useQueryClient } from "react-query"; diff --git a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationsPage.stories.tsx b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationsPage.stories.tsx index c7d57e5ff0d53..e35348e027e56 100644 --- a/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationsPage.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/NotificationsPage/NotificationsPage.stories.tsx @@ -1,13 +1,13 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { userEvent, within } from "@storybook/test"; -import { - notificationDispatchMethodsKey, - systemNotificationTemplatesKey, -} from "api/queries/notifications"; import { MockNotificationMethodsResponse, MockNotificationTemplates, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { + notificationDispatchMethodsKey, + systemNotificationTemplatesKey, +} from "api/queries/notifications"; +import { userEvent, within } from "storybook/test"; import NotificationsPage from "./NotificationsPage"; import { baseMeta } from "./storybookUtils"; diff --git a/site/src/pages/DeploymentSettingsPage/NotificationsPage/Troubleshooting.stories.tsx b/site/src/pages/DeploymentSettingsPage/NotificationsPage/Troubleshooting.stories.tsx index bd3deeeee7c26..a2afce8d7f900 100644 --- a/site/src/pages/DeploymentSettingsPage/NotificationsPage/Troubleshooting.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/NotificationsPage/Troubleshooting.stories.tsx @@ -1,8 +1,8 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { spyOn, userEvent, within } from "@storybook/test"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { API } from "api/api"; -import { Troubleshooting } from "./Troubleshooting"; +import { spyOn, userEvent, within } from "storybook/test"; import { baseMeta } from "./storybookUtils"; +import { Troubleshooting } from "./Troubleshooting"; const meta: Meta = { title: "pages/DeploymentSettingsPage/NotificationsPage/Troubleshooting", diff --git a/site/src/pages/DeploymentSettingsPage/NotificationsPage/storybookUtils.ts b/site/src/pages/DeploymentSettingsPage/NotificationsPage/storybookUtils.ts index f27535d5b5397..b1c61bc95eae1 100644 --- a/site/src/pages/DeploymentSettingsPage/NotificationsPage/storybookUtils.ts +++ b/site/src/pages/DeploymentSettingsPage/NotificationsPage/storybookUtils.ts @@ -1,9 +1,3 @@ -import type { Meta } from "@storybook/react"; -import { - notificationDispatchMethodsKey, - systemNotificationTemplatesKey, -} from "api/queries/notifications"; -import type { DeploymentValues, SerpentOption } from "api/typesGenerated"; import { MockNotificationMethodsResponse, MockNotificationTemplates, @@ -15,6 +9,12 @@ import { withGlobalSnackbar, withOrganizationSettingsProvider, } from "testHelpers/storybook"; +import type { Meta } from "@storybook/react-vite"; +import { + notificationDispatchMethodsKey, + systemNotificationTemplatesKey, +} from "api/queries/notifications"; +import type { DeploymentValues, SerpentOption } from "api/typesGenerated"; import type NotificationsPage from "./NotificationsPage"; // Extracted from a real API response diff --git a/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/CreateOAuth2AppPage.tsx b/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/CreateOAuth2AppPage.tsx index a1f651be5cdc9..82636e0b7a76b 100644 --- a/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/CreateOAuth2AppPage.tsx +++ b/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/CreateOAuth2AppPage.tsx @@ -3,7 +3,7 @@ import { displayError, displaySuccess } from "components/GlobalSnackbar/utils"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQueryClient } from "react-query"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; import { pageTitle } from "utils/page"; import { CreateOAuth2AppPageView } from "./CreateOAuth2AppPageView"; diff --git a/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/CreateOAuth2AppPageView.stories.tsx b/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/CreateOAuth2AppPageView.stories.tsx index fc11ce2ecdce2..f97754143b61b 100644 --- a/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/CreateOAuth2AppPageView.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/CreateOAuth2AppPageView.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { mockApiError } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { CreateOAuth2AppPageView } from "./CreateOAuth2AppPageView"; const meta: Meta = { diff --git a/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/CreateOAuth2AppPageView.tsx b/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/CreateOAuth2AppPageView.tsx index b7204f4a8557a..c4bcfb25cda0a 100644 --- a/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/CreateOAuth2AppPageView.tsx +++ b/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/CreateOAuth2AppPageView.tsx @@ -9,7 +9,7 @@ import { import { Stack } from "components/Stack/Stack"; import { ChevronLeftIcon } from "lucide-react"; import type { FC } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { OAuth2AppForm } from "./OAuth2AppForm"; type CreateOAuth2AppProps = { diff --git a/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/EditOAuth2AppPage.tsx b/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/EditOAuth2AppPage.tsx index fa9f0adada71c..dcffe9e196d82 100644 --- a/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/EditOAuth2AppPage.tsx +++ b/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/EditOAuth2AppPage.tsx @@ -4,7 +4,7 @@ import { displayError, displaySuccess } from "components/GlobalSnackbar/utils"; import { type FC, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; -import { useNavigate, useParams } from "react-router-dom"; +import { useNavigate, useParams } from "react-router"; import { pageTitle } from "utils/page"; import { EditOAuth2AppPageView } from "./EditOAuth2AppPageView"; diff --git a/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/EditOAuth2AppPageView.stories.tsx b/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/EditOAuth2AppPageView.stories.tsx index ad86d81f3243e..e5ac1f394649e 100644 --- a/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/EditOAuth2AppPageView.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/EditOAuth2AppPageView.stories.tsx @@ -1,9 +1,9 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockOAuth2ProviderAppSecrets, MockOAuth2ProviderApps, mockApiError, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { EditOAuth2AppPageView } from "./EditOAuth2AppPageView"; const meta: Meta = { diff --git a/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/EditOAuth2AppPageView.tsx b/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/EditOAuth2AppPageView.tsx index 6c5490275baea..8b18d462e794c 100644 --- a/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/EditOAuth2AppPageView.tsx +++ b/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/EditOAuth2AppPageView.tsx @@ -23,10 +23,9 @@ import { import { Spinner } from "components/Spinner/Spinner"; import { Stack } from "components/Stack/Stack"; import { TableLoader } from "components/TableLoader/TableLoader"; -import { CopyIcon } from "lucide-react"; -import { ChevronLeftIcon } from "lucide-react"; +import { ChevronLeftIcon, CopyIcon } from "lucide-react"; import { type FC, useState } from "react"; -import { Link as RouterLink, useSearchParams } from "react-router-dom"; +import { Link as RouterLink, useSearchParams } from "react-router"; import { createDayString } from "utils/createDayString"; import { OAuth2AppForm } from "./OAuth2AppForm"; diff --git a/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/OAuth2AppsSettingsPageView.stories.tsx b/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/OAuth2AppsSettingsPageView.stories.tsx index 3e2d175487694..e399044ee8236 100644 --- a/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/OAuth2AppsSettingsPageView.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/OAuth2AppsSettingsPageView.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockOAuth2ProviderApps } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import OAuth2AppsSettingsPageView from "./OAuth2AppsSettingsPageView"; const meta: Meta = { diff --git a/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/OAuth2AppsSettingsPageView.tsx b/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/OAuth2AppsSettingsPageView.tsx index 7aaadc5afeb15..55ee649353158 100644 --- a/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/OAuth2AppsSettingsPageView.tsx +++ b/site/src/pages/DeploymentSettingsPage/OAuth2AppsSettingsPage/OAuth2AppsSettingsPageView.tsx @@ -19,7 +19,7 @@ import { TableLoader } from "components/TableLoader/TableLoader"; import { useClickableTableRow } from "hooks/useClickableTableRow"; import { ChevronRightIcon, PlusIcon } from "lucide-react"; import type { FC } from "react"; -import { Link, useNavigate } from "react-router-dom"; +import { Link, useNavigate } from "react-router"; type OAuth2AppsSettingsProps = { apps?: TypesGen.OAuth2ProviderApp[]; @@ -93,7 +93,7 @@ type OAuth2AppRowProps = { }; const OAuth2AppRow: FC = ({ app }) => { - const theme = useTheme(); + const _theme = useTheme(); const navigate = useNavigate(); const clickableProps = useClickableTableRow({ onClick: () => navigate(`/deployment/oauth2-provider/apps/${app.id}`), diff --git a/site/src/pages/DeploymentSettingsPage/ObservabilitySettingsPage/ObservabilitySettingsPageView.stories.tsx b/site/src/pages/DeploymentSettingsPage/ObservabilitySettingsPage/ObservabilitySettingsPageView.stories.tsx index 6467ef0830010..2fb5af8d75838 100644 --- a/site/src/pages/DeploymentSettingsPage/ObservabilitySettingsPage/ObservabilitySettingsPageView.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/ObservabilitySettingsPage/ObservabilitySettingsPageView.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import type { SerpentGroup } from "api/typesGenerated"; import { ObservabilitySettingsPageView } from "./ObservabilitySettingsPageView"; diff --git a/site/src/pages/DeploymentSettingsPage/ObservabilitySettingsPage/ObservabilitySettingsPageView.tsx b/site/src/pages/DeploymentSettingsPage/ObservabilitySettingsPage/ObservabilitySettingsPageView.tsx index 54fbdc67c0b2b..cd152293e930b 100644 --- a/site/src/pages/DeploymentSettingsPage/ObservabilitySettingsPage/ObservabilitySettingsPageView.tsx +++ b/site/src/pages/DeploymentSettingsPage/ObservabilitySettingsPage/ObservabilitySettingsPageView.tsx @@ -4,6 +4,11 @@ import { EnterpriseBadge, PremiumBadge, } from "components/Badges/Badges"; +import { + Popover, + PopoverContent, + PopoverTrigger, +} from "components/deprecated/Popover/Popover"; import { PopoverPaywall } from "components/Paywall/PopoverPaywall"; import { SettingsHeader, @@ -12,11 +17,6 @@ import { SettingsHeaderTitle, } from "components/SettingsHeader/SettingsHeader"; import { Stack } from "components/Stack/Stack"; -import { - Popover, - PopoverContent, - PopoverTrigger, -} from "components/deprecated/Popover/Popover"; import type { FC } from "react"; import { deploymentGroupHasParent } from "utils/deployOptions"; import { docs } from "utils/docs"; @@ -32,68 +32,64 @@ export const ObservabilitySettingsPageView: FC< ObservabilitySettingsPageViewProps > = ({ options, featureAuditLogEnabled, isPremium }) => { return ( - <> - -
      - - Observability - + +
      + + Observability + - - } - > - - Audit Logging - - - Allow auditors to monitor user operations in your deployment. - - + + } + > + + Audit Logging + + + Allow auditors to monitor user operations in your deployment. + + - - - {featureAuditLogEnabled && !isPremium ? ( - - ) : ( - - - - - - )} + + + {featureAuditLogEnabled && !isPremium ? ( + + ) : ( + + + + + + )} - - - - - -
      + + + + + +
      -
      - - - Monitoring - - - Monitoring your Coder application with logs and metrics. - - +
      + + + Monitoring + + + Monitoring your Coder application with logs and metrics. + + - - deploymentGroupHasParent(o.group, "Introspection"), - )} - /> -
      - - + + deploymentGroupHasParent(o.group, "Introspection"), + )} + /> +
      +
      ); }; diff --git a/site/src/pages/DeploymentSettingsPage/Option.tsx b/site/src/pages/DeploymentSettingsPage/Option.tsx index a52db293610d7..3f2d848509a46 100644 --- a/site/src/pages/DeploymentSettingsPage/Option.tsx +++ b/site/src/pages/DeploymentSettingsPage/Option.tsx @@ -1,4 +1,4 @@ -import { type Interpolation, type Theme, css, useTheme } from "@emotion/react"; +import { css, type Interpolation, type Theme, useTheme } from "@emotion/react"; import BuildCircleOutlinedIcon from "@mui/icons-material/BuildCircleOutlined"; import { DisabledBadge, EnabledBadge } from "components/Badges/Badges"; import type { FC, HTMLAttributes, PropsWithChildren } from "react"; diff --git a/site/src/pages/DeploymentSettingsPage/OverviewPage/OverviewPageView.stories.tsx b/site/src/pages/DeploymentSettingsPage/OverviewPage/OverviewPageView.stories.tsx index 24e121b9ff0f5..b77d69a485ef3 100644 --- a/site/src/pages/DeploymentSettingsPage/OverviewPage/OverviewPageView.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/OverviewPage/OverviewPageView.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockDeploymentDAUResponse } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { OverviewPageView } from "./OverviewPageView"; const meta: Meta = { diff --git a/site/src/pages/DeploymentSettingsPage/OverviewPage/UserEngagementChart.stories.tsx b/site/src/pages/DeploymentSettingsPage/OverviewPage/UserEngagementChart.stories.tsx index e2e2a99111db5..80f025bf76ff6 100644 --- a/site/src/pages/DeploymentSettingsPage/OverviewPage/UserEngagementChart.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/OverviewPage/UserEngagementChart.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { UserEngagementChart } from "./UserEngagementChart"; const meta: Meta = { diff --git a/site/src/pages/DeploymentSettingsPage/OverviewPage/UserEngagementChart.tsx b/site/src/pages/DeploymentSettingsPage/OverviewPage/UserEngagementChart.tsx index c89295dbfabee..6605f5e60af80 100644 --- a/site/src/pages/DeploymentSettingsPage/OverviewPage/UserEngagementChart.tsx +++ b/site/src/pages/DeploymentSettingsPage/OverviewPage/UserEngagementChart.tsx @@ -14,7 +14,7 @@ import { Link } from "components/Link/Link"; import { Spinner } from "components/Spinner/Spinner"; import { ChevronRightIcon } from "lucide-react"; import type { FC } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { Area, AreaChart, CartesianGrid, XAxis, YAxis } from "recharts"; const chartConfig = { @@ -130,7 +130,7 @@ export const UserEngagementChart: FC = ({ data }) => { const item = p[0]; return `${item.value} users`; }} - formatter={(v, n, item) => { + formatter={(_v, _n, item) => { const date = new Date(item.payload.date); return date.toLocaleString(undefined, { month: "long", diff --git a/site/src/pages/DeploymentSettingsPage/SecuritySettingsPage/SecuritySettingsPageView.stories.tsx b/site/src/pages/DeploymentSettingsPage/SecuritySettingsPage/SecuritySettingsPageView.stories.tsx index c14f2d0a09f2b..cd6cacfddf21d 100644 --- a/site/src/pages/DeploymentSettingsPage/SecuritySettingsPage/SecuritySettingsPageView.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/SecuritySettingsPage/SecuritySettingsPageView.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import type { SerpentGroup, SerpentOption } from "api/typesGenerated"; import { SecuritySettingsPageView } from "./SecuritySettingsPageView"; diff --git a/site/src/pages/DeploymentSettingsPage/UserAuthSettingsPage/UserAuthSettingsPageView.stories.tsx b/site/src/pages/DeploymentSettingsPage/UserAuthSettingsPage/UserAuthSettingsPageView.stories.tsx index 5756f11748800..d8c3e0d49b056 100644 --- a/site/src/pages/DeploymentSettingsPage/UserAuthSettingsPage/UserAuthSettingsPageView.stories.tsx +++ b/site/src/pages/DeploymentSettingsPage/UserAuthSettingsPage/UserAuthSettingsPageView.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import type { SerpentGroup } from "api/typesGenerated"; import { UserAuthSettingsPageView } from "./UserAuthSettingsPageView"; diff --git a/site/src/pages/DeploymentSettingsPage/UserAuthSettingsPage/UserAuthSettingsPageView.tsx b/site/src/pages/DeploymentSettingsPage/UserAuthSettingsPage/UserAuthSettingsPageView.tsx index 043206bea3388..7b50eb486bf56 100644 --- a/site/src/pages/DeploymentSettingsPage/UserAuthSettingsPage/UserAuthSettingsPageView.tsx +++ b/site/src/pages/DeploymentSettingsPage/UserAuthSettingsPage/UserAuthSettingsPageView.tsx @@ -29,66 +29,62 @@ export const UserAuthSettingsPageView = ({ ); return ( - <> - -
      - - User Authentication - + +
      + + User Authentication + - - } - > - - Login with OpenID Connect - - - Set up authentication to login with OpenID Connect. - - + + } + > + + Login with OpenID Connect + + + Set up authentication to login with OpenID Connect. + + - {oidcEnabled ? : } + {oidcEnabled ? : } - {oidcEnabled && ( - - deploymentGroupHasParent(o.group, "OIDC"), - )} - /> - )} -
      + {oidcEnabled && ( + + deploymentGroupHasParent(o.group, "OIDC"), + )} + /> + )} +
      -
      - - } - > - - Login with GitHub - - - Set up authentication to login with GitHub. - - +
      + + } + > + + Login with GitHub + + + Set up authentication to login with GitHub. + + - - {githubEnabled ? : } - + {githubEnabled ? : } - {githubEnabled && ( - - deploymentGroupHasParent(o.group, "GitHub"), - )} - /> - )} -
      - - + {githubEnabled && ( + + deploymentGroupHasParent(o.group, "GitHub"), + )} + /> + )} +
      +
      ); }; diff --git a/site/src/pages/ExternalAuthPage/ExternalAuthPage.tsx b/site/src/pages/ExternalAuthPage/ExternalAuthPage.tsx index 0523a5da750d4..105e7ee501b38 100644 --- a/site/src/pages/ExternalAuthPage/ExternalAuthPage.tsx +++ b/site/src/pages/ExternalAuthPage/ExternalAuthPage.tsx @@ -16,7 +16,7 @@ import { useAuthenticated } from "hooks"; import type { FC } from "react"; import { useMemo } from "react"; import { useQuery, useQueryClient } from "react-query"; -import { useParams, useSearchParams } from "react-router-dom"; +import { useParams, useSearchParams } from "react-router"; import ExternalAuthPageView from "./ExternalAuthPageView"; const ExternalAuthPage: FC = () => { diff --git a/site/src/pages/ExternalAuthPage/ExternalAuthPageView.stories.tsx b/site/src/pages/ExternalAuthPage/ExternalAuthPageView.stories.tsx index aa976ab4a63a3..483600853fec6 100644 --- a/site/src/pages/ExternalAuthPage/ExternalAuthPageView.stories.tsx +++ b/site/src/pages/ExternalAuthPage/ExternalAuthPageView.stories.tsx @@ -1,149 +1,149 @@ -import type { Meta, StoryFn } from "@storybook/react"; -import ExternalAuthPageView, { - type ExternalAuthPageViewProps, -} from "./ExternalAuthPageView"; +import type { Meta } from "@storybook/react-vite"; +import ExternalAuthPageView from "./ExternalAuthPageView"; export default { title: "pages/ExternalAuthPage", component: ExternalAuthPageView, } as Meta; -const Template: StoryFn = (args) => ( - -); - -export const WebAuthenticated = Template.bind({}); -WebAuthenticated.args = { - externalAuth: { - authenticated: true, - device: false, - installations: [], - app_install_url: "", - app_installable: false, - display_name: "BitBucket", - user: { - id: 0, - avatar_url: "https://avatars.githubusercontent.com/u/7122116?v=4", - login: "kylecarbs", - name: "Kyle Carberry", - profile_url: "", +export const WebAuthenticated = { + args: { + externalAuth: { + authenticated: true, + device: false, + installations: [], + app_install_url: "", + app_installable: false, + display_name: "BitBucket", + user: { + id: 0, + avatar_url: "https://avatars.githubusercontent.com/u/7122116?v=4", + login: "kylecarbs", + name: "Kyle Carberry", + profile_url: "", + }, }, }, }; -export const DeviceUnauthenticated = Template.bind({}); -DeviceUnauthenticated.args = { - externalAuth: { - display_name: "GitHub", - authenticated: false, - device: true, - installations: [], - app_install_url: "", - app_installable: false, - user: null, - }, - externalAuthDevice: { - device_code: "1234-5678", - expires_in: 900, - interval: 5, - user_code: "ABCD-EFGH", - verification_uri: "", +export const DeviceUnauthenticated = { + args: { + externalAuth: { + display_name: "GitHub", + authenticated: false, + device: true, + installations: [], + app_install_url: "", + app_installable: false, + user: null, + }, + externalAuthDevice: { + device_code: "1234-5678", + expires_in: 900, + interval: 5, + user_code: "ABCD-EFGH", + verification_uri: "", + }, }, }; -export const Device429Error = Template.bind({}); -Device429Error.args = { - externalAuth: { - display_name: "GitHub", - authenticated: false, - device: true, - installations: [], - app_install_url: "", - app_installable: false, - user: null, - }, - // This is intentionally undefined. - // If we get a 429 on the first /device call, then this - // is undefined with a 429 error. - externalAuthDevice: undefined, - deviceExchangeError: { - message: "Failed to authorize device.", - detail: - "rate limit hit, unable to authorize device. please try again later", +export const Device429Error = { + args: { + externalAuth: { + display_name: "GitHub", + authenticated: false, + device: true, + installations: [], + app_install_url: "", + app_installable: false, + user: null, + }, + // This is intentionally undefined. + // If we get a 429 on the first /device call, then this + // is undefined with a 429 error. + externalAuthDevice: undefined, + deviceExchangeError: { + message: "Failed to authorize device.", + detail: + "rate limit hit, unable to authorize device. please try again later", + }, }, }; -export const DeviceUnauthenticatedError = Template.bind({}); -DeviceUnauthenticatedError.args = { - externalAuth: { - display_name: "GitHub", - authenticated: false, - device: true, - installations: [], - app_install_url: "", - app_installable: false, - user: null, - }, - externalAuthDevice: { - device_code: "1234-5678", - expires_in: 900, - interval: 5, - user_code: "ABCD-EFGH", - verification_uri: "", - }, - deviceExchangeError: { - message: "Error exchanging device code.", - detail: "expired_token", +export const DeviceUnauthenticatedError = { + args: { + externalAuth: { + display_name: "GitHub", + authenticated: false, + device: true, + installations: [], + app_install_url: "", + app_installable: false, + user: null, + }, + externalAuthDevice: { + device_code: "1234-5678", + expires_in: 900, + interval: 5, + user_code: "ABCD-EFGH", + verification_uri: "", + }, + deviceExchangeError: { + message: "Error exchanging device code.", + detail: "expired_token", + }, }, }; -export const DeviceAuthenticatedNotInstalled = Template.bind({}); -DeviceAuthenticatedNotInstalled.args = { - viewExternalAuthConfig: true, - externalAuth: { - display_name: "GitHub", - authenticated: true, - device: true, - installations: [], - app_install_url: "https://example.com", - app_installable: true, - user: { - id: 0, - avatar_url: "https://avatars.githubusercontent.com/u/7122116?v=4", - login: "kylecarbs", - name: "Kyle Carberry", - profile_url: "", +export const DeviceAuthenticatedNotInstalled = { + args: { + viewExternalAuthConfig: true, + externalAuth: { + display_name: "GitHub", + authenticated: true, + device: true, + installations: [], + app_install_url: "https://example.com", + app_installable: true, + user: { + id: 0, + avatar_url: "https://avatars.githubusercontent.com/u/7122116?v=4", + login: "kylecarbs", + name: "Kyle Carberry", + profile_url: "", + }, }, }, }; -export const DeviceAuthenticatedInstalled = Template.bind({}); -DeviceAuthenticatedInstalled.args = { - externalAuth: { - display_name: "GitHub", - authenticated: true, - device: true, - installations: [ - { - configure_url: "https://example.com", - id: 1, - account: { - id: 0, - avatar_url: "https://github.com/coder.png", - login: "coder", - name: "Coder", - profile_url: "https://github.com/coder", +export const DeviceAuthenticatedInstalled = { + args: { + externalAuth: { + display_name: "GitHub", + authenticated: true, + device: true, + installations: [ + { + configure_url: "https://example.com", + id: 1, + account: { + id: 0, + avatar_url: "https://github.com/coder.png", + login: "coder", + name: "Coder", + profile_url: "https://github.com/coder", + }, }, + ], + app_install_url: "https://example.com", + app_installable: true, + user: { + id: 0, + avatar_url: "https://avatars.githubusercontent.com/u/7122116?v=4", + login: "kylecarbs", + name: "Kyle Carberry", + profile_url: "", }, - ], - app_install_url: "https://example.com", - app_installable: true, - user: { - id: 0, - avatar_url: "https://avatars.githubusercontent.com/u/7122116?v=4", - login: "kylecarbs", - name: "Kyle Carberry", - profile_url: "", }, }, }; diff --git a/site/src/pages/ExternalAuthPage/ExternalAuthPageView.tsx b/site/src/pages/ExternalAuthPage/ExternalAuthPageView.tsx index 798116145dd78..f99328ad72cf3 100644 --- a/site/src/pages/ExternalAuthPage/ExternalAuthPageView.tsx +++ b/site/src/pages/ExternalAuthPage/ExternalAuthPageView.tsx @@ -8,11 +8,10 @@ import { Avatar } from "components/Avatar/Avatar"; import { GitDeviceAuth } from "components/GitDeviceAuth/GitDeviceAuth"; import { SignInLayout } from "components/SignInLayout/SignInLayout"; import { Welcome } from "components/Welcome/Welcome"; -import { ExternalLinkIcon } from "lucide-react"; -import { RotateCwIcon } from "lucide-react"; +import { ExternalLinkIcon, RotateCwIcon } from "lucide-react"; import type { FC, ReactNode } from "react"; -export interface ExternalAuthPageViewProps { +interface ExternalAuthPageViewProps { externalAuth: ExternalAuth; viewExternalAuthConfig: boolean; diff --git a/site/src/pages/GroupsPage/CreateGroupPage.tsx b/site/src/pages/GroupsPage/CreateGroupPage.tsx index b256861c6827b..07dcea0e5e0ef 100644 --- a/site/src/pages/GroupsPage/CreateGroupPage.tsx +++ b/site/src/pages/GroupsPage/CreateGroupPage.tsx @@ -2,7 +2,7 @@ import { createGroup } from "api/queries/groups"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQueryClient } from "react-query"; -import { useNavigate, useParams } from "react-router-dom"; +import { useNavigate, useParams } from "react-router"; import { pageTitle } from "utils/page"; import { CreateGroupPageView } from "./CreateGroupPageView"; diff --git a/site/src/pages/GroupsPage/CreateGroupPageView.stories.tsx b/site/src/pages/GroupsPage/CreateGroupPageView.stories.tsx index ea8dfcc3f3e02..fe73334930652 100644 --- a/site/src/pages/GroupsPage/CreateGroupPageView.stories.tsx +++ b/site/src/pages/GroupsPage/CreateGroupPageView.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { userEvent, within } from "@storybook/test"; import { mockApiError } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { userEvent, within } from "storybook/test"; import { CreateGroupPageView } from "./CreateGroupPageView"; const meta: Meta = { diff --git a/site/src/pages/GroupsPage/CreateGroupPageView.tsx b/site/src/pages/GroupsPage/CreateGroupPageView.tsx index 6a3230e7ae646..4fdf78eadb1a1 100644 --- a/site/src/pages/GroupsPage/CreateGroupPageView.tsx +++ b/site/src/pages/GroupsPage/CreateGroupPageView.tsx @@ -18,7 +18,7 @@ import { import { Spinner } from "components/Spinner/Spinner"; import { useFormik } from "formik"; import type { FC } from "react"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; import { getFormHelpers, nameValidator, diff --git a/site/src/pages/GroupsPage/GroupPage.stories.tsx b/site/src/pages/GroupsPage/GroupPage.stories.tsx index 2325567dd4607..4367461fcdc99 100644 --- a/site/src/pages/GroupsPage/GroupPage.stories.tsx +++ b/site/src/pages/GroupsPage/GroupPage.stories.tsx @@ -1,15 +1,15 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { spyOn, userEvent, within } from "@storybook/test"; -import { API } from "api/api"; -import { getGroupQueryKey, groupPermissionsKey } from "api/queries/groups"; -import { organizationMembersKey } from "api/queries/organizations"; -import { reactRouterParameters } from "storybook-addon-remix-react-router"; import { MockDefaultOrganization, MockGroup, MockOrganizationMember, MockOrganizationMember2, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { API } from "api/api"; +import { getGroupQueryKey, groupPermissionsKey } from "api/queries/groups"; +import { organizationMembersKey } from "api/queries/organizations"; +import { spyOn, userEvent, within } from "storybook/test"; +import { reactRouterParameters } from "storybook-addon-remix-react-router"; import GroupPage from "./GroupPage"; const meta: Meta = { diff --git a/site/src/pages/GroupsPage/GroupPage.tsx b/site/src/pages/GroupsPage/GroupPage.tsx index ca15b80e4d259..76d18f46033b8 100644 --- a/site/src/pages/GroupsPage/GroupPage.tsx +++ b/site/src/pages/GroupsPage/GroupPage.tsx @@ -48,13 +48,16 @@ import { TableToolbar, } from "components/TableToolbar/TableToolbar"; import { MemberAutocomplete } from "components/UserAutocomplete/UserAutocomplete"; -import { UserPlusIcon } from "lucide-react"; -import { SettingsIcon } from "lucide-react"; -import { EllipsisVertical, TrashIcon } from "lucide-react"; +import { + EllipsisVertical, + SettingsIcon, + TrashIcon, + UserPlusIcon, +} from "lucide-react"; import { type FC, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; -import { Link as RouterLink, useNavigate, useParams } from "react-router-dom"; +import { Link as RouterLink, useNavigate, useParams } from "react-router"; import { isEveryoneGroup } from "utils/groups"; import { pageTitle } from "utils/page"; diff --git a/site/src/pages/GroupsPage/GroupSettingsPage.tsx b/site/src/pages/GroupsPage/GroupSettingsPage.tsx index 570877e1d8681..b210e82f464db 100644 --- a/site/src/pages/GroupsPage/GroupSettingsPage.tsx +++ b/site/src/pages/GroupsPage/GroupSettingsPage.tsx @@ -6,7 +6,7 @@ import { Loader } from "components/Loader/Loader"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; -import { useNavigate, useParams } from "react-router-dom"; +import { useNavigate, useParams } from "react-router"; import { pageTitle } from "utils/page"; import GroupSettingsPageView from "./GroupSettingsPageView"; diff --git a/site/src/pages/GroupsPage/GroupSettingsPageView.stories.tsx b/site/src/pages/GroupsPage/GroupSettingsPageView.stories.tsx index 148b044f18217..fa8a0dc459381 100644 --- a/site/src/pages/GroupsPage/GroupSettingsPageView.stories.tsx +++ b/site/src/pages/GroupsPage/GroupSettingsPageView.stories.tsx @@ -1,6 +1,6 @@ -import { action } from "@storybook/addon-actions"; -import type { Meta, StoryObj } from "@storybook/react"; import { MockGroup } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { action } from "storybook/actions"; import GroupSettingsPageView from "./GroupSettingsPageView"; const meta: Meta = { diff --git a/site/src/pages/GroupsPage/GroupsPage.tsx b/site/src/pages/GroupsPage/GroupsPage.tsx index 616e99fe15404..c5089cbad1e6b 100644 --- a/site/src/pages/GroupsPage/GroupsPage.tsx +++ b/site/src/pages/GroupsPage/GroupsPage.tsx @@ -17,7 +17,7 @@ import { RequirePermission } from "modules/permissions/RequirePermission"; import { type FC, useEffect } from "react"; import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { pageTitle } from "utils/page"; import { useGroupsSettings } from "./GroupsPageProvider"; import { GroupsPageView } from "./GroupsPageView"; diff --git a/site/src/pages/GroupsPage/GroupsPageProvider.tsx b/site/src/pages/GroupsPage/GroupsPageProvider.tsx index be4913c194dc1..83c11c4ae9c00 100644 --- a/site/src/pages/GroupsPage/GroupsPageProvider.tsx +++ b/site/src/pages/GroupsPage/GroupsPageProvider.tsx @@ -1,7 +1,7 @@ import type { Organization } from "api/typesGenerated"; import { useDashboard } from "modules/dashboard/useDashboard"; -import { type FC, createContext, useContext } from "react"; -import { Navigate, Outlet, useParams } from "react-router-dom"; +import { createContext, type FC, useContext } from "react"; +import { Navigate, Outlet, useParams } from "react-router"; const GroupsPageContext = createContext( undefined, diff --git a/site/src/pages/GroupsPage/GroupsPageView.stories.tsx b/site/src/pages/GroupsPage/GroupsPageView.stories.tsx index 466ee2b149524..2f2f659680380 100644 --- a/site/src/pages/GroupsPage/GroupsPageView.stories.tsx +++ b/site/src/pages/GroupsPage/GroupsPageView.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockGroup } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { GroupsPageView } from "./GroupsPageView"; const meta: Meta = { diff --git a/site/src/pages/GroupsPage/GroupsPageView.tsx b/site/src/pages/GroupsPage/GroupsPageView.tsx index b3c4d35d8c41c..1eb3f6b809648 100644 --- a/site/src/pages/GroupsPage/GroupsPageView.tsx +++ b/site/src/pages/GroupsPage/GroupsPageView.tsx @@ -24,7 +24,7 @@ import { import { useClickableTableRow } from "hooks"; import { ChevronRightIcon, PlusIcon } from "lucide-react"; import type { FC } from "react"; -import { Link as RouterLink, useNavigate } from "react-router-dom"; +import { Link as RouterLink, useNavigate } from "react-router"; import { docs } from "utils/docs"; type GroupsPageViewProps = { @@ -42,66 +42,64 @@ export const GroupsPageView: FC = ({ const isEmpty = Boolean(groups && groups.length === 0); return ( - <> - - - - - - - - - Name - Users - - - - - - - - + + + + + +
      + + + Name + Users + + + + + + + + - - - - - - - Create group - - - ) - } - /> - - - + + + + + + + Create group + + + ) + } + /> + + + - - {groups?.map((group) => ( - - ))} - - - -
      -
      -
      - + + {groups?.map((group) => ( + + ))} + + + + + + ); }; diff --git a/site/src/pages/HealthPage/AccessURLPage.stories.tsx b/site/src/pages/HealthPage/AccessURLPage.stories.tsx index 776abd22c25e1..0a620cb9fcf67 100644 --- a/site/src/pages/HealthPage/AccessURLPage.stories.tsx +++ b/site/src/pages/HealthPage/AccessURLPage.stories.tsx @@ -1,7 +1,7 @@ -import type { StoryObj } from "@storybook/react"; +import { MockHealth } from "testHelpers/entities"; +import type { StoryObj } from "@storybook/react-vite"; import { HEALTH_QUERY_KEY } from "api/queries/debug"; import type { HealthcheckReport } from "api/typesGenerated"; -import { MockHealth } from "testHelpers/entities"; import AccessURLPage from "./AccessURLPage"; import { generateMeta } from "./storybook"; diff --git a/site/src/pages/HealthPage/AccessURLPage.tsx b/site/src/pages/HealthPage/AccessURLPage.tsx index f9b0242be556e..12b11e50374f5 100644 --- a/site/src/pages/HealthPage/AccessURLPage.tsx +++ b/site/src/pages/HealthPage/AccessURLPage.tsx @@ -1,7 +1,7 @@ import type { HealthcheckReport } from "api/typesGenerated"; import { Alert } from "components/Alert/Alert"; import { Helmet } from "react-helmet-async"; -import { useOutletContext } from "react-router-dom"; +import { useOutletContext } from "react-router"; import { pageTitle } from "utils/page"; import { GridData, diff --git a/site/src/pages/HealthPage/Content.tsx b/site/src/pages/HealthPage/Content.tsx index 74cbc9a5b87c1..b3e39343c1e02 100644 --- a/site/src/pages/HealthPage/Content.tsx +++ b/site/src/pages/HealthPage/Content.tsx @@ -2,15 +2,18 @@ import { css } from "@emotion/css"; import { useTheme } from "@emotion/react"; import Link from "@mui/material/Link"; import type { HealthCode, HealthSeverity } from "api/typesGenerated"; -import { CircleAlertIcon } from "lucide-react"; -import { CircleCheckIcon, CircleMinusIcon } from "lucide-react"; +import { + CircleAlertIcon, + CircleCheckIcon, + CircleMinusIcon, +} from "lucide-react"; import { type ComponentProps, + cloneElement, type FC, + forwardRef, type HTMLAttributes, type ReactElement, - cloneElement, - forwardRef, } from "react"; import { docs } from "utils/docs"; import { healthyColor } from "./healthyColor"; diff --git a/site/src/pages/HealthPage/DERPPage.stories.tsx b/site/src/pages/HealthPage/DERPPage.stories.tsx index 30f02840f7db5..7dd8c2031e1fb 100644 --- a/site/src/pages/HealthPage/DERPPage.stories.tsx +++ b/site/src/pages/HealthPage/DERPPage.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import DERPPage from "./DERPPage"; import { generateMeta } from "./storybook"; diff --git a/site/src/pages/HealthPage/DERPPage.tsx b/site/src/pages/HealthPage/DERPPage.tsx index 6cd96321e1d62..08b2a121b445f 100644 --- a/site/src/pages/HealthPage/DERPPage.tsx +++ b/site/src/pages/HealthPage/DERPPage.tsx @@ -2,15 +2,15 @@ import { useTheme } from "@emotion/react"; import LocationOnOutlined from "@mui/icons-material/LocationOnOutlined"; import Button from "@mui/material/Button"; import type { + HealthcheckReport, HealthMessage, HealthSeverity, - HealthcheckReport, NetcheckReport, } from "api/typesGenerated"; import { Alert } from "components/Alert/Alert"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; -import { Link, useOutletContext } from "react-router-dom"; +import { Link, useOutletContext } from "react-router"; import { pageTitle } from "utils/page"; import { BooleanPill, diff --git a/site/src/pages/HealthPage/DERPRegionPage.stories.tsx b/site/src/pages/HealthPage/DERPRegionPage.stories.tsx index a834d6e40212a..d7bbe71d28c1c 100644 --- a/site/src/pages/HealthPage/DERPRegionPage.stories.tsx +++ b/site/src/pages/HealthPage/DERPRegionPage.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockHealth } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import DERPRegionPage from "./DERPRegionPage"; import { generateMeta } from "./storybook"; diff --git a/site/src/pages/HealthPage/DERPRegionPage.tsx b/site/src/pages/HealthPage/DERPRegionPage.tsx index afdc34d43cf66..1c81196412795 100644 --- a/site/src/pages/HealthPage/DERPRegionPage.tsx +++ b/site/src/pages/HealthPage/DERPRegionPage.tsx @@ -3,15 +3,15 @@ import Tooltip from "@mui/material/Tooltip"; import type { DERPNodeReport, DERPRegionReport, + HealthcheckReport, HealthMessage, HealthSeverity, - HealthcheckReport, } from "api/typesGenerated"; import { Alert } from "components/Alert/Alert"; import { ChevronLeftIcon, CodeIcon, HashIcon } from "lucide-react"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; -import { Link, useOutletContext, useParams } from "react-router-dom"; +import { Link, useOutletContext, useParams } from "react-router"; import { getLatencyColor } from "utils/latency"; import { pageTitle } from "utils/page"; import { diff --git a/site/src/pages/HealthPage/DatabasePage.stories.tsx b/site/src/pages/HealthPage/DatabasePage.stories.tsx index e283fdebda16b..38f742bfc1c4d 100644 --- a/site/src/pages/HealthPage/DatabasePage.stories.tsx +++ b/site/src/pages/HealthPage/DatabasePage.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import DatabasePage from "./DatabasePage"; import { generateMeta } from "./storybook"; diff --git a/site/src/pages/HealthPage/DatabasePage.tsx b/site/src/pages/HealthPage/DatabasePage.tsx index 5bf0b4cb1fe4c..e7b4cc9578288 100644 --- a/site/src/pages/HealthPage/DatabasePage.tsx +++ b/site/src/pages/HealthPage/DatabasePage.tsx @@ -1,7 +1,7 @@ import type { HealthcheckReport } from "api/typesGenerated"; import { Alert } from "components/Alert/Alert"; import { Helmet } from "react-helmet-async"; -import { useOutletContext } from "react-router-dom"; +import { useOutletContext } from "react-router"; import { pageTitle } from "utils/page"; import { GridData, diff --git a/site/src/pages/HealthPage/HealthLayout.tsx b/site/src/pages/HealthPage/HealthLayout.tsx index a5d76ebd8d7c9..86e79aa9ec69e 100644 --- a/site/src/pages/HealthPage/HealthLayout.tsx +++ b/site/src/pages/HealthPage/HealthLayout.tsx @@ -1,4 +1,3 @@ -import { cx } from "@emotion/css"; import { useTheme } from "@emotion/react"; import NotificationsOffOutlined from "@mui/icons-material/NotificationsOffOutlined"; import ReplayIcon from "@mui/icons-material/Replay"; @@ -9,17 +8,26 @@ import { health, refreshHealth } from "api/queries/debug"; import type { HealthSeverity } from "api/typesGenerated"; import { ErrorAlert } from "components/Alert/ErrorAlert"; import { Loader } from "components/Loader/Loader"; -import { type ClassName, useClassName } from "hooks/useClassName"; import kebabCase from "lodash/fp/kebabCase"; import { DashboardFullPage } from "modules/dashboard/DashboardLayout"; import { type FC, Suspense } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; -import { NavLink, Outlet } from "react-router-dom"; +import { NavLink, Outlet } from "react-router"; +import { cn } from "utils/cn"; import { createDayString } from "utils/createDayString"; import { pageTitle } from "utils/page"; import { HealthIcon } from "./Content"; +const linkStyles = { + normal: ` + text-content-secondary border-none text-sm w-full flex items-center gap-3 + text-left h-9 px-6 cursor-pointer no-underline transition-colors + hover:bg-surface-secondary hover:text-content-primary + `, + active: "bg-surface-secondary text-content-primary", +}; + export const HealthLayout: FC = () => { const theme = useTheme(); const queryClient = useQueryClient(); @@ -44,9 +52,6 @@ export const HealthLayout: FC = () => { } as const; const visibleSections = filterVisibleSections(sections); - const link = useClassName(classNames.link, []); - const activeLink = useClassName(classNames.activeLink, []); - if (isLoading) { return (
      @@ -70,38 +75,11 @@ export const HealthLayout: FC = () => { -
      -
      -
      +
      +
      +
      -
      +
      @@ -116,20 +94,15 @@ export const HealthLayout: FC = () => { {isRefreshing ? ( ) : ( - + )}
      -
      +
      {healthStatus.healthy ? "Healthy" : "Unhealthy"}
      -
      +
      {healthStatus.healthy ? Object.keys(visibleSections).some((key) => { const section = @@ -142,34 +115,28 @@ export const HealthLayout: FC = () => {
      -
      - Last check +
      + Last check {createDayString(healthStatus.time)}
      -
      - Version +
      + Version {healthStatus.coder_version}
      -
      -
      +
      }> @@ -229,35 +196,3 @@ const filterVisibleSections = (sections: T) => { return visible; }; - -const classNames = { - link: (css, theme) => - css({ - background: "none", - pointerEvents: "auto", - color: theme.palette.text.secondary, - border: "none", - fontSize: 14, - width: "100%", - display: "flex", - alignItems: "center", - gap: 12, - textAlign: "left", - height: 36, - padding: "0 24px", - cursor: "pointer", - textDecoration: "none", - - "&:hover": { - background: theme.palette.action.hover, - color: theme.palette.text.primary, - }, - }), - - activeLink: (css, theme) => - css({ - background: theme.palette.action.hover, - pointerEvents: "none", - color: theme.palette.text.primary, - }), -} satisfies Record; diff --git a/site/src/pages/HealthPage/ProvisionerDaemonsPage.stories.tsx b/site/src/pages/HealthPage/ProvisionerDaemonsPage.stories.tsx index 33aa4563019db..6838cd992c6d0 100644 --- a/site/src/pages/HealthPage/ProvisionerDaemonsPage.stories.tsx +++ b/site/src/pages/HealthPage/ProvisionerDaemonsPage.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import ProvisionerDaemonsPage from "./ProvisionerDaemonsPage"; import { generateMeta } from "./storybook"; diff --git a/site/src/pages/HealthPage/ProvisionerDaemonsPage.tsx b/site/src/pages/HealthPage/ProvisionerDaemonsPage.tsx index feb569f158ffd..fb473b8d6cae7 100644 --- a/site/src/pages/HealthPage/ProvisionerDaemonsPage.tsx +++ b/site/src/pages/HealthPage/ProvisionerDaemonsPage.tsx @@ -3,7 +3,7 @@ import { Alert } from "components/Alert/Alert"; import { Provisioner } from "modules/provisioners/Provisioner"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; -import { useOutletContext } from "react-router-dom"; +import { useOutletContext } from "react-router"; import { pageTitle } from "utils/page"; import { Header, diff --git a/site/src/pages/HealthPage/WebsocketPage.stories.tsx b/site/src/pages/HealthPage/WebsocketPage.stories.tsx index 90577f8aa0d5e..73b4d5ea241f8 100644 --- a/site/src/pages/HealthPage/WebsocketPage.stories.tsx +++ b/site/src/pages/HealthPage/WebsocketPage.stories.tsx @@ -1,9 +1,9 @@ -import type { StoryObj } from "@storybook/react"; +import { MockHealth } from "testHelpers/entities"; +import type { StoryObj } from "@storybook/react-vite"; import { HEALTH_QUERY_KEY } from "api/queries/debug"; import type { HealthcheckReport } from "api/typesGenerated"; -import { MockHealth } from "testHelpers/entities"; -import WebsocketPage from "./WebsocketPage"; import { generateMeta } from "./storybook"; +import WebsocketPage from "./WebsocketPage"; const meta = { title: "pages/Health/Websocket", diff --git a/site/src/pages/HealthPage/WebsocketPage.tsx b/site/src/pages/HealthPage/WebsocketPage.tsx index fed223163e8e1..f7406c8806f00 100644 --- a/site/src/pages/HealthPage/WebsocketPage.tsx +++ b/site/src/pages/HealthPage/WebsocketPage.tsx @@ -4,7 +4,7 @@ import type { HealthcheckReport } from "api/typesGenerated"; import { Alert } from "components/Alert/Alert"; import { CodeIcon } from "lucide-react"; import { Helmet } from "react-helmet-async"; -import { useOutletContext } from "react-router-dom"; +import { useOutletContext } from "react-router"; import { MONOSPACE_FONT_FAMILY } from "theme/constants"; import { pageTitle } from "utils/page"; import { diff --git a/site/src/pages/HealthPage/WorkspaceProxyPage.stories.tsx b/site/src/pages/HealthPage/WorkspaceProxyPage.stories.tsx index b2eaad45a28a8..7de80154fa7aa 100644 --- a/site/src/pages/HealthPage/WorkspaceProxyPage.stories.tsx +++ b/site/src/pages/HealthPage/WorkspaceProxyPage.stories.tsx @@ -1,9 +1,9 @@ -import type { StoryObj } from "@storybook/react"; +import { MockHealth } from "testHelpers/entities"; +import type { StoryObj } from "@storybook/react-vite"; import { HEALTH_QUERY_KEY } from "api/queries/debug"; import type { HealthcheckReport } from "api/typesGenerated"; -import { MockHealth } from "testHelpers/entities"; -import WorkspaceProxyPage from "./WorkspaceProxyPage"; import { generateMeta } from "./storybook"; +import WorkspaceProxyPage from "./WorkspaceProxyPage"; const meta = { title: "pages/Health/WorkspaceProxy", diff --git a/site/src/pages/HealthPage/WorkspaceProxyPage.tsx b/site/src/pages/HealthPage/WorkspaceProxyPage.tsx index f37b2721eb4b1..25188463d0126 100644 --- a/site/src/pages/HealthPage/WorkspaceProxyPage.tsx +++ b/site/src/pages/HealthPage/WorkspaceProxyPage.tsx @@ -6,7 +6,7 @@ import { Alert } from "components/Alert/Alert"; import { HashIcon } from "lucide-react"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; -import { useOutletContext } from "react-router-dom"; +import { useOutletContext } from "react-router"; import { createDayString } from "utils/createDayString"; import { pageTitle } from "utils/page"; import { diff --git a/site/src/pages/HealthPage/storybook.tsx b/site/src/pages/HealthPage/storybook.tsx index 9e8b84f1c53f9..aa327297e12de 100644 --- a/site/src/pages/HealthPage/storybook.tsx +++ b/site/src/pages/HealthPage/storybook.tsx @@ -1,10 +1,3 @@ -import type { Meta } from "@storybook/react"; -import { HEALTH_QUERY_KEY, HEALTH_QUERY_SETTINGS_KEY } from "api/queries/debug"; -import { - type RouteDefinition, - reactRouterOutlet, - reactRouterParameters, -} from "storybook-addon-remix-react-router"; import { chromatic } from "testHelpers/chromatic"; import { MockAppearanceConfig, @@ -15,6 +8,13 @@ import { MockHealthSettings, } from "testHelpers/entities"; import { withDashboardProvider } from "testHelpers/storybook"; +import type { Meta } from "@storybook/react-vite"; +import { HEALTH_QUERY_KEY, HEALTH_QUERY_SETTINGS_KEY } from "api/queries/debug"; +import { + type RouteDefinition, + reactRouterOutlet, + reactRouterParameters, +} from "storybook-addon-remix-react-router"; import { HealthLayout } from "./HealthLayout"; type MetaOptions = { diff --git a/site/src/pages/IconsPage/IconsPage.stories.tsx b/site/src/pages/IconsPage/IconsPage.stories.tsx index c0f824bd0c8e6..7fdb66d4a252b 100644 --- a/site/src/pages/IconsPage/IconsPage.stories.tsx +++ b/site/src/pages/IconsPage/IconsPage.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { chromatic } from "testHelpers/chromatic"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import IconsPage from "./IconsPage"; const meta: Meta = { diff --git a/site/src/pages/LoginOAuthDevicePage/LoginOAuthDevicePage.tsx b/site/src/pages/LoginOAuthDevicePage/LoginOAuthDevicePage.tsx index 908e21461c5b0..06177cfb84ccc 100644 --- a/site/src/pages/LoginOAuthDevicePage/LoginOAuthDevicePage.tsx +++ b/site/src/pages/LoginOAuthDevicePage/LoginOAuthDevicePage.tsx @@ -10,10 +10,10 @@ import { } from "components/GitDeviceAuth/GitDeviceAuth"; import { SignInLayout } from "components/SignInLayout/SignInLayout"; import { Welcome } from "components/Welcome/Welcome"; -import { useEffect, useMemo } from "react"; import type { FC } from "react"; +import { useEffect, useMemo } from "react"; import { useQuery } from "react-query"; -import { useSearchParams } from "react-router-dom"; +import { useSearchParams } from "react-router"; import LoginOAuthDevicePageView from "./LoginOAuthDevicePageView"; // The page is hardcoded to only use GitHub, @@ -31,6 +31,10 @@ const LoginOAuthDevicePage: FC = () => { ); } + return ; +}; + +const LoginOauthDevicePageWithState: FC<{ state: string }> = ({ state }) => { const externalAuthDeviceQuery = useQuery({ ...getGitHubDevice(), refetchOnMount: false, diff --git a/site/src/pages/LoginPage/LoginPage.test.tsx b/site/src/pages/LoginPage/LoginPage.test.tsx index 30847cd2e79cc..f43578aecf5ca 100644 --- a/site/src/pages/LoginPage/LoginPage.test.tsx +++ b/site/src/pages/LoginPage/LoginPage.test.tsx @@ -1,13 +1,13 @@ -import { fireEvent, screen } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; -import { http, HttpResponse } from "msw"; -import { createMemoryRouter } from "react-router-dom"; import { render, renderWithRouter, waitForLoaderToBeRemoved, } from "testHelpers/renderHelpers"; import { server } from "testHelpers/server"; +import { fireEvent, screen } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { HttpResponse, http } from "msw"; +import { createMemoryRouter } from "react-router"; import { Language } from "./Language"; import LoginPage from "./LoginPage"; diff --git a/site/src/pages/LoginPage/LoginPage.tsx b/site/src/pages/LoginPage/LoginPage.tsx index 85f3d24d47fbb..e476c3579f116 100644 --- a/site/src/pages/LoginPage/LoginPage.tsx +++ b/site/src/pages/LoginPage/LoginPage.tsx @@ -5,7 +5,7 @@ import { useEmbeddedMetadata } from "hooks/useEmbeddedMetadata"; import { type FC, useEffect } from "react"; import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; -import { Navigate, useLocation, useNavigate } from "react-router-dom"; +import { Navigate, useLocation, useNavigate } from "react-router"; import { getApplicationName } from "utils/appearance"; import { retrieveRedirect } from "utils/redirect"; import { sendDeploymentEvent } from "utils/telemetry"; diff --git a/site/src/pages/LoginPage/LoginPageView.stories.tsx b/site/src/pages/LoginPage/LoginPageView.stories.tsx index c82392511394f..f4cb1eb0b070e 100644 --- a/site/src/pages/LoginPage/LoginPageView.stories.tsx +++ b/site/src/pages/LoginPage/LoginPageView.stories.tsx @@ -1,4 +1,3 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockAuthMethodsAll, MockAuthMethodsExternal, @@ -7,6 +6,7 @@ import { MockBuildInfo, mockApiError, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { LoginPageView } from "./LoginPageView"; const meta: Meta = { diff --git a/site/src/pages/LoginPage/LoginPageView.tsx b/site/src/pages/LoginPage/LoginPageView.tsx index 1ef0cdf8f7d73..c6714218ab0c8 100644 --- a/site/src/pages/LoginPage/LoginPageView.tsx +++ b/site/src/pages/LoginPage/LoginPageView.tsx @@ -4,7 +4,7 @@ import { Button } from "components/Button/Button"; import { CustomLogo } from "components/CustomLogo/CustomLogo"; import { Loader } from "components/Loader/Loader"; import { type FC, useState } from "react"; -import { useLocation } from "react-router-dom"; +import { useLocation } from "react-router"; import { SignInForm } from "./SignInForm"; import { TermsOfServiceLink } from "./TermsOfServiceLink"; diff --git a/site/src/pages/LoginPage/PasswordSignInForm.tsx b/site/src/pages/LoginPage/PasswordSignInForm.tsx index 34c753e67bb18..4ba897464d31c 100644 --- a/site/src/pages/LoginPage/PasswordSignInForm.tsx +++ b/site/src/pages/LoginPage/PasswordSignInForm.tsx @@ -5,7 +5,7 @@ import { Spinner } from "components/Spinner/Spinner"; import { Stack } from "components/Stack/Stack"; import { useFormik } from "formik"; import type { FC } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { getFormHelpers, onChangeTrimmed } from "utils/formUtils"; import * as Yup from "yup"; import { Language } from "./Language"; diff --git a/site/src/pages/LoginPage/SignInForm.stories.tsx b/site/src/pages/LoginPage/SignInForm.stories.tsx index 125e912e08e70..f839af4e2a094 100644 --- a/site/src/pages/LoginPage/SignInForm.stories.tsx +++ b/site/src/pages/LoginPage/SignInForm.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { mockApiError } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { SignInForm } from "./SignInForm"; const meta: Meta = { diff --git a/site/src/pages/OrganizationSettingsPage/CreateOrganizationPage.tsx b/site/src/pages/OrganizationSettingsPage/CreateOrganizationPage.tsx index eeb958b040dca..bd84205befe2b 100644 --- a/site/src/pages/OrganizationSettingsPage/CreateOrganizationPage.tsx +++ b/site/src/pages/OrganizationSettingsPage/CreateOrganizationPage.tsx @@ -5,7 +5,7 @@ import { useFeatureVisibility } from "modules/dashboard/useFeatureVisibility"; import { RequirePermission } from "modules/permissions/RequirePermission"; import type { FC } from "react"; import { useMutation, useQueryClient } from "react-query"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; import { CreateOrganizationPageView } from "./CreateOrganizationPageView"; const CreateOrganizationPage: FC = () => { diff --git a/site/src/pages/OrganizationSettingsPage/CreateOrganizationPageView.stories.tsx b/site/src/pages/OrganizationSettingsPage/CreateOrganizationPageView.stories.tsx index 491fea3a14239..a9657cd93de8d 100644 --- a/site/src/pages/OrganizationSettingsPage/CreateOrganizationPageView.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/CreateOrganizationPageView.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { mockApiError } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { CreateOrganizationPageView } from "./CreateOrganizationPageView"; const meta: Meta = { diff --git a/site/src/pages/OrganizationSettingsPage/CreateOrganizationPageView.tsx b/site/src/pages/OrganizationSettingsPage/CreateOrganizationPageView.tsx index cdb70c3158c06..2b1902646fb34 100644 --- a/site/src/pages/OrganizationSettingsPage/CreateOrganizationPageView.tsx +++ b/site/src/pages/OrganizationSettingsPage/CreateOrganizationPageView.tsx @@ -5,20 +5,19 @@ import { ErrorAlert } from "components/Alert/ErrorAlert"; import { Badges, PremiumBadge } from "components/Badges/Badges"; import { Button } from "components/Button/Button"; import { ChooseOne, Cond } from "components/Conditionals/ChooseOne"; -import { IconField } from "components/IconField/IconField"; -import { Paywall } from "components/Paywall/Paywall"; -import { PopoverPaywall } from "components/Paywall/PopoverPaywall"; -import { Spinner } from "components/Spinner/Spinner"; import { Popover, PopoverContent, PopoverTrigger, } from "components/deprecated/Popover/Popover"; +import { IconField } from "components/IconField/IconField"; +import { Paywall } from "components/Paywall/Paywall"; +import { PopoverPaywall } from "components/Paywall/PopoverPaywall"; +import { Spinner } from "components/Spinner/Spinner"; import { useFormik } from "formik"; import { ArrowLeft } from "lucide-react"; import type { FC } from "react"; -import { useNavigate } from "react-router-dom"; -import { Link } from "react-router-dom"; +import { Link, useNavigate } from "react-router"; import { docs } from "utils/docs"; import { displayNameValidator, diff --git a/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CreateEditRolePage.tsx b/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CreateEditRolePage.tsx index 16878929e5190..89cac66f7c851 100644 --- a/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CreateEditRolePage.tsx +++ b/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CreateEditRolePage.tsx @@ -13,7 +13,7 @@ import { RequirePermission } from "modules/permissions/RequirePermission"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; -import { useNavigate, useParams } from "react-router-dom"; +import { useNavigate, useParams } from "react-router"; import { pageTitle } from "utils/page"; import CreateEditRolePageView from "./CreateEditRolePageView"; diff --git a/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CreateEditRolePageView.stories.tsx b/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CreateEditRolePageView.stories.tsx index 94111752422a2..01d9150a6a276 100644 --- a/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CreateEditRolePageView.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CreateEditRolePageView.stories.tsx @@ -1,11 +1,11 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, userEvent, within } from "@storybook/test"; import { + assignableRole, MockRole2WithOrgPermissions, MockRoleWithOrgPermissions, - assignableRole, mockApiError, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { expect, userEvent, within } from "storybook/test"; import CreateEditRolePageView from "./CreateEditRolePageView"; const meta: Meta = { diff --git a/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CreateEditRolePageView.tsx b/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CreateEditRolePageView.tsx index 294f5f28d92a6..93ebabdae4ccb 100644 --- a/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CreateEditRolePageView.tsx +++ b/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CreateEditRolePageView.tsx @@ -32,7 +32,7 @@ import { Stack } from "components/Stack/Stack"; import { useFormik } from "formik"; import { EyeIcon, EyeOffIcon } from "lucide-react"; import { type ChangeEvent, type FC, useState } from "react"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; import { getFormHelpers, nameValidator } from "utils/formUtils"; import * as Yup from "yup"; diff --git a/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CustomRolesPage.tsx b/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CustomRolesPage.tsx index ccdc5103c6977..ff197cc52aad6 100644 --- a/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CustomRolesPage.tsx +++ b/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CustomRolesPage.tsx @@ -16,7 +16,7 @@ import { RequirePermission } from "modules/permissions/RequirePermission"; import { type FC, useEffect, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; -import { useParams } from "react-router-dom"; +import { useParams } from "react-router"; import { pageTitle } from "utils/page"; import { CustomRolesPageView } from "./CustomRolesPageView"; diff --git a/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CustomRolesPageView.stories.tsx b/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CustomRolesPageView.stories.tsx index 14ffbfa85bc90..7f02cb4f48fc1 100644 --- a/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CustomRolesPageView.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CustomRolesPageView.stories.tsx @@ -1,8 +1,8 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockOrganizationAuditorRole, MockRoleWithOrgPermissions, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { CustomRolesPageView } from "./CustomRolesPageView"; const meta: Meta = { diff --git a/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CustomRolesPageView.tsx b/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CustomRolesPageView.tsx index 91ca7b5fa2732..cd94b6a18e669 100644 --- a/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CustomRolesPageView.tsx +++ b/site/src/pages/OrganizationSettingsPage/CustomRolesPage/CustomRolesPageView.tsx @@ -27,7 +27,7 @@ import { } from "components/TableLoader/TableLoader"; import { EllipsisVertical, PlusIcon } from "lucide-react"; import type { FC } from "react"; -import { Link as RouterLink, useNavigate } from "react-router-dom"; +import { Link as RouterLink, useNavigate } from "react-router"; import { docs } from "utils/docs"; import { PermissionPillsList } from "./PermissionPillsList"; diff --git a/site/src/pages/OrganizationSettingsPage/CustomRolesPage/PermissionPillsList.stories.tsx b/site/src/pages/OrganizationSettingsPage/CustomRolesPage/PermissionPillsList.stories.tsx index 7a62a8f955747..57a4aab1fc0e2 100644 --- a/site/src/pages/OrganizationSettingsPage/CustomRolesPage/PermissionPillsList.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/CustomRolesPage/PermissionPillsList.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { userEvent, within } from "@storybook/test"; import { MockRoleWithOrgPermissions } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { userEvent, within } from "storybook/test"; import { PermissionPillsList } from "./PermissionPillsList"; const meta: Meta = { diff --git a/site/src/pages/OrganizationSettingsPage/CustomRolesPage/PermissionPillsList.tsx b/site/src/pages/OrganizationSettingsPage/CustomRolesPage/PermissionPillsList.tsx index 8a456460481ba..11071e0dab164 100644 --- a/site/src/pages/OrganizationSettingsPage/CustomRolesPage/PermissionPillsList.tsx +++ b/site/src/pages/OrganizationSettingsPage/CustomRolesPage/PermissionPillsList.tsx @@ -1,12 +1,12 @@ import { type Interpolation, type Theme, useTheme } from "@emotion/react"; import Stack from "@mui/material/Stack"; import type { Permission } from "api/typesGenerated"; -import { Pill } from "components/Pill/Pill"; import { Popover, PopoverContent, PopoverTrigger, } from "components/deprecated/Popover/Popover"; +import { Pill } from "components/Pill/Pill"; import type { FC } from "react"; function getUniqueResourceTypes(jsonObject: readonly Permission[]) { diff --git a/site/src/pages/OrganizationSettingsPage/IdpSyncPage/ExportPolicyButton.stories.tsx b/site/src/pages/OrganizationSettingsPage/IdpSyncPage/ExportPolicyButton.stories.tsx index 6c25f170d629e..a55588afc096f 100644 --- a/site/src/pages/OrganizationSettingsPage/IdpSyncPage/ExportPolicyButton.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/IdpSyncPage/ExportPolicyButton.stories.tsx @@ -1,10 +1,10 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, fn, userEvent, waitFor, within } from "@storybook/test"; import { MockGroupSyncSettings, MockOrganization, MockRoleSyncSettings, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { expect, fn, userEvent, waitFor, within } from "storybook/test"; import { ExportPolicyButton } from "./ExportPolicyButton"; const meta: Meta = { diff --git a/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpGroupSyncForm.tsx b/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpGroupSyncForm.tsx index 9282bd6bfd2b1..1be01567f6bb3 100644 --- a/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpGroupSyncForm.tsx +++ b/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpGroupSyncForm.tsx @@ -219,7 +219,7 @@ export const IdpGroupSyncForm: FC = ({
      -
      +
      diff --git a/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpPillList.tsx b/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpPillList.tsx index 3a5c603fa3e64..877ba6c9a205a 100644 --- a/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpPillList.tsx +++ b/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpPillList.tsx @@ -1,11 +1,11 @@ import { type Interpolation, type Theme, useTheme } from "@emotion/react"; import Stack from "@mui/material/Stack"; -import { Pill } from "components/Pill/Pill"; import { Popover, PopoverContent, PopoverTrigger, } from "components/deprecated/Popover/Popover"; +import { Pill } from "components/Pill/Pill"; import type { FC } from "react"; import { isUUID } from "utils/uuid"; diff --git a/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpRoleSyncForm.tsx b/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpRoleSyncForm.tsx index 0825ab4217395..2efbf6f758393 100644 --- a/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpRoleSyncForm.tsx +++ b/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpRoleSyncForm.tsx @@ -159,7 +159,7 @@ export const IdpRoleSyncForm: FC = ({

      {form.errors.field}

      )}
      -
      +
      diff --git a/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpSyncPage.tsx b/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpSyncPage.tsx index 8132318fb96c7..59a086a024b9a 100644 --- a/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpSyncPage.tsx +++ b/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpSyncPage.tsx @@ -10,8 +10,7 @@ import { import { organizationRoles } from "api/queries/roles"; import { ChooseOne, Cond } from "components/Conditionals/ChooseOne"; import { EmptyState } from "components/EmptyState/EmptyState"; -import { displayError } from "components/GlobalSnackbar/utils"; -import { displaySuccess } from "components/GlobalSnackbar/utils"; +import { displayError, displaySuccess } from "components/GlobalSnackbar/utils"; import { Link } from "components/Link/Link"; import { Paywall } from "components/Paywall/Paywall"; import { useFeatureVisibility } from "modules/dashboard/useFeatureVisibility"; @@ -20,7 +19,7 @@ import { RequirePermission } from "modules/permissions/RequirePermission"; import { type FC, useEffect, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQueries, useQuery, useQueryClient } from "react-query"; -import { useParams, useSearchParams } from "react-router-dom"; +import { useParams, useSearchParams } from "react-router"; import { docs } from "utils/docs"; import { pageTitle } from "utils/page"; import IdpSyncPageView from "./IdpSyncPageView"; @@ -75,6 +74,13 @@ const IdpSyncPage: FC = () => { enabled: !!field, }); + const patchGroupSyncSettingsMutation = useMutation( + patchGroupSyncSettings(organizationName, queryClient), + ); + const patchRoleSyncSettingsMutation = useMutation( + patchRoleSyncSettings(organizationName, queryClient), + ); + if (!organization) { return ; } @@ -96,13 +102,6 @@ const IdpSyncPage: FC = () => { ); } - const patchGroupSyncSettingsMutation = useMutation( - patchGroupSyncSettings(organizationName, queryClient), - ); - const patchRoleSyncSettingsMutation = useMutation( - patchRoleSyncSettings(organizationName, queryClient), - ); - const error = patchGroupSyncSettingsMutation.error || patchRoleSyncSettingsMutation.error || diff --git a/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpSyncPageView.stories.tsx b/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpSyncPageView.stories.tsx index e5a77d1c7f779..b2eb64ab4eec5 100644 --- a/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpSyncPageView.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/IdpSyncPage/IdpSyncPageView.stories.tsx @@ -1,5 +1,3 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, userEvent } from "@storybook/test"; import { MockGroup, MockGroup2, @@ -9,6 +7,8 @@ import { MockOrganization, MockRoleSyncSettings, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { expect, userEvent } from "storybook/test"; import IdpSyncPageView from "./IdpSyncPageView"; const groupsMap = new Map(); diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationMembersPage.test.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationMembersPage.test.tsx index 4c90a21659ee2..713bc7e98d9d7 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationMembersPage.test.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationMembersPage.test.tsx @@ -1,7 +1,3 @@ -import { fireEvent, screen, within } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; -import type { SlimRole } from "api/typesGenerated"; -import { http, HttpResponse } from "msw"; import { MockEntitlementsWithMultiOrg, MockOrganization, @@ -14,6 +10,10 @@ import { waitForLoaderToBeRemoved, } from "testHelpers/renderHelpers"; import { server } from "testHelpers/server"; +import { fireEvent, screen, within } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import type { SlimRole } from "api/typesGenerated"; +import { HttpResponse, http } from "msw"; import OrganizationMembersPage from "./OrganizationMembersPage"; jest.spyOn(console, "error").mockImplementation(() => {}); diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationMembersPage.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationMembersPage.tsx index 3c24404f24205..f2c270cd929af 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationMembersPage.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationMembersPage.tsx @@ -20,7 +20,7 @@ import { RequirePermission } from "modules/permissions/RequirePermission"; import { type FC, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; -import { useParams, useSearchParams } from "react-router-dom"; +import { useParams, useSearchParams } from "react-router"; import { pageTitle } from "utils/page"; import { OrganizationMembersPageView } from "./OrganizationMembersPageView"; diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationMembersPageView.stories.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationMembersPageView.stories.tsx index 566bebfe7f3af..9cf02a22f1b9e 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationMembersPageView.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationMembersPageView.stories.tsx @@ -1,11 +1,11 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { mockSuccessResult } from "components/PaginationWidget/PaginationContainer.mocks"; -import type { UsePaginatedQueryResult } from "hooks/usePaginatedQuery"; import { MockOrganizationMember, MockOrganizationMember2, MockUserOwner, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { mockSuccessResult } from "components/PaginationWidget/PaginationContainer.mocks"; +import type { UsePaginatedQueryResult } from "hooks/usePaginatedQuery"; import { OrganizationMembersPageView } from "./OrganizationMembersPageView"; const meta: Meta = { diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationMembersPageView.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationMembersPageView.tsx index 9270e27e3d9c6..7f8ed8e92ea17 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationMembersPageView.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationMembersPageView.tsx @@ -34,8 +34,7 @@ import { } from "components/Table/Table"; import { UserAutocomplete } from "components/UserAutocomplete/UserAutocomplete"; import type { PaginationResultInfo } from "hooks/usePaginatedQuery"; -import { UserPlusIcon } from "lucide-react"; -import { EllipsisVertical, TriangleAlert } from "lucide-react"; +import { EllipsisVertical, TriangleAlert, UserPlusIcon } from "lucide-react"; import { UserGroupsCell } from "pages/UsersPage/UsersTable/UserGroupsCell"; import { type FC, useState } from "react"; import { TableColumnHelpTooltip } from "./UserTable/TableColumnHelpTooltip"; diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/CancelJobButton.stories.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/CancelJobButton.stories.tsx index 713a7fdc299c1..e42d653e1eaee 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/CancelJobButton.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/CancelJobButton.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { userEvent, waitFor, within } from "@storybook/test"; import { MockProvisionerJob } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { userEvent, waitFor, within } from "storybook/test"; import { CancelJobButton } from "./CancelJobButton"; const meta: Meta = { diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/CancelJobConfirmationDialog.stories.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/CancelJobConfirmationDialog.stories.tsx index f0c117360d53a..82c49511a105d 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/CancelJobConfirmationDialog.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/CancelJobConfirmationDialog.stories.tsx @@ -1,8 +1,8 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, fn, userEvent, waitFor, within } from "@storybook/test"; -import type { Response } from "api/typesGenerated"; import { MockProvisionerJob } from "testHelpers/entities"; import { withGlobalSnackbar } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import type { Response } from "api/typesGenerated"; +import { expect, fn, userEvent, waitFor, within } from "storybook/test"; import { CancelJobConfirmationDialog } from "./CancelJobConfirmationDialog"; const meta: Meta = { diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/JobRow.stories.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/JobRow.stories.tsx index 8fcc52e4957a6..0a611982442b5 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/JobRow.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/JobRow.stories.tsx @@ -1,7 +1,7 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, userEvent, within } from "@storybook/test"; -import { Table, TableBody } from "components/Table/Table"; import { MockProvisionerJob } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { Table, TableBody } from "components/Table/Table"; +import { expect, userEvent, within } from "storybook/test"; import { daysAgo } from "utils/time"; import { JobRow } from "./JobRow"; diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/JobRow.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/JobRow.tsx index 2073f75ca3558..e9f7170693a2c 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/JobRow.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/JobRow.tsx @@ -15,7 +15,7 @@ import { ProvisionerTruncateTags, } from "modules/provisioners/ProvisionerTags"; import { type FC, useState } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { cn } from "utils/cn"; import { relativeTime } from "utils/time"; import { CancelJobButton } from "./CancelJobButton"; diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/OrganizationProvisionerJobsPage.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/OrganizationProvisionerJobsPage.tsx index e64feaf2e31c6..cbc36203ca225 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/OrganizationProvisionerJobsPage.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/OrganizationProvisionerJobsPage.tsx @@ -2,7 +2,7 @@ import { provisionerJobs } from "api/queries/organizations"; import { useOrganizationSettings } from "modules/management/OrganizationSettingsLayout"; import type { FC } from "react"; import { useQuery } from "react-query"; -import { useSearchParams } from "react-router-dom"; +import { useSearchParams } from "react-router"; import OrganizationProvisionerJobsPageView from "./OrganizationProvisionerJobsPageView"; const OrganizationProvisionerJobsPage: FC = () => { diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/OrganizationProvisionerJobsPageView.stories.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/OrganizationProvisionerJobsPageView.stories.tsx index 35a96a1b3bd5f..c47096be87317 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/OrganizationProvisionerJobsPageView.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerJobsPage/OrganizationProvisionerJobsPageView.stories.tsx @@ -1,8 +1,8 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, fn, userEvent, waitFor, within } from "@storybook/test"; +import { MockOrganization, MockProvisionerJob } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import type { ProvisionerJob } from "api/typesGenerated"; import { useState } from "react"; -import { MockOrganization, MockProvisionerJob } from "testHelpers/entities"; +import { expect, fn, userEvent, waitFor, within } from "storybook/test"; import { daysAgo } from "utils/time"; import OrganizationProvisionerJobsPageView from "./OrganizationProvisionerJobsPageView"; diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPage.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPage.tsx index 77bcfe10cb229..f757b48830ca8 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPage.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPage.tsx @@ -6,7 +6,7 @@ import { RequirePermission } from "modules/permissions/RequirePermission"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; -import { useParams } from "react-router-dom"; +import { useParams } from "react-router"; import { pageTitle } from "utils/page"; import { OrganizationProvisionerKeysPageView } from "./OrganizationProvisionerKeysPageView"; diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPageView.stories.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPageView.stories.tsx index f30ea66175e07..df5548511ba04 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPageView.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPageView.stories.tsx @@ -1,15 +1,15 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import { + MockProvisioner, + MockProvisionerKey, + mockApiError, +} from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { type ProvisionerKeyDaemons, ProvisionerKeyIDBuiltIn, ProvisionerKeyIDPSK, ProvisionerKeyIDUserAuth, } from "api/typesGenerated"; -import { - MockProvisioner, - MockProvisionerKey, - mockApiError, -} from "testHelpers/entities"; import { OrganizationProvisionerKeysPageView } from "./OrganizationProvisionerKeysPageView"; const mockProvisionerKeyDaemons: ProvisionerKeyDaemons[] = [ diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/ProvisionerKeyRow.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/ProvisionerKeyRow.tsx index dd0a2e2aeb954..9402a64acc90d 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/ProvisionerKeyRow.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/ProvisionerKeyRow.tsx @@ -9,7 +9,7 @@ import { ProvisionerTruncateTags, } from "modules/provisioners/ProvisionerTags"; import { type FC, useState } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { cn } from "utils/cn"; import { relativeTime } from "utils/time"; diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/LastConnectionHead.stories.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/LastConnectionHead.stories.tsx index 8f67f6f92cff8..43468d59c087e 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/LastConnectionHead.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/LastConnectionHead.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { userEvent } from "@storybook/test"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { userEvent } from "storybook/test"; import { LastConnectionHead } from "./LastConnectionHead"; const meta: Meta = { diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/OrganizationProvisionersPage.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/OrganizationProvisionersPage.tsx index 242c0acdf842b..95db66f2c41c4 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/OrganizationProvisionersPage.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/OrganizationProvisionersPage.tsx @@ -8,7 +8,7 @@ import { RequirePermission } from "modules/permissions/RequirePermission"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; -import { useParams, useSearchParams } from "react-router-dom"; +import { useParams, useSearchParams } from "react-router"; import { pageTitle } from "utils/page"; import { OrganizationProvisionersPageView } from "./OrganizationProvisionersPageView"; @@ -20,6 +20,7 @@ const OrganizationProvisionersPage: FC = () => { const queryParams = { ids: searchParams.get("ids") ?? "", tags: searchParams.get("tags") ?? "", + offline: searchParams.get("offline") === "true", }; const { organization, organizationPermissions } = useOrganizationSettings(); const { entitlements } = useDashboard(); @@ -66,7 +67,12 @@ const OrganizationProvisionersPage: FC = () => { buildVersion={buildInfoQuery.data?.version} onRetry={provisionersQuery.refetch} filter={queryParams} - onFilterChange={setSearchParams} + onFilterChange={({ ids, offline }) => { + setSearchParams({ + ids, + offline: offline.toString(), + }); + }} /> ); diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/OrganizationProvisionersPageView.stories.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/OrganizationProvisionersPageView.stories.tsx index a559af512bbe3..8dba15b4d8856 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/OrganizationProvisionersPageView.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/OrganizationProvisionersPageView.stories.tsx @@ -1,4 +1,3 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockBuildInfo, MockProvisioner, @@ -6,6 +5,7 @@ import { MockUserProvisioner, mockApiError, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { OrganizationProvisionersPageView } from "./OrganizationProvisionersPageView"; const meta: Meta = { @@ -23,9 +23,14 @@ const meta: Meta = { ...MockProvisionerWithTags, version: "0.0.0", }, + { + ...MockUserProvisioner, + status: "offline", + }, ], filter: { ids: "", + offline: true, }, }, }; @@ -69,6 +74,17 @@ export const FilterByID: Story = { provisioners: [MockProvisioner], filter: { ids: MockProvisioner.id, + offline: true, + }, + }, +}; + +export const FilterByOffline: Story = { + args: { + provisioners: [MockProvisioner], + filter: { + ids: "", + offline: false, }, }, }; diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/OrganizationProvisionersPageView.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/OrganizationProvisionersPageView.tsx index 387baf31519cb..ac6e45aed24cf 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/OrganizationProvisionersPageView.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/OrganizationProvisionersPageView.tsx @@ -1,6 +1,7 @@ import type { ProvisionerDaemon } from "api/typesGenerated"; import { Badge } from "components/Badge/Badge"; import { Button } from "components/Button/Button"; +import { Checkbox } from "components/Checkbox/Checkbox"; import { EmptyState } from "components/EmptyState/EmptyState"; import { Link } from "components/Link/Link"; import { Loader } from "components/Loader/Loader"; @@ -24,7 +25,7 @@ import { TooltipProvider, TooltipTrigger, } from "components/Tooltip/Tooltip"; -import { SquareArrowOutUpRightIcon, XIcon } from "lucide-react"; +import { XIcon } from "lucide-react"; import type { FC } from "react"; import { docs } from "utils/docs"; import { LastConnectionHead } from "./LastConnectionHead"; @@ -32,6 +33,7 @@ import { ProvisionerRow } from "./ProvisionerRow"; type ProvisionersFilter = { ids: string; + offline: boolean; }; interface OrganizationProvisionersPageViewProps { @@ -102,70 +104,89 @@ export const OrganizationProvisionersPageView: FC< documentationLink={docs("/")} /> ) : ( - - - - Name - Key - Version - Status - Tags - - - - - - - {provisioners ? ( - provisioners.length > 0 ? ( - provisioners.map((provisioner) => ( - - )) - ) : ( + <> +
      + { + onFilterChange({ + ...filter, + offline: checked === true, + }); + }} + /> + +
      +
      + + + Name + Key + Version + Status + Tags + + + + + + + {provisioners ? ( + provisioners.length > 0 ? ( + provisioners.map((provisioner) => ( + + )) + ) : ( + + + + + Create a provisioner + + + } + /> + + + ) + ) : error ? ( - - Create a provisioner - - + } /> - ) - ) : error ? ( - - - - Retry - - } - /> - - - ) : ( - - - - - - )} - -
      + ) : ( + + + + + + )} + + + )} ); diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/ProvisionerKey.stories.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/ProvisionerKey.stories.tsx index 4d75ad83587fb..5555d1048c3b9 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/ProvisionerKey.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/ProvisionerKey.stories.tsx @@ -1,10 +1,10 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { userEvent } from "@storybook/test"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { ProvisionerKeyNameBuiltIn, ProvisionerKeyNamePSK, ProvisionerKeyNameUserAuth, } from "api/typesGenerated"; +import { userEvent } from "storybook/test"; import { ProvisionerKey } from "./ProvisionerKey"; const meta: Meta = { diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/ProvisionerRow.stories.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/ProvisionerRow.stories.tsx index eecba0494eac9..a0c777f4ba606 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/ProvisionerRow.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/ProvisionerRow.stories.tsx @@ -1,7 +1,7 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, userEvent, within } from "@storybook/test"; -import { Table, TableBody } from "components/Table/Table"; import { MockBuildInfo, MockProvisioner } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { Table, TableBody } from "components/Table/Table"; +import { expect, userEvent, within } from "storybook/test"; import { ProvisionerRow } from "./ProvisionerRow"; const meta: Meta = { diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/ProvisionerRow.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/ProvisionerRow.tsx index ca5af240d1b02..9508c1a261b85 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/ProvisionerRow.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/ProvisionerRow.tsx @@ -18,7 +18,7 @@ import { } from "modules/provisioners/ProvisionerTags"; import { ProvisionerKey } from "pages/OrganizationSettingsPage/OrganizationProvisionersPage/ProvisionerKey"; import { type FC, useState } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { cn } from "utils/cn"; import { relativeTime } from "utils/time"; import { ProvisionerVersion } from "./ProvisionerVersion"; diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/ProvisionerVersion.stories.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/ProvisionerVersion.stories.tsx index 305fbd441fa7f..43c872aa55e48 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/ProvisionerVersion.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionersPage/ProvisionerVersion.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, userEvent, within } from "@storybook/test"; import { MockBuildInfo, MockProvisioner } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { expect, userEvent, within } from "storybook/test"; import { ProvisionerVersion } from "./ProvisionerVersion"; const meta: Meta = { diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationRedirect.test.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationRedirect.test.tsx index 2572ba0076999..18c0eed0c2e0b 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationRedirect.test.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationRedirect.test.tsx @@ -1,5 +1,3 @@ -import { screen } from "@testing-library/react"; -import { http, HttpResponse } from "msw"; import { MockDefaultOrganization, MockEntitlementsWithMultiOrg, @@ -10,6 +8,8 @@ import { waitForLoaderToBeRemoved, } from "testHelpers/renderHelpers"; import { server } from "testHelpers/server"; +import { screen } from "@testing-library/react"; +import { HttpResponse, http } from "msw"; import OrganizationRedirect from "./OrganizationRedirect"; jest.spyOn(console, "error").mockImplementation(() => {}); @@ -69,7 +69,7 @@ describe("OrganizationRedirect", () => { }), ); const router = await renderPage(); - const form = screen.getByText("Organization Settings"); + const form = await screen.findByText("Organization Settings"); expect(form).toBeInTheDocument(); expect(router.state.location.pathname).toBe( `/organizations/${MockDefaultOrganization.name}`, @@ -94,7 +94,7 @@ describe("OrganizationRedirect", () => { }), ); const router = await renderPage(); - const form = screen.getByText("Organization Settings"); + const form = await screen.findByText("Organization Settings"); expect(form).toBeInTheDocument(); expect(router.state.location.pathname).toBe( `/organizations/${MockOrganization2.name}`, diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationRedirect.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationRedirect.tsx index d01c9d1cda29f..88634ec672b5c 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationRedirect.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationRedirect.tsx @@ -2,7 +2,7 @@ import { EmptyState } from "components/EmptyState/EmptyState"; import { useOrganizationSettings } from "modules/management/OrganizationSettingsLayout"; import { canEditOrganization } from "modules/permissions/organizations"; import type { FC } from "react"; -import { Navigate } from "react-router-dom"; +import { Navigate } from "react-router"; const OrganizationRedirect: FC = () => { const { diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationSettingsPage.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationSettingsPage.tsx index 4a0395d984952..60cf4789d08be 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationSettingsPage.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationSettingsPage.tsx @@ -4,14 +4,13 @@ import { updateOrganization, } from "api/queries/organizations"; import { EmptyState } from "components/EmptyState/EmptyState"; -import { displaySuccess } from "components/GlobalSnackbar/utils"; -import { displayError } from "components/GlobalSnackbar/utils"; +import { displayError, displaySuccess } from "components/GlobalSnackbar/utils"; import { useOrganizationSettings } from "modules/management/OrganizationSettingsLayout"; import { RequirePermission } from "modules/permissions/RequirePermission"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQueryClient } from "react-query"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; import { pageTitle } from "utils/page"; import { OrganizationSettingsPageView } from "./OrganizationSettingsPageView"; diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationSettingsPageView.stories.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationSettingsPageView.stories.tsx index 3e8b1ad3133b7..fc3cf3767dc2b 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationSettingsPageView.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationSettingsPageView.stories.tsx @@ -1,9 +1,9 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { chromatic } from "testHelpers/chromatic"; import { MockDefaultOrganization, MockOrganization, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { OrganizationSettingsPageView } from "./OrganizationSettingsPageView"; const meta: Meta = { diff --git a/site/src/pages/OrganizationSettingsPage/UserTable/EditRolesButton.stories.tsx b/site/src/pages/OrganizationSettingsPage/UserTable/EditRolesButton.stories.tsx index f3244898483ce..7b6b29c4cca3d 100644 --- a/site/src/pages/OrganizationSettingsPage/UserTable/EditRolesButton.stories.tsx +++ b/site/src/pages/OrganizationSettingsPage/UserTable/EditRolesButton.stories.tsx @@ -1,5 +1,3 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { userEvent, within } from "@storybook/test"; import { MockOwnerRole, MockSiteRoles, @@ -7,6 +5,8 @@ import { MockWorkspaceCreationBanRole, } from "testHelpers/entities"; import { withDesktopViewport } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { userEvent, within } from "storybook/test"; import { EditRolesButton } from "./EditRolesButton"; const meta: Meta = { diff --git a/site/src/pages/OrganizationSettingsPage/UserTable/EditRolesButton.tsx b/site/src/pages/OrganizationSettingsPage/UserTable/EditRolesButton.tsx index f409b09724d86..4983e671aa5a6 100644 --- a/site/src/pages/OrganizationSettingsPage/UserTable/EditRolesButton.tsx +++ b/site/src/pages/OrganizationSettingsPage/UserTable/EditRolesButton.tsx @@ -3,6 +3,11 @@ import Tooltip from "@mui/material/Tooltip"; import type { SlimRole } from "api/typesGenerated"; import { Button } from "components/Button/Button"; import { CollapsibleSummary } from "components/CollapsibleSummary/CollapsibleSummary"; +import { + Popover, + PopoverContent, + PopoverTrigger, +} from "components/deprecated/Popover/Popover"; import { HelpTooltip, HelpTooltipContent, @@ -11,11 +16,6 @@ import { HelpTooltipTrigger, } from "components/HelpTooltip/HelpTooltip"; import { EditSquare } from "components/Icons/EditSquare"; -import { - Popover, - PopoverContent, - PopoverTrigger, -} from "components/deprecated/Popover/Popover"; import { UserIcon } from "lucide-react"; import { type FC, useEffect, useState } from "react"; @@ -75,25 +75,8 @@ interface EditRolesButtonProps { userLoginType?: string; } -export const EditRolesButton: FC = ({ - roles, - selectedRoleNames, - onChange, - isLoading, - userLoginType, - oidcRoleSync, -}) => { - const handleChange = (roleName: string) => { - if (selectedRoleNames.has(roleName)) { - const serialized = [...selectedRoleNames]; - onChange(serialized.filter((role) => role !== roleName)); - return; - } - - onChange([...selectedRoleNames, roleName]); - }; - const [isAdvancedOpen, setIsAdvancedOpen] = useState(false); - +export const EditRolesButton: FC = (props) => { + const { userLoginType, oidcRoleSync } = props; const canSetRoles = userLoginType !== "oidc" || (userLoginType === "oidc" && !oidcRoleSync); @@ -111,6 +94,26 @@ export const EditRolesButton: FC = ({ ); } + return ; +}; + +const EnabledEditRolesButton: FC = ({ + roles, + selectedRoleNames, + onChange, + isLoading, +}) => { + const handleChange = (roleName: string) => { + if (selectedRoleNames.has(roleName)) { + const serialized = [...selectedRoleNames]; + onChange(serialized.filter((role) => role !== roleName)); + return; + } + + onChange([...selectedRoleNames, roleName]); + }; + const [isAdvancedOpen, setIsAdvancedOpen] = useState(false); + const filteredRoles = roles.filter( (role) => role.name !== "organization-workspace-creation-ban", ); diff --git a/site/src/pages/OrganizationSettingsPage/UserTable/UserRoleCell.tsx b/site/src/pages/OrganizationSettingsPage/UserTable/UserRoleCell.tsx index 4c350f6ffb5be..0261d81e3f578 100644 --- a/site/src/pages/OrganizationSettingsPage/UserTable/UserRoleCell.tsx +++ b/site/src/pages/OrganizationSettingsPage/UserTable/UserRoleCell.tsx @@ -16,13 +16,13 @@ import { type Interpolation, type Theme, useTheme } from "@emotion/react"; import Tooltip from "@mui/material/Tooltip"; import type { LoginType, SlimRole } from "api/typesGenerated"; -import { Pill } from "components/Pill/Pill"; -import { TableCell } from "components/Table/Table"; import { Popover, PopoverContent, PopoverTrigger, } from "components/deprecated/Popover/Popover"; +import { Pill } from "components/Pill/Pill"; +import { TableCell } from "components/Table/Table"; import type { FC } from "react"; import { EditRolesButton } from "./EditRolesButton"; diff --git a/site/src/pages/ResetPasswordPage/ChangePasswordPage.stories.tsx b/site/src/pages/ResetPasswordPage/ChangePasswordPage.stories.tsx index ce4644ce2d48e..359f7df66579c 100644 --- a/site/src/pages/ResetPasswordPage/ChangePasswordPage.stories.tsx +++ b/site/src/pages/ResetPasswordPage/ChangePasswordPage.stories.tsx @@ -1,8 +1,8 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { spyOn, userEvent, within } from "@storybook/test"; -import { API } from "api/api"; import { mockApiError } from "testHelpers/entities"; import { withGlobalSnackbar } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { API } from "api/api"; +import { spyOn, userEvent, within } from "storybook/test"; import ChangePasswordPage from "./ChangePasswordPage"; const meta: Meta = { diff --git a/site/src/pages/ResetPasswordPage/ChangePasswordPage.tsx b/site/src/pages/ResetPasswordPage/ChangePasswordPage.tsx index e2a8c8206e713..0b859b8c8e507 100644 --- a/site/src/pages/ResetPasswordPage/ChangePasswordPage.tsx +++ b/site/src/pages/ResetPasswordPage/ChangePasswordPage.tsx @@ -13,11 +13,7 @@ import { useFormik } from "formik"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation } from "react-query"; -import { - Link as RouterLink, - useNavigate, - useSearchParams, -} from "react-router-dom"; +import { Link as RouterLink, useNavigate, useSearchParams } from "react-router"; import { getApplicationName } from "utils/appearance"; import { getFormHelpers } from "utils/formUtils"; import * as yup from "yup"; diff --git a/site/src/pages/ResetPasswordPage/RequestOTPPage.stories.tsx b/site/src/pages/ResetPasswordPage/RequestOTPPage.stories.tsx index 5f75f607ab9d3..130d6013ceacc 100644 --- a/site/src/pages/ResetPasswordPage/RequestOTPPage.stories.tsx +++ b/site/src/pages/ResetPasswordPage/RequestOTPPage.stories.tsx @@ -1,8 +1,8 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { spyOn, userEvent, within } from "@storybook/test"; -import { API } from "api/api"; import { mockApiError } from "testHelpers/entities"; import { withGlobalSnackbar } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { API } from "api/api"; +import { spyOn, userEvent, within } from "storybook/test"; import RequestOTPPage from "./RequestOTPPage"; const meta: Meta = { diff --git a/site/src/pages/ResetPasswordPage/RequestOTPPage.tsx b/site/src/pages/ResetPasswordPage/RequestOTPPage.tsx index 1ba5017b906aa..66eea517e090e 100644 --- a/site/src/pages/ResetPasswordPage/RequestOTPPage.tsx +++ b/site/src/pages/ResetPasswordPage/RequestOTPPage.tsx @@ -9,7 +9,7 @@ import { Stack } from "components/Stack/Stack"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation } from "react-query"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { getApplicationName } from "utils/appearance"; const RequestOTPPage: FC = () => { diff --git a/site/src/pages/SetupPage/SetupPage.test.tsx b/site/src/pages/SetupPage/SetupPage.test.tsx index 0ab5d15c6f338..386720ac5f93d 100644 --- a/site/src/pages/SetupPage/SetupPage.test.tsx +++ b/site/src/pages/SetupPage/SetupPage.test.tsx @@ -1,14 +1,14 @@ -import { screen, waitFor } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; -import type { Response, User } from "api/typesGenerated"; -import { http, HttpResponse } from "msw"; -import { createMemoryRouter } from "react-router-dom"; import { MockBuildInfo, MockUserOwner } from "testHelpers/entities"; import { renderWithRouter, waitForLoaderToBeRemoved, } from "testHelpers/renderHelpers"; import { server } from "testHelpers/server"; +import { screen, waitFor } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import type { Response, User } from "api/typesGenerated"; +import { HttpResponse, http } from "msw"; +import { createMemoryRouter } from "react-router"; import { SetupPage } from "./SetupPage"; import { Language as PageViewLanguage } from "./SetupPageView"; @@ -135,10 +135,6 @@ describe("Setup Page", () => { path: "/setup", element: , }, - { - path: "/templates", - element:

      Templates

      , - }, ], { initialEntries: ["/setup"] }, ), diff --git a/site/src/pages/SetupPage/SetupPage.tsx b/site/src/pages/SetupPage/SetupPage.tsx index 45d0e06eee5cd..ece2a714a7019 100644 --- a/site/src/pages/SetupPage/SetupPage.tsx +++ b/site/src/pages/SetupPage/SetupPage.tsx @@ -3,10 +3,10 @@ import { authMethods, createFirstUser } from "api/queries/users"; import { Loader } from "components/Loader/Loader"; import { useAuthContext } from "contexts/auth/AuthProvider"; import { useEmbeddedMetadata } from "hooks/useEmbeddedMetadata"; -import { type FC, useEffect } from "react"; +import { type FC, useEffect, useRef } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery } from "react-query"; -import { Navigate, useNavigate } from "react-router-dom"; +import { Navigate } from "react-router"; import { pageTitle } from "utils/page"; import { sendDeploymentEvent } from "utils/telemetry"; import { SetupPageView } from "./SetupPageView"; @@ -24,7 +24,7 @@ export const SetupPage: FC = () => { const setupIsComplete = !isConfiguringTheFirstUser; const { metadata } = useEmbeddedMetadata(); const buildInfoQuery = useQuery(buildInfo(metadata["build-info"])); - const navigate = useNavigate(); + const setupRequired = useRef(false); useEffect(() => { if (!buildInfoQuery.data) { @@ -41,7 +41,11 @@ export const SetupPage: FC = () => { // If the user is logged in, navigate to the app if (isSignedIn) { - return ; + return setupRequired.current ? ( + + ) : ( + + ); } // If we've already completed setup, navigate to the login page @@ -49,6 +53,8 @@ export const SetupPage: FC = () => { return ; } + setupRequired.current = true; + return ( <> @@ -61,7 +67,6 @@ export const SetupPage: FC = () => { onSubmit={async (firstUser) => { await createFirstUserMutation.mutateAsync(firstUser); await signIn(firstUser.email, firstUser.password); - navigate("/templates"); }} /> diff --git a/site/src/pages/SetupPage/SetupPageView.stories.tsx b/site/src/pages/SetupPage/SetupPageView.stories.tsx index e013757e93330..ce6b9ce8c3394 100644 --- a/site/src/pages/SetupPage/SetupPageView.stories.tsx +++ b/site/src/pages/SetupPage/SetupPageView.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { chromatic } from "testHelpers/chromatic"; import { mockApiError } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { SetupPageView } from "./SetupPageView"; const meta: Meta = { diff --git a/site/src/pages/StarterTemplatePage/StarterTemplatePage.tsx b/site/src/pages/StarterTemplatePage/StarterTemplatePage.tsx index d7846a6648969..3f719d8e93a22 100644 --- a/site/src/pages/StarterTemplatePage/StarterTemplatePage.tsx +++ b/site/src/pages/StarterTemplatePage/StarterTemplatePage.tsx @@ -2,7 +2,7 @@ import { templateExamples } from "api/queries/templates"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; -import { useParams } from "react-router-dom"; +import { useParams } from "react-router"; import { pageTitle } from "utils/page"; import { StarterTemplatePageView } from "./StarterTemplatePageView"; diff --git a/site/src/pages/StarterTemplatePage/StarterTemplatePageView.stories.tsx b/site/src/pages/StarterTemplatePage/StarterTemplatePageView.stories.tsx index 28dede5bad03d..3c35efdc2686b 100644 --- a/site/src/pages/StarterTemplatePage/StarterTemplatePageView.stories.tsx +++ b/site/src/pages/StarterTemplatePage/StarterTemplatePageView.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { chromatic } from "testHelpers/chromatic"; import { MockTemplateExample, mockApiError } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { StarterTemplatePageView } from "./StarterTemplatePageView"; const meta: Meta = { diff --git a/site/src/pages/StarterTemplatePage/StarterTemplatePageView.tsx b/site/src/pages/StarterTemplatePage/StarterTemplatePageView.tsx index 2da189d2523d5..c4bb59f7717ab 100644 --- a/site/src/pages/StarterTemplatePage/StarterTemplatePageView.tsx +++ b/site/src/pages/StarterTemplatePage/StarterTemplatePageView.tsx @@ -14,7 +14,7 @@ import { import { Stack } from "components/Stack/Stack"; import { ExternalLinkIcon, PlusIcon } from "lucide-react"; import type { FC } from "react"; -import { Link } from "react-router-dom"; +import { Link } from "react-router"; interface StarterTemplatePageViewProps { starterTemplate?: TemplateExample; diff --git a/site/src/pages/TaskPage/TaskAppIframe.tsx b/site/src/pages/TaskPage/TaskAppIframe.tsx index ce0223e802fd9..4cea4d7d5e936 100644 --- a/site/src/pages/TaskPage/TaskAppIframe.tsx +++ b/site/src/pages/TaskPage/TaskAppIframe.tsx @@ -11,7 +11,7 @@ import { EllipsisVertical, ExternalLinkIcon, HouseIcon } from "lucide-react"; import { useAppLink } from "modules/apps/useAppLink"; import type { Task } from "modules/tasks/tasks"; import { type FC, useRef } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { cn } from "utils/cn"; type TaskAppIFrameProps = { diff --git a/site/src/pages/TaskPage/TaskApps.stories.tsx b/site/src/pages/TaskPage/TaskApps.stories.tsx new file mode 100644 index 0000000000000..3447c1c68035c --- /dev/null +++ b/site/src/pages/TaskPage/TaskApps.stories.tsx @@ -0,0 +1,134 @@ +import { + MockTasks, + MockWorkspace, + MockWorkspaceAgent, + MockWorkspaceApp, +} from "testHelpers/entities"; +import { withProxyProvider } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import type { WorkspaceApp } from "api/typesGenerated"; +import { TaskApps } from "./TaskApps"; + +const meta: Meta = { + title: "pages/TaskPage/TaskApps", + component: TaskApps, + decorators: [withProxyProvider()], + parameters: { + layout: "fullscreen", + }, +}; + +export default meta; +type Story = StoryObj; + +const mockAgentNoApps = { + ...MockWorkspaceAgent, + apps: [], +}; + +const mockExternalApp: WorkspaceApp = { + ...MockWorkspaceApp, + external: true, +}; + +const mockEmbeddedApp: WorkspaceApp = { + ...MockWorkspaceApp, + external: false, +}; + +const taskWithNoApps = { + ...MockTasks[0], + workspace: { + ...MockWorkspace, + latest_build: { + ...MockWorkspace.latest_build, + resources: [ + { + ...MockWorkspace.latest_build.resources[0], + agents: [mockAgentNoApps], + }, + ], + }, + }, +}; + +export const NoEmbeddedApps: Story = { + args: { + task: taskWithNoApps, + }, +}; + +export const WithExternalAppsOnly: Story = { + args: { + task: { + ...MockTasks[0], + workspace: { + ...MockWorkspace, + latest_build: { + ...MockWorkspace.latest_build, + resources: [ + { + ...MockWorkspace.latest_build.resources[0], + agents: [ + { + ...MockWorkspaceAgent, + apps: [mockExternalApp], + }, + ], + }, + ], + }, + }, + }, + }, +}; + +export const WithEmbeddedApps: Story = { + args: { + task: { + ...MockTasks[0], + workspace: { + ...MockWorkspace, + latest_build: { + ...MockWorkspace.latest_build, + resources: [ + { + ...MockWorkspace.latest_build.resources[0], + agents: [ + { + ...MockWorkspaceAgent, + apps: [mockEmbeddedApp], + }, + ], + }, + ], + }, + }, + }, + }, +}; + +export const WithMixedApps: Story = { + args: { + task: { + ...MockTasks[0], + workspace: { + ...MockWorkspace, + latest_build: { + ...MockWorkspace.latest_build, + resources: [ + { + ...MockWorkspace.latest_build.resources[0], + agents: [ + { + ...MockWorkspaceAgent, + apps: [mockEmbeddedApp, mockExternalApp], + }, + ], + }, + ], + }, + }, + }, + }, +}; diff --git a/site/src/pages/TaskPage/TaskApps.tsx b/site/src/pages/TaskPage/TaskApps.tsx index 0cccc8c7a01df..26d8562d1ebd2 100644 --- a/site/src/pages/TaskPage/TaskApps.tsx +++ b/site/src/pages/TaskPage/TaskApps.tsx @@ -1,4 +1,4 @@ -import type { WorkspaceApp } from "api/typesGenerated"; +import type { WorkspaceAgent, WorkspaceApp } from "api/typesGenerated"; import { Button } from "components/Button/Button"; import { DropdownMenu, @@ -8,19 +8,26 @@ import { } from "components/DropdownMenu/DropdownMenu"; import { ExternalImage } from "components/ExternalImage/ExternalImage"; import { InfoTooltip } from "components/InfoTooltip/InfoTooltip"; +import { Link } from "components/Link/Link"; import { ChevronDownIcon, LayoutGridIcon } from "lucide-react"; import { useAppLink } from "modules/apps/useAppLink"; import type { Task } from "modules/tasks/tasks"; import type React from "react"; import { type FC, useState } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { cn } from "utils/cn"; +import { docs } from "utils/docs"; import { TaskAppIFrame } from "./TaskAppIframe"; type TaskAppsProps = { task: Task; }; +type AppWithAgent = { + app: WorkspaceApp; + agent: WorkspaceAgent; +}; + export const TaskApps: FC = ({ task }) => { const agents = task.workspace.latest_build.resources .flatMap((r) => r.agents) @@ -29,43 +36,34 @@ export const TaskApps: FC = ({ task }) => { // The Chat UI app will be displayed in the sidebar, so we don't want to show // it here const apps = agents - .flatMap((a) => a?.apps) + .flatMap((agent) => + agent.apps.map((app) => ({ + app, + agent, + })), + ) .filter( - (a) => !!a && a.id !== task.workspace.latest_build.ai_task_sidebar_app_id, + ({ app }) => + !!app && app.id !== task.workspace.latest_build.ai_task_sidebar_app_id, ); - const embeddedApps = apps.filter((app) => !app.external); - const externalApps = apps.filter((app) => app.external); - - const [activeAppId, setActiveAppId] = useState(() => { - const appId = embeddedApps[0]?.id; - if (!appId) { - throw new Error("No apps found in task"); - } - return appId; - }); - - const activeApp = apps.find((app) => app.id === activeAppId); - if (!activeApp) { - throw new Error(`Active app with ID ${activeAppId} not found in task`); - } + const embeddedApps = apps.filter(({ app }) => !app.external); + const externalApps = apps.filter(({ app }) => app.external); - const agent = agents.find((a) => - a.apps.some((app) => app.id === activeAppId), + const [activeAppId, setActiveAppId] = useState( + embeddedApps[0]?.app.id, ); - if (!agent) { - throw new Error(`Agent for app ${activeAppId} not found in task workspace`); - } return (
      - {embeddedApps.map((app) => ( + {embeddedApps.map(({ app, agent }) => ( { e.preventDefault(); @@ -76,73 +74,118 @@ export const TaskApps: FC = ({ task }) => {
      {externalApps.length > 0 && ( -
      - - - - - - {externalApps.map((app) => { - const link = useAppLink(app, { - agent, - workspace: task.workspace, - }); - - return ( - - - {app.icon ? ( - - ) : ( - - )} - {link.label} - - - ); - })} - - -
      + )}
      -
      - {embeddedApps.map((app) => { - return ( - 0 ? ( +
      + {embeddedApps.map(({ app }) => { + return ( + + ); + })} +
      + ) : ( +
      +

      + No embedded apps found. +

      + + + + Learn how to configure apps + {" "} + for your tasks. + +
      + )} +
      + ); +}; + +type TaskExternalAppsDropdownProps = { + task: Task; + agents: WorkspaceAgent[]; + externalApps: AppWithAgent[]; +}; + +const TaskExternalAppsDropdown: FC = ({ + task, + externalApps, +}) => { + return ( +
      + + + + + + {externalApps.map(({ app, agent }) => ( + - ); - })} -
      - + ))} + + +
      + ); +}; + +const ExternalAppMenuItem: FC<{ + app: WorkspaceApp; + agent: WorkspaceAgent; + task: Task; +}> = ({ app, agent, task }) => { + const link = useAppLink(app, { + agent, + workspace: task.workspace, + }); + + return ( + + + {app.icon ? : } + {link.label} + + ); }; type TaskAppTabProps = { task: Task; app: WorkspaceApp; + agent: WorkspaceAgent; active: boolean; onClick: (e: React.MouseEvent) => void; }; -const TaskAppTab: FC = ({ task, app, active, onClick }) => { - const agent = task.workspace.latest_build.resources - .flatMap((r) => r.agents) - .filter((a) => !!a) - .find((a) => a.apps.some((a) => a.id === app.id)); - - if (!agent) { - throw new Error(`Agent for app ${app.id} not found in task workspace`); - } - +const TaskAppTab: FC = ({ + task, + app, + agent, + active, + onClick, +}) => { const link = useAppLink(app, { agent, workspace: task.workspace, diff --git a/site/src/pages/TaskPage/TaskPage.stories.tsx b/site/src/pages/TaskPage/TaskPage.stories.tsx index 0799f4625c95f..6a486442ace8c 100644 --- a/site/src/pages/TaskPage/TaskPage.stories.tsx +++ b/site/src/pages/TaskPage/TaskPage.stories.tsx @@ -1,11 +1,3 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, spyOn, within } from "@storybook/test"; -import { API } from "api/api"; -import type { - Workspace, - WorkspaceApp, - WorkspaceResource, -} from "api/typesGenerated"; import { MockFailedWorkspace, MockStartingWorkspace, @@ -19,6 +11,14 @@ import { mockApiError, } from "testHelpers/entities"; import { withProxyProvider } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { API } from "api/api"; +import type { + Workspace, + WorkspaceApp, + WorkspaceResource, +} from "api/typesGenerated"; +import { expect, spyOn, within } from "storybook/test"; import TaskPage, { data, WorkspaceDoesNotHaveAITaskError } from "./TaskPage"; const meta: Meta = { @@ -36,7 +36,7 @@ type Story = StoryObj; export const Loading: Story = { beforeEach: () => { spyOn(data, "fetchTask").mockImplementation( - () => new Promise((res) => 1000 * 60 * 60), + () => new Promise((_res) => 1000 * 60 * 60), ); }, }; diff --git a/site/src/pages/TaskPage/TaskPage.tsx b/site/src/pages/TaskPage/TaskPage.tsx index 19e2c5aafdcd7..4a65c6f1be993 100644 --- a/site/src/pages/TaskPage/TaskPage.tsx +++ b/site/src/pages/TaskPage/TaskPage.tsx @@ -2,26 +2,28 @@ import { API } from "api/api"; import { getErrorDetail, getErrorMessage } from "api/errors"; import { template as templateQueryOptions } from "api/queries/templates"; import type { Workspace, WorkspaceStatus } from "api/typesGenerated"; +import isChromatic from "chromatic/isChromatic"; import { Button } from "components/Button/Button"; import { Loader } from "components/Loader/Loader"; import { Margins } from "components/Margins/Margins"; +import { ScrollArea } from "components/ScrollArea/ScrollArea"; import { useWorkspaceBuildLogs } from "hooks/useWorkspaceBuildLogs"; import { ArrowLeftIcon, RotateCcwIcon } from "lucide-react"; import { AI_PROMPT_PARAMETER_NAME, type Task } from "modules/tasks/tasks"; -import type { ReactNode } from "react"; +import { WorkspaceBuildLogs } from "modules/workspaces/WorkspaceBuildLogs/WorkspaceBuildLogs"; +import { type FC, type ReactNode, useEffect, useRef } from "react"; import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; import { Panel, PanelGroup, PanelResizeHandle } from "react-resizable-panels"; -import { useParams } from "react-router-dom"; -import { Link as RouterLink } from "react-router-dom"; -import { ellipsizeText } from "utils/ellipsizeText"; +import { Link as RouterLink, useParams } from "react-router"; import { pageTitle } from "utils/page"; import { - ActiveTransition, + getActiveTransitionStats, WorkspaceBuildProgress, } from "../WorkspacePage/WorkspaceBuildProgress"; import { TaskApps } from "./TaskApps"; import { TaskSidebar } from "./TaskSidebar"; +import { TaskTopbar } from "./TaskTopbar"; const TaskPage = () => { const { workspace: workspaceName, username } = useParams() as { @@ -38,18 +40,7 @@ const TaskPage = () => { refetchInterval: 5_000, }); - const { data: template } = useQuery({ - ...templateQueryOptions(task?.workspace.template_id ?? ""), - enabled: Boolean(task), - }); - const waitingStatuses: WorkspaceStatus[] = ["starting", "pending"]; - const shouldStreamBuildLogs = - task && waitingStatuses.includes(task.workspace.latest_build.status); - const buildLogs = useWorkspaceBuildLogs( - task?.workspace.latest_build.id ?? "", - shouldStreamBuildLogs, - ); if (error) { return ( @@ -96,38 +87,9 @@ const TaskPage = () => { } let content: ReactNode = null; - const terminatedStatuses: WorkspaceStatus[] = [ - "canceled", - "canceling", - "deleted", - "deleting", - "stopped", - "stopping", - ]; if (waitingStatuses.includes(task.workspace.latest_build.status)) { - // If no template yet, use an indeterminate progress bar. - const transition = (template && - ActiveTransition(template, task.workspace)) || { P50: 0, P95: null }; - const lastStage = - buildLogs?.[buildLogs.length - 1]?.stage || "Waiting for build status"; - content = ( -
      -
      -

      - Starting your workspace -

      -
      {lastStage}
      -
      -
      - -
      -
      - ); + content = ; } else if (task.workspace.latest_build.status === "failed") { content = (
      @@ -171,14 +133,7 @@ const TaskPage = () => { ); } else { - content = ; - } - - return ( - <> - - {pageTitle(ellipsizeText(task.prompt, 64) ?? "Task")} - + content = ( @@ -186,14 +141,95 @@ const TaskPage = () => {
      - {content} + + + + ); + } + + return ( + <> + + {pageTitle(ellipsizeText(task.prompt, 64))} + + +
      + + {content} +
      ); }; export default TaskPage; +type TaskBuildingWorkspaceProps = { task: Task }; + +const TaskBuildingWorkspace: FC = ({ task }) => { + const { data: template } = useQuery( + templateQueryOptions(task.workspace.template_id), + ); + + const buildLogs = useWorkspaceBuildLogs(task?.workspace.latest_build.id); + + // If no template yet, use an indeterminate progress bar. + const transitionStats = (template && + getActiveTransitionStats(template, task.workspace)) || { + P50: 0, + P95: null, + }; + + const scrollAreaRef = useRef(null); + // biome-ignore lint/correctness/useExhaustiveDependencies: this effect should run when build logs change + useEffect(() => { + if (isChromatic()) { + return; + } + const scrollAreaEl = scrollAreaRef.current; + const scrollAreaViewportEl = scrollAreaEl?.querySelector( + "[data-radix-scroll-area-viewport]", + ); + if (scrollAreaViewportEl) { + scrollAreaViewportEl.scrollTop = scrollAreaViewportEl.scrollHeight; + } + }, [buildLogs]); + + return ( +
      +
      +
      +

      + Starting your workspace +

      +
      + Your task will be running in a few moments +
      +
      + +
      + + + + + +
      +
      +
      + ); +}; + export class WorkspaceDoesNotHaveAITaskError extends Error { constructor(workspace: Workspace) { super( @@ -229,3 +265,7 @@ export const data = { } satisfies Task; }, }; + +const ellipsizeText = (text: string, maxLength = 80): string => { + return text.length <= maxLength ? text : `${text.slice(0, maxLength - 3)}...`; +}; diff --git a/site/src/pages/TaskPage/TaskSidebar.tsx b/site/src/pages/TaskPage/TaskSidebar.tsx index ca691bea08788..eb1aeb6d59375 100644 --- a/site/src/pages/TaskPage/TaskSidebar.tsx +++ b/site/src/pages/TaskPage/TaskSidebar.tsx @@ -1,24 +1,8 @@ import type { WorkspaceApp } from "api/typesGenerated"; -import { Button } from "components/Button/Button"; -import { - DropdownMenu, - DropdownMenuContent, - DropdownMenuItem, - DropdownMenuTrigger, -} from "components/DropdownMenu/DropdownMenu"; import { Spinner } from "components/Spinner/Spinner"; -import { - Tooltip, - TooltipContent, - TooltipProvider, - TooltipTrigger, -} from "components/Tooltip/Tooltip"; -import { ArrowLeftIcon, EllipsisVerticalIcon } from "lucide-react"; import type { Task } from "modules/tasks/tasks"; import type { FC } from "react"; -import { Link as RouterLink } from "react-router-dom"; import { TaskAppIFrame } from "./TaskAppIframe"; -import { TaskStatusLink } from "./TaskStatusLink"; type TaskSidebarProps = { task: Task; @@ -84,60 +68,6 @@ export const TaskSidebar: FC = ({ task }) => { return (
      +
      + + + ); +}; + +const TasksEmpty: FC = () => { + return ( + + +
      +
      +

      + No tasks found +

      + + Use the form above to run a task + +
      +
      +
      +
      + ); +}; + +type TasksProps = { tasks: Task[] }; + +const Tasks: FC = ({ tasks }) => { + return tasks.map(({ workspace, prompt }) => { + const templateDisplayName = + workspace.template_display_name ?? workspace.template_name; + + return ( + + + + + {prompt} + + + Access task + + + } + subtitle={templateDisplayName} + avatar={ + + } + /> + + + + + + + {relativeTime(new Date(workspace.created_at))} + + } + src={workspace.owner_avatar_url} + /> + + + ); + }); +}; + +const TasksSkeleton: FC = () => { + return ( + + + + + + + + + + + + + + ); +}; diff --git a/site/src/pages/TasksPage/UsersCombobox.tsx b/site/src/pages/TasksPage/UsersCombobox.tsx index e3f8de2bbca56..e3e443754a17f 100644 --- a/site/src/pages/TasksPage/UsersCombobox.tsx +++ b/site/src/pages/TasksPage/UsersCombobox.tsx @@ -1,5 +1,6 @@ import Skeleton from "@mui/material/Skeleton"; import { users } from "api/queries/users"; +import type { User } from "api/typesGenerated"; import { Avatar } from "components/Avatar/Avatar"; import { Button } from "components/Button/Button"; import { @@ -15,44 +16,41 @@ import { PopoverContent, PopoverTrigger, } from "components/Popover/Popover"; +import { useAuthenticated } from "hooks"; import { useDebouncedValue } from "hooks/debounce"; import { CheckIcon, ChevronsUpDownIcon } from "lucide-react"; import { type FC, useState } from "react"; import { keepPreviousData, useQuery } from "react-query"; import { cn } from "utils/cn"; -export type UserOption = { +type UserOption = { label: string; - value: string; // Username + /** + * The username of the user. + */ + value: string; avatarUrl?: string; }; type UsersComboboxProps = { - selectedOption: UserOption | undefined; - onSelect: (option: UserOption | undefined) => void; + value: string; + onValueChange: (value: string) => void; }; export const UsersCombobox: FC = ({ - selectedOption, - onSelect, + value, + onValueChange, }) => { const [open, setOpen] = useState(false); const [search, setSearch] = useState(""); const debouncedSearch = useDebouncedValue(search, 250); - const usersQuery = useQuery({ + const { user } = useAuthenticated(); + const { data: options } = useQuery({ ...users({ q: debouncedSearch }), - select: (data) => - data.users.toSorted((a, b) => { - return selectedOption && a.username === selectedOption.value ? -1 : 0; - }), + select: (res) => mapUsersToOptions(res.users, user, value), placeholderData: keepPreviousData, }); - - const options = usersQuery.data?.map((user) => ({ - label: user.name || user.username, - value: user.username, - avatarUrl: user.avatar_url, - })); + const selectedOption = options?.find((o) => o.value === value); return ( @@ -91,11 +89,7 @@ export const UsersCombobox: FC = ({ key={option.value} value={option.value} onSelect={() => { - onSelect( - option.value === selectedOption?.value - ? undefined - : option, - ); + onValueChange(option.value); setOpen(false); }} > @@ -131,3 +125,37 @@ const UserItem: FC = ({ option, className }) => {
      ); }; + +function mapUsersToOptions( + users: readonly User[], + /** + * Includes the authenticated user in the list if they are not already + * present. So the current user can always select themselves easily. + */ + authUser: User, + /** + * Username of the currently selected user. + */ + selectedValue: string, +): UserOption[] { + const includeAuthenticatedUser = (users: readonly User[]) => { + const hasAuthenticatedUser = users.some( + (u) => u.username === authUser.username, + ); + if (hasAuthenticatedUser) { + return users; + } + return [authUser, ...users]; + }; + + const sortSelectedFirst = (a: User) => + selectedValue && a.username === selectedValue ? -1 : 0; + + return includeAuthenticatedUser(users) + .toSorted(sortSelectedFirst) + .map((user) => ({ + label: user.name || user.username, + value: user.username, + avatarUrl: user.avatar_url, + })); +} diff --git a/site/src/pages/TasksPage/data.ts b/site/src/pages/TasksPage/data.ts new file mode 100644 index 0000000000000..0795dab2bb638 --- /dev/null +++ b/site/src/pages/TasksPage/data.ts @@ -0,0 +1,24 @@ +import { API } from "api/api"; +import type { Task } from "modules/tasks/tasks"; + +// TODO: This is a temporary solution while the BE does not return the Task in a +// right shape with a custom name. This should be removed once the BE is fixed. +export const data = { + async createTask( + prompt: string, + userId: string, + templateVersionId: string, + presetId: string | undefined, + ): Promise { + const workspace = await API.experimental.createTask(userId, { + template_version_id: templateVersionId, + template_version_preset_id: presetId, + prompt, + }); + + return { + workspace, + prompt, + }; + }, +}; diff --git a/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedExperimentRouter.tsx b/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedExperimentRouter.tsx index 85dd2e39b5452..ec834d0630d1c 100644 --- a/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedExperimentRouter.tsx +++ b/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedExperimentRouter.tsx @@ -3,7 +3,7 @@ import { ErrorAlert } from "components/Alert/ErrorAlert"; import { Loader } from "components/Loader/Loader"; import type { FC } from "react"; import { useQuery } from "react-query"; -import { useParams } from "react-router-dom"; +import { useParams } from "react-router"; import TemplateEmbedPage from "./TemplateEmbedPage"; import TemplateEmbedPageExperimental from "./TemplateEmbedPageExperimental"; diff --git a/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPage.test.tsx b/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPage.test.tsx index a98e669807f89..abe227a17f053 100644 --- a/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPage.test.tsx +++ b/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPage.test.tsx @@ -1,7 +1,3 @@ -import { screen } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; -import { API } from "api/api"; -import { TemplateLayout } from "pages/TemplatePage/TemplateLayout"; import { MockTemplate, MockTemplateVersionParameter1 as parameter1, @@ -11,6 +7,10 @@ import { renderWithAuth, waitForLoaderToBeRemoved, } from "testHelpers/renderHelpers"; +import { screen } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { API } from "api/api"; +import { TemplateLayout } from "pages/TemplatePage/TemplateLayout"; import TemplateEmbedPage from "./TemplateEmbedPage"; test("Users can fill the parameters and copy the open in coder url", async () => { diff --git a/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPageExperimental.tsx b/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPageExperimental.tsx index 010c765007aef..e1f53cb6af6a6 100644 --- a/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPageExperimental.tsx +++ b/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPageExperimental.tsx @@ -39,7 +39,7 @@ const TemplateEmbedPageExperimental: FC = () => { const [wsError, setWsError] = useState(null); const sendMessage = useEffectEvent( - (formValues: Record, ownerId?: string) => { + (formValues: Record, _ownerId?: string) => { const request: DynamicParametersRequest = { id: wsResponseId.current + 1, owner_id: me.id, @@ -187,90 +187,88 @@ const TemplateEmbedPageView: FC = ({ }; return ( - <> -
      -
      - {isLoading ? ( -
      -
      - - -
      -
      - - -
      -
      - - -
      +
      +
      + {isLoading ? ( +
      +
      + +
      - ) : ( - <> - {Boolean(error) && } - {diagnostics.length > 0 && ( - - )} -
      -
      -
      -

      Creation mode

      -

      - When set to automatic mode, clicking the button will - create the workspace automatically without displaying a - form to the user. -

      +
      + + +
      +
      + + +
      +
      + ) : ( + <> + {Boolean(error) && } + {diagnostics.length > 0 && ( + + )} +
      +
      +
      +

      Creation mode

      +

      + When set to automatic mode, clicking the button will create + the workspace automatically without displaying a form to the + user. +

      +
      + { + setFormState((prev) => ({ + ...prev, + mode: v as "manual" | "auto", + })); + }} + > +
      + +
      - { - setFormState((prev) => ({ - ...prev, - mode: v as "manual" | "auto", - })); - }} - > -
      - - -
      -
      - - -
      -
      -
      - - - - {parameters.length > 0 && ( -
      - {parameters.map((parameter) => { - const isDisabled = parameter.styling?.disabled; - return ( - handleChange(parameter, value)} - disabled={isDisabled} - value={formState.paramValues[parameter.name] || ""} - /> - ); - })} +
      + +
      - )} -
      - - )} -
      + +
      + + - + {parameters.length > 0 && ( +
      + {parameters.map((parameter) => { + const isDisabled = parameter.styling?.disabled; + return ( + handleChange(parameter, value)} + disabled={isDisabled} + value={formState.paramValues[parameter.name] || ""} + /> + ); + })} +
      + )} +
      + + )}
      - + + +
      ); }; diff --git a/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPageView.stories.tsx b/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPageView.stories.tsx index 571a27f6116b5..5eac986498491 100644 --- a/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPageView.stories.tsx +++ b/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPageView.stories.tsx @@ -1,4 +1,3 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockTemplate, MockTemplateVersionParameter1, @@ -6,6 +5,7 @@ import { MockTemplateVersionParameter3, MockTemplateVersionParameter4, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { TemplateEmbedPageView } from "./TemplateEmbedPage"; const meta: Meta = { diff --git a/site/src/pages/TemplatePage/TemplateFilesPage/TemplateFilesPage.test.tsx b/site/src/pages/TemplatePage/TemplateFilesPage/TemplateFilesPage.test.tsx index b6b6bc231ea71..d042cb0e67ed0 100644 --- a/site/src/pages/TemplatePage/TemplateFilesPage/TemplateFilesPage.test.tsx +++ b/site/src/pages/TemplatePage/TemplateFilesPage/TemplateFilesPage.test.tsx @@ -1,10 +1,10 @@ -import { render, screen } from "@testing-library/react"; import { AppProviders } from "App"; -import { RequireAuth } from "contexts/auth/RequireAuth"; -import { http, HttpResponse } from "msw"; -import { RouterProvider, createMemoryRouter } from "react-router-dom"; import { MockTemplate } from "testHelpers/entities"; import { server } from "testHelpers/server"; +import { render, screen } from "@testing-library/react"; +import { RequireAuth } from "contexts/auth/RequireAuth"; +import { HttpResponse, http } from "msw"; +import { createMemoryRouter, RouterProvider } from "react-router"; import { TemplateLayout } from "../TemplateLayout"; import TemplateFilesPage from "./TemplateFilesPage"; diff --git a/site/src/pages/TemplatePage/TemplateFilesPage/TemplateFilesPage.tsx b/site/src/pages/TemplatePage/TemplateFilesPage/TemplateFilesPage.tsx index 23ee02d5442d4..833afbfe77c02 100644 --- a/site/src/pages/TemplatePage/TemplateFilesPage/TemplateFilesPage.tsx +++ b/site/src/pages/TemplatePage/TemplateFilesPage/TemplateFilesPage.tsx @@ -5,7 +5,7 @@ import { useTemplateLayoutContext } from "pages/TemplatePage/TemplateLayout"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; -import { useParams } from "react-router-dom"; +import { useParams } from "react-router"; import { getTemplatePageTitle } from "../utils"; const TemplateFilesPage: FC = () => { diff --git a/site/src/pages/TemplatePage/TemplateInsightsPage/DateRange.tsx b/site/src/pages/TemplatePage/TemplateInsightsPage/DateRange.tsx index 1f27ec7f8412f..3d9fb8120efbf 100644 --- a/site/src/pages/TemplatePage/TemplateInsightsPage/DateRange.tsx +++ b/site/src/pages/TemplatePage/TemplateInsightsPage/DateRange.tsx @@ -10,7 +10,7 @@ import { import dayjs from "dayjs"; import { MoveRightIcon } from "lucide-react"; import { type ComponentProps, type FC, useRef, useState } from "react"; -import { DateRangePicker, createStaticRanges } from "react-date-range"; +import { createStaticRanges, DateRangePicker } from "react-date-range"; // The type definition from @types is wrong declare module "react-date-range" { diff --git a/site/src/pages/TemplatePage/TemplateInsightsPage/IntervalMenu.tsx b/site/src/pages/TemplatePage/TemplateInsightsPage/IntervalMenu.tsx index 7f3b11a4069ad..5aa98a7665d19 100644 --- a/site/src/pages/TemplatePage/TemplateInsightsPage/IntervalMenu.tsx +++ b/site/src/pages/TemplatePage/TemplateInsightsPage/IntervalMenu.tsx @@ -1,8 +1,7 @@ import Menu from "@mui/material/Menu"; import MenuItem from "@mui/material/MenuItem"; import { Button } from "components/Button/Button"; -import { ChevronDownIcon } from "lucide-react"; -import { CheckIcon } from "lucide-react"; +import { CheckIcon, ChevronDownIcon } from "lucide-react"; import { type FC, useRef, useState } from "react"; const insightsIntervals = { diff --git a/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.stories.tsx b/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.stories.tsx index 2638308b876f4..37b7b89a4c0b2 100644 --- a/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.stories.tsx +++ b/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { chromatic } from "testHelpers/chromatic"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { TemplateInsightsPageView } from "./TemplateInsightsPage"; const meta: Meta = { diff --git a/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.tsx b/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.tsx index 37124431b4b41..0c12d96625156 100644 --- a/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.tsx +++ b/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.tsx @@ -47,7 +47,7 @@ import { } from "react"; import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; -import { useSearchParams } from "react-router-dom"; +import { useSearchParams } from "react-router"; import { getLatencyColor } from "utils/latency"; import { addTime, @@ -59,8 +59,8 @@ import { import { getTemplatePageTitle } from "../utils"; import { DateRange as DailyPicker, type DateRangeValue } from "./DateRange"; import { type InsightsInterval, IntervalMenu } from "./IntervalMenu"; -import { WeekPicker, numberOfWeeksOptions } from "./WeekPicker"; import { lastWeeks } from "./utils"; +import { numberOfWeeksOptions, WeekPicker } from "./WeekPicker"; const DEFAULT_NUMBER_OF_WEEKS = numberOfWeeksOptions[0]; diff --git a/site/src/pages/TemplatePage/TemplateInsightsPage/WeekPicker.tsx b/site/src/pages/TemplatePage/TemplateInsightsPage/WeekPicker.tsx index f2f3e95bf4a68..77ce8475a6de6 100644 --- a/site/src/pages/TemplatePage/TemplateInsightsPage/WeekPicker.tsx +++ b/site/src/pages/TemplatePage/TemplateInsightsPage/WeekPicker.tsx @@ -2,8 +2,7 @@ import Button from "@mui/material/Button"; import Menu from "@mui/material/Menu"; import MenuItem from "@mui/material/MenuItem"; import dayjs from "dayjs"; -import { ChevronDownIcon } from "lucide-react"; -import { CheckIcon } from "lucide-react"; +import { CheckIcon, ChevronDownIcon } from "lucide-react"; import { type FC, useRef, useState } from "react"; import type { DateRangeValue } from "./DateRange"; import { lastWeeks } from "./utils"; diff --git a/site/src/pages/TemplatePage/TemplateLayout.tsx b/site/src/pages/TemplatePage/TemplateLayout.tsx index 500f870579367..c6b9f81945f30 100644 --- a/site/src/pages/TemplatePage/TemplateLayout.tsx +++ b/site/src/pages/TemplatePage/TemplateLayout.tsx @@ -11,14 +11,14 @@ import { workspacePermissionChecks, } from "modules/permissions/workspaces"; import { + createContext, type FC, type PropsWithChildren, Suspense, - createContext, useContext, } from "react"; import { useQuery } from "react-query"; -import { Outlet, useLocation, useNavigate, useParams } from "react-router-dom"; +import { Outlet, useLocation, useNavigate, useParams } from "react-router"; import { TemplatePageHeader } from "./TemplatePageHeader"; const templatePermissions = ( diff --git a/site/src/pages/TemplatePage/TemplatePageHeader.stories.tsx b/site/src/pages/TemplatePage/TemplatePageHeader.stories.tsx index 04721a2224f0f..10063c21a134f 100644 --- a/site/src/pages/TemplatePage/TemplatePageHeader.stories.tsx +++ b/site/src/pages/TemplatePage/TemplatePageHeader.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockTemplate, MockTemplateVersion } from "testHelpers/entities"; import { withDashboardProvider } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { TemplatePageHeader } from "./TemplatePageHeader"; const meta: Meta = { diff --git a/site/src/pages/TemplatePage/TemplatePageHeader.tsx b/site/src/pages/TemplatePage/TemplatePageHeader.tsx index a7ebbf0ad00b1..544321c35e6d4 100644 --- a/site/src/pages/TemplatePage/TemplatePageHeader.tsx +++ b/site/src/pages/TemplatePage/TemplatePageHeader.tsx @@ -27,8 +27,9 @@ import { } from "components/PageHeader/PageHeader"; import { Pill } from "components/Pill/Pill"; import { Stack } from "components/Stack/Stack"; -import { CopyIcon, DownloadIcon } from "lucide-react"; import { + CopyIcon, + DownloadIcon, EllipsisVertical, PlusIcon, SettingsIcon, @@ -38,7 +39,7 @@ import { linkToTemplate, useLinks } from "modules/navigation"; import type { WorkspacePermissions } from "modules/permissions/workspaces"; import type { FC } from "react"; import { useQuery } from "react-query"; -import { Link as RouterLink, useNavigate } from "react-router-dom"; +import { Link as RouterLink, useNavigate } from "react-router"; import { TemplateStats } from "./TemplateStats"; import { useDeletionDialogState } from "./useDeletionDialogState"; diff --git a/site/src/pages/TemplatePage/TemplateRedirectController.test.tsx b/site/src/pages/TemplatePage/TemplateRedirectController.test.tsx index dd030e31cc038..4bdc4e5bb2441 100644 --- a/site/src/pages/TemplatePage/TemplateRedirectController.test.tsx +++ b/site/src/pages/TemplatePage/TemplateRedirectController.test.tsx @@ -1,7 +1,7 @@ -import { waitFor } from "@testing-library/react"; -import { API } from "api/api"; import * as M from "testHelpers/entities"; import { renderWithAuth } from "testHelpers/renderHelpers"; +import { waitFor } from "@testing-library/react"; +import { API } from "api/api"; import { TemplateRedirectController } from "./TemplateRedirectController"; const renderTemplateRedirectController = (route: string) => { diff --git a/site/src/pages/TemplatePage/TemplateRedirectController.tsx b/site/src/pages/TemplatePage/TemplateRedirectController.tsx index c4164746d1a6a..b81c26e343387 100644 --- a/site/src/pages/TemplatePage/TemplateRedirectController.tsx +++ b/site/src/pages/TemplatePage/TemplateRedirectController.tsx @@ -1,7 +1,7 @@ import type { Organization } from "api/typesGenerated"; import { useDashboard } from "modules/dashboard/useDashboard"; import type { FC } from "react"; -import { Navigate, Outlet, useLocation, useParams } from "react-router-dom"; +import { Navigate, Outlet, useLocation, useParams } from "react-router"; export const TemplateRedirectController: FC = () => { const { organizations, showOrganizations } = useDashboard(); diff --git a/site/src/pages/TemplatePage/TemplateResourcesPage/TemplateResourcesPageView.stories.tsx b/site/src/pages/TemplatePage/TemplateResourcesPage/TemplateResourcesPageView.stories.tsx index 2ad817348b5f1..6a88d61bc3827 100644 --- a/site/src/pages/TemplatePage/TemplateResourcesPage/TemplateResourcesPageView.stories.tsx +++ b/site/src/pages/TemplatePage/TemplateResourcesPage/TemplateResourcesPageView.stories.tsx @@ -1,9 +1,9 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockTemplate, MockWorkspaceResource, MockWorkspaceVolumeResource, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { TemplateResourcesPageView } from "./TemplateResourcesPageView"; const meta: Meta = { diff --git a/site/src/pages/TemplatePage/TemplateResourcesPage/TemplateResourcesPageView.tsx b/site/src/pages/TemplatePage/TemplateResourcesPage/TemplateResourcesPageView.tsx index 3dd0f0518ad51..a3fc97eda55c7 100644 --- a/site/src/pages/TemplatePage/TemplateResourcesPage/TemplateResourcesPageView.tsx +++ b/site/src/pages/TemplatePage/TemplateResourcesPage/TemplateResourcesPageView.tsx @@ -2,7 +2,7 @@ import type { Template, WorkspaceResource } from "api/typesGenerated"; import { Loader } from "components/Loader/Loader"; import { TemplateResourcesTable } from "modules/templates/TemplateResourcesTable/TemplateResourcesTable"; import type { FC } from "react"; -import { Navigate, useLocation } from "react-router-dom"; +import { Navigate, useLocation } from "react-router"; interface TemplateResourcesPageViewProps { resources?: WorkspaceResource[]; diff --git a/site/src/pages/TemplatePage/TemplateStats.stories.tsx b/site/src/pages/TemplatePage/TemplateStats.stories.tsx index f1e1f694178ef..d10c797f4c97f 100644 --- a/site/src/pages/TemplatePage/TemplateStats.stories.tsx +++ b/site/src/pages/TemplatePage/TemplateStats.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockTemplate, MockTemplateVersion } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { TemplateStats } from "./TemplateStats"; const meta: Meta = { diff --git a/site/src/pages/TemplatePage/TemplateStats.tsx b/site/src/pages/TemplatePage/TemplateStats.tsx index 479cfdd14bf8d..dbd48497cc48b 100644 --- a/site/src/pages/TemplatePage/TemplateStats.tsx +++ b/site/src/pages/TemplatePage/TemplateStats.tsx @@ -1,7 +1,7 @@ import type { Template, TemplateVersion } from "api/typesGenerated"; import { Stats, StatsItem } from "components/Stats/Stats"; import type { FC } from "react"; -import { Link } from "react-router-dom"; +import { Link } from "react-router"; import { createDayString } from "utils/createDayString"; import { formatTemplateActiveDevelopers, diff --git a/site/src/pages/TemplatePage/TemplateVersionsPage/VersionRow.tsx b/site/src/pages/TemplatePage/TemplateVersionsPage/VersionRow.tsx index 5c140c816067a..02115ac5a3c7a 100644 --- a/site/src/pages/TemplatePage/TemplateVersionsPage/VersionRow.tsx +++ b/site/src/pages/TemplatePage/TemplateVersionsPage/VersionRow.tsx @@ -9,7 +9,7 @@ import { Stack } from "components/Stack/Stack"; import { TimelineEntry } from "components/Timeline/TimelineEntry"; import { useClickableTableRow } from "hooks/useClickableTableRow"; import type { FC } from "react"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; interface VersionRowProps { version: TemplateVersion; diff --git a/site/src/pages/TemplatePage/TemplateVersionsPage/VersionsTable.stories.tsx b/site/src/pages/TemplatePage/TemplateVersionsPage/VersionsTable.stories.tsx index 5d98328e6649d..3530e9b79606e 100644 --- a/site/src/pages/TemplatePage/TemplateVersionsPage/VersionsTable.stories.tsx +++ b/site/src/pages/TemplatePage/TemplateVersionsPage/VersionsTable.stories.tsx @@ -1,5 +1,3 @@ -import { action } from "@storybook/addon-actions"; -import type { Meta, StoryObj } from "@storybook/react"; import { MockCanceledProvisionerJob, MockCancelingProvisionerJob, @@ -8,6 +6,8 @@ import { MockRunningProvisionerJob, MockTemplateVersion, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { action } from "storybook/actions"; import { VersionsTable } from "./VersionsTable"; const meta: Meta = { diff --git a/site/src/pages/TemplatePage/useDeletionDialogState.test.ts b/site/src/pages/TemplatePage/useDeletionDialogState.test.ts index db918b76955c1..5be7910092fc6 100644 --- a/site/src/pages/TemplatePage/useDeletionDialogState.test.ts +++ b/site/src/pages/TemplatePage/useDeletionDialogState.test.ts @@ -1,6 +1,6 @@ +import { MockTemplate } from "testHelpers/entities"; import { act, renderHook, waitFor } from "@testing-library/react"; import { API } from "api/api"; -import { MockTemplate } from "testHelpers/entities"; import { useDeletionDialogState } from "./useDeletionDialogState"; test("delete dialog starts closed", () => { diff --git a/site/src/pages/TemplateSettingsPage/Sidebar.tsx b/site/src/pages/TemplateSettingsPage/Sidebar.tsx index 1aaa426061968..906a40585ca7e 100644 --- a/site/src/pages/TemplateSettingsPage/Sidebar.tsx +++ b/site/src/pages/TemplateSettingsPage/Sidebar.tsx @@ -5,10 +5,12 @@ import { SidebarHeader, SidebarNavItem, } from "components/Sidebar/Sidebar"; -import { CodeIcon as VariablesIcon } from "lucide-react"; -import { TimerIcon as ScheduleIcon } from "lucide-react"; -import { SettingsIcon } from "lucide-react"; -import { LockIcon } from "lucide-react"; +import { + LockIcon, + TimerIcon as ScheduleIcon, + SettingsIcon, + CodeIcon as VariablesIcon, +} from "lucide-react"; import { linkToTemplate, useLinks } from "modules/navigation"; import type { FC } from "react"; diff --git a/site/src/pages/TemplateSettingsPage/TemplateGeneralSettingsPage/TemplateSettingsForm.tsx b/site/src/pages/TemplateSettingsPage/TemplateGeneralSettingsPage/TemplateSettingsForm.tsx index 677984e5e9e5a..5b35b5ba26f14 100644 --- a/site/src/pages/TemplateSettingsPage/TemplateGeneralSettingsPage/TemplateSettingsForm.tsx +++ b/site/src/pages/TemplateSettingsPage/TemplateGeneralSettingsPage/TemplateSettingsForm.tsx @@ -4,6 +4,7 @@ import FormHelperText from "@mui/material/FormHelperText"; import MenuItem from "@mui/material/MenuItem"; import TextField from "@mui/material/TextField"; import { + CORSBehaviors, type Template, type UpdateTemplateMeta, WorkspaceAppSharingLevels, @@ -52,6 +53,7 @@ export const validationSchema = Yup.object({ use_classic_parameter_flow: Yup.boolean(), deprecation_message: Yup.string(), max_port_sharing_level: Yup.string().oneOf(WorkspaceAppSharingLevels), + cors_behavior: Yup.string().oneOf(Object.values(CORSBehaviors)), }); export interface TemplateSettingsForm { @@ -93,6 +95,7 @@ export const TemplateSettingsForm: FC = ({ disable_everyone_group_access: false, max_port_share_level: template.max_port_share_level, use_classic_parameter_flow: template.use_classic_parameter_flow, + cors_behavior: template.cors_behavior, }, validationSchema, onSubmit, @@ -245,19 +248,20 @@ export const TemplateSettingsForm: FC = ({ label={ - Enable dynamic parameters for workspace creation + Enable dynamic parameters for workspace creation (recommended)
      - The new workspace form allows you to design your template - with new form types and identity-aware conditional - parameters. The form will only present options that are - compatible and available. + The dynamic workspace form allows you to design your + template with additional form types and identity-aware + conditional parameters. This is the default option for new + templates. The classic workspace creation flow will be + deprecated in a future release.
      Learn more @@ -337,6 +341,28 @@ export const TemplateSettingsForm: FC = ({ + + + + Simple (recommended) + Passthru + + + + -
      - - - +
      + +
      + + ); }; diff --git a/site/src/pages/UserSettingsPage/SecurityPage/SecurityPage.test.tsx b/site/src/pages/UserSettingsPage/SecurityPage/SecurityPage.test.tsx index b3706fab19327..de8c14253dbe4 100644 --- a/site/src/pages/UserSettingsPage/SecurityPage/SecurityPage.test.tsx +++ b/site/src/pages/UserSettingsPage/SecurityPage/SecurityPage.test.tsx @@ -1,12 +1,12 @@ -import { fireEvent, screen, waitFor, within } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; -import { API } from "api/api"; -import type { OAuthConversionResponse } from "api/typesGenerated"; import { MockAuthMethodsAll, mockApiError } from "testHelpers/entities"; import { renderWithAuth, waitForLoaderToBeRemoved, } from "testHelpers/renderHelpers"; +import { fireEvent, screen, waitFor, within } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { API } from "api/api"; +import type { OAuthConversionResponse } from "api/typesGenerated"; import { Language } from "./SecurityForm"; import SecurityPage from "./SecurityPage"; import * as SSO from "./SingleSignOnSection"; diff --git a/site/src/pages/UserSettingsPage/SecurityPage/SecurityPageView.stories.tsx b/site/src/pages/UserSettingsPage/SecurityPage/SecurityPageView.stories.tsx index 7446f359f5e95..149c1015b35b7 100644 --- a/site/src/pages/UserSettingsPage/SecurityPage/SecurityPageView.stories.tsx +++ b/site/src/pages/UserSettingsPage/SecurityPage/SecurityPageView.stories.tsx @@ -1,11 +1,11 @@ -import { action } from "@storybook/addon-actions"; -import type { Meta, StoryObj } from "@storybook/react"; -import set from "lodash/fp/set"; -import type { ComponentProps } from "react"; import { MockAuthMethodsAll, MockAuthMethodsPasswordOnly, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import set from "lodash/fp/set"; +import type { ComponentProps } from "react"; +import { action } from "storybook/actions"; import { SecurityPageView } from "./SecurityPage"; const defaultArgs: ComponentProps = { diff --git a/site/src/pages/UserSettingsPage/SecurityPage/SingleSignOnSection.tsx b/site/src/pages/UserSettingsPage/SecurityPage/SingleSignOnSection.tsx index 09d7c3e79c126..c7d24384f3645 100644 --- a/site/src/pages/UserSettingsPage/SecurityPage/SingleSignOnSection.tsx +++ b/site/src/pages/UserSettingsPage/SecurityPage/SingleSignOnSection.tsx @@ -103,12 +103,7 @@ export const useSingleSignOnSection = () => { const SSOEmptyState: FC = () => { return ( ({ - minHeight: 0, - padding: "48px 32px", - backgroundColor: theme.palette.background.paper, - borderRadius: 8, - })} + className="rounded-lg border border-solid border-border min-h-0" message="No SSO Providers" description="No SSO providers are configured with this Coder deployment." cta={ diff --git a/site/src/pages/UserSettingsPage/TokensPage/ConfirmDeleteDialog.stories.tsx b/site/src/pages/UserSettingsPage/TokensPage/ConfirmDeleteDialog.stories.tsx index 8794b35fdcd3d..56ba22c7be381 100644 --- a/site/src/pages/UserSettingsPage/TokensPage/ConfirmDeleteDialog.stories.tsx +++ b/site/src/pages/UserSettingsPage/TokensPage/ConfirmDeleteDialog.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { QueryClient, QueryClientProvider } from "react-query"; import { MockToken } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { QueryClient, QueryClientProvider } from "react-query"; import { ConfirmDeleteDialog } from "./ConfirmDeleteDialog"; const queryClient = new QueryClient({ diff --git a/site/src/pages/UserSettingsPage/TokensPage/TokensPage.tsx b/site/src/pages/UserSettingsPage/TokensPage/TokensPage.tsx index 9668b0fa7bb96..9e2918832cd7c 100644 --- a/site/src/pages/UserSettingsPage/TokensPage/TokensPage.tsx +++ b/site/src/pages/UserSettingsPage/TokensPage/TokensPage.tsx @@ -1,14 +1,14 @@ -import { type Interpolation, type Theme, css } from "@emotion/react"; +import { css, type Interpolation, type Theme } from "@emotion/react"; import Button from "@mui/material/Button"; import type { APIKeyWithOwner } from "api/typesGenerated"; import { Stack } from "components/Stack/Stack"; import { PlusIcon } from "lucide-react"; import { type FC, useState } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { Section } from "../Section"; import { ConfirmDeleteDialog } from "./ConfirmDeleteDialog"; -import { TokensPageView } from "./TokensPageView"; import { useTokensData } from "./hooks"; +import { TokensPageView } from "./TokensPageView"; const cliCreateCommand = "coder tokens create"; diff --git a/site/src/pages/UserSettingsPage/TokensPage/TokensPageView.stories.tsx b/site/src/pages/UserSettingsPage/TokensPage/TokensPageView.stories.tsx index 7097d465823cf..51d4e30e24cb7 100644 --- a/site/src/pages/UserSettingsPage/TokensPage/TokensPageView.stories.tsx +++ b/site/src/pages/UserSettingsPage/TokensPage/TokensPageView.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockTokens, mockApiError } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { TokensPageView } from "./TokensPageView"; const meta: Meta = { diff --git a/site/src/pages/UserSettingsPage/WorkspaceProxyPage/WorkspaceProxyRow.tsx b/site/src/pages/UserSettingsPage/WorkspaceProxyPage/WorkspaceProxyRow.tsx index 591e4bce59aae..417a5d731e33d 100644 --- a/site/src/pages/UserSettingsPage/WorkspaceProxyPage/WorkspaceProxyRow.tsx +++ b/site/src/pages/UserSettingsPage/WorkspaceProxyPage/WorkspaceProxyRow.tsx @@ -33,7 +33,6 @@ export const ProxyRow: FC = ({ proxy, latency }) => { case "http/1.0": case "http/1.1": extraWarnings.push( - // biome-ignore lint/style/useTemplate: easier to read short lines `Requests to the proxy from current browser are using "${latency.nextHopProtocol}". ` + "The proxy server might not support HTTP/2. " + "For usability reasons, HTTP/2 or above is recommended. " + @@ -141,7 +140,7 @@ const ProxyMessagesList: FC = ({ title, messages }) => { const theme = useTheme(); if (!messages) { - return <>; + return null; } return ( diff --git a/site/src/pages/UserSettingsPage/WorkspaceProxyPage/WorkspaceProxyView.stories.tsx b/site/src/pages/UserSettingsPage/WorkspaceProxyPage/WorkspaceProxyView.stories.tsx index 8892937d2e0f5..e84f50b922f16 100644 --- a/site/src/pages/UserSettingsPage/WorkspaceProxyPage/WorkspaceProxyView.stories.tsx +++ b/site/src/pages/UserSettingsPage/WorkspaceProxyPage/WorkspaceProxyView.stories.tsx @@ -1,4 +1,3 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockHealthyWildWorkspaceProxy, MockPrimaryWorkspaceProxy, @@ -6,6 +5,7 @@ import { MockWorkspaceProxies, mockApiError, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { WorkspaceProxyView } from "./WorkspaceProxyView"; const meta: Meta = { diff --git a/site/src/pages/UsersPage/ResetPasswordDialog.stories.tsx b/site/src/pages/UsersPage/ResetPasswordDialog.stories.tsx index bd64eef6ae7e5..2525cd2bfd5db 100644 --- a/site/src/pages/UsersPage/ResetPasswordDialog.stories.tsx +++ b/site/src/pages/UsersPage/ResetPasswordDialog.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockUserOwner } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { ResetPasswordDialog } from "./ResetPasswordDialog"; const meta: Meta = { diff --git a/site/src/pages/UsersPage/UsersFilter.tsx b/site/src/pages/UsersPage/UsersFilter.tsx index fb123c423a2c1..782ba3de504b8 100644 --- a/site/src/pages/UsersPage/UsersFilter.tsx +++ b/site/src/pages/UsersPage/UsersFilter.tsx @@ -1,12 +1,12 @@ import { Filter, MenuSkeleton, type useFilter } from "components/Filter/Filter"; -import { - SelectFilter, - type SelectFilterOption, -} from "components/Filter/SelectFilter"; import { type UseFilterMenuOptions, useFilterMenu, } from "components/Filter/menu"; +import { + SelectFilter, + type SelectFilterOption, +} from "components/Filter/SelectFilter"; import { StatusIndicatorDot } from "components/StatusIndicator/StatusIndicator"; import type { FC } from "react"; import { docs } from "utils/docs"; diff --git a/site/src/pages/UsersPage/UsersPage.stories.tsx b/site/src/pages/UsersPage/UsersPage.stories.tsx index fdede4ec9f163..3802a1968ef42 100644 --- a/site/src/pages/UsersPage/UsersPage.stories.tsx +++ b/site/src/pages/UsersPage/UsersPage.stories.tsx @@ -1,5 +1,10 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { screen, spyOn, userEvent, within } from "@storybook/test"; +import { MockAuthMethodsAll, MockUserOwner } from "testHelpers/entities"; +import { + withAuthProvider, + withDashboardProvider, + withGlobalSnackbar, +} from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { API } from "api/api"; import { deploymentConfigQueryKey } from "api/queries/deployment"; import { groupsQueryKey } from "api/queries/groups"; @@ -9,12 +14,7 @@ import type { User } from "api/typesGenerated"; import { MockGroups } from "pages/UsersPage/storybookData/groups"; import { MockRoles } from "pages/UsersPage/storybookData/roles"; import { MockUsers } from "pages/UsersPage/storybookData/users"; -import { MockAuthMethodsAll, MockUserOwner } from "testHelpers/entities"; -import { - withAuthProvider, - withDashboardProvider, - withGlobalSnackbar, -} from "testHelpers/storybook"; +import { screen, spyOn, userEvent, within } from "storybook/test"; import UsersPage from "./UsersPage"; const parameters = { diff --git a/site/src/pages/UsersPage/UsersPage.tsx b/site/src/pages/UsersPage/UsersPage.tsx index 581a9166bce3d..aaa67e060d50b 100644 --- a/site/src/pages/UsersPage/UsersPage.tsx +++ b/site/src/pages/UsersPage/UsersPage.tsx @@ -23,7 +23,7 @@ import { useDashboard } from "modules/dashboard/useDashboard"; import { type FC, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; -import { useNavigate, useSearchParams } from "react-router-dom"; +import { useNavigate, useSearchParams } from "react-router"; import { pageTitle } from "utils/page"; import { generateRandomString } from "utils/random"; import { ResetPasswordDialog } from "./ResetPasswordDialog"; @@ -39,9 +39,8 @@ type UserPageProps = { const UsersPage: FC = ({ defaultNewPassword }) => { const queryClient = useQueryClient(); const navigate = useNavigate(); - const searchParamsResult = useSearchParams(); + const [searchParams, setSearchParams] = useSearchParams(); const { entitlements } = useDashboard(); - const [searchParams] = searchParamsResult; const groupsByUserIdQuery = useQuery(groupsByUserId()); const authMethodsQuery = useQuery(authMethods()); @@ -58,9 +57,10 @@ const UsersPage: FC = ({ defaultNewPassword }) => { enabled: viewDeploymentConfig, }); - const usersQuery = usePaginatedQuery(paginatedUsers(searchParamsResult[0])); + const usersQuery = usePaginatedQuery(paginatedUsers(searchParams)); const useFilterResult = useFilter({ - searchParamsResult, + searchParams, + onSearchParamsChange: setSearchParams, onUpdate: usersQuery.goToFirstPage, }); diff --git a/site/src/pages/UsersPage/UsersPageView.stories.tsx b/site/src/pages/UsersPage/UsersPageView.stories.tsx index c15b8aefc1b23..fee8aa6c879f1 100644 --- a/site/src/pages/UsersPage/UsersPageView.stories.tsx +++ b/site/src/pages/UsersPage/UsersPageView.stories.tsx @@ -1,11 +1,3 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { - MockMenu, - getDefaultFilterProps, -} from "components/Filter/storyHelpers"; -import { mockSuccessResult } from "components/PaginationWidget/PaginationContainer.mocks"; -import type { UsePaginatedQueryResult } from "hooks/usePaginatedQuery"; -import type { ComponentProps } from "react"; import { MockAssignableSiteRoles, MockAuthMethodsPasswordOnly, @@ -13,6 +5,14 @@ import { MockUserOwner, mockApiError, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { + getDefaultFilterProps, + MockMenu, +} from "components/Filter/storyHelpers"; +import { mockSuccessResult } from "components/PaginationWidget/PaginationContainer.mocks"; +import type { UsePaginatedQueryResult } from "hooks/usePaginatedQuery"; +import type { ComponentProps } from "react"; import { UsersPageView } from "./UsersPageView"; type FilterProps = ComponentProps["filterProps"]; diff --git a/site/src/pages/UsersPage/UsersPageView.tsx b/site/src/pages/UsersPage/UsersPageView.tsx index 7f385ad2ee970..e97eb36714aab 100644 --- a/site/src/pages/UsersPage/UsersPageView.tsx +++ b/site/src/pages/UsersPage/UsersPageView.tsx @@ -12,7 +12,7 @@ import { } from "components/SettingsHeader/SettingsHeader"; import { UserPlusIcon } from "lucide-react"; import type { ComponentProps, FC } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { UsersFilter } from "./UsersFilter"; import { UsersTable } from "./UsersTable/UsersTable"; diff --git a/site/src/pages/UsersPage/UsersTable/UserGroupsCell.tsx b/site/src/pages/UsersPage/UsersTable/UserGroupsCell.tsx index c7c4586c0ec51..1b4a44a4542c9 100644 --- a/site/src/pages/UsersPage/UsersTable/UserGroupsCell.tsx +++ b/site/src/pages/UsersPage/UsersTable/UserGroupsCell.tsx @@ -4,13 +4,13 @@ import List from "@mui/material/List"; import ListItem from "@mui/material/ListItem"; import type { Group } from "api/typesGenerated"; import { Avatar } from "components/Avatar/Avatar"; -import { OverflowY } from "components/OverflowY/OverflowY"; -import { TableCell } from "components/Table/Table"; import { Popover, PopoverContent, PopoverTrigger, } from "components/deprecated/Popover/Popover"; +import { OverflowY } from "components/OverflowY/OverflowY"; +import { TableCell } from "components/Table/Table"; import type { FC } from "react"; type GroupsCellProps = { diff --git a/site/src/pages/UsersPage/UsersTable/UsersTable.stories.tsx b/site/src/pages/UsersPage/UsersTable/UsersTable.stories.tsx index 5ef7116025919..aceee691b2f42 100644 --- a/site/src/pages/UsersPage/UsersTable/UsersTable.stories.tsx +++ b/site/src/pages/UsersPage/UsersTable/UsersTable.stories.tsx @@ -1,4 +1,3 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockAssignableSiteRoles, MockAuditorRole, @@ -10,6 +9,7 @@ import { MockUserMember, MockUserOwner, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { UsersTable } from "./UsersTable"; const mockGroupsByUserId = new Map([ diff --git a/site/src/pages/UsersPage/UsersTable/UsersTableBody.tsx b/site/src/pages/UsersPage/UsersTable/UsersTableBody.tsx index 894a75daef78a..408ea411a84f9 100644 --- a/site/src/pages/UsersPage/UsersTable/UsersTableBody.tsx +++ b/site/src/pages/UsersPage/UsersTable/UsersTableBody.tsx @@ -28,8 +28,7 @@ import { } from "components/TableLoader/TableLoader"; import dayjs from "dayjs"; import relativeTime from "dayjs/plugin/relativeTime"; -import { TrashIcon } from "lucide-react"; -import { EllipsisVertical } from "lucide-react"; +import { EllipsisVertical, TrashIcon } from "lucide-react"; import type { FC } from "react"; import { UserRoleCell } from "../../OrganizationSettingsPage/UserTable/UserRoleCell"; import { UserGroupsCell } from "./UserGroupsCell"; diff --git a/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPage.test.tsx b/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPage.test.tsx index f1f1edb54a5f7..427405ae5a0a1 100644 --- a/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPage.test.tsx +++ b/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPage.test.tsx @@ -1,6 +1,3 @@ -import { screen, waitFor } from "@testing-library/react"; -import { API } from "api/api"; -import WS from "jest-websocket-mock"; import { MockWorkspace, MockWorkspaceAgent, @@ -8,6 +5,9 @@ import { MockWorkspaceBuild, } from "testHelpers/entities"; import { renderWithAuth } from "testHelpers/renderHelpers"; +import { screen, waitFor } from "@testing-library/react"; +import { API } from "api/api"; +import WS from "jest-websocket-mock"; import WorkspaceBuildPage from "./WorkspaceBuildPage"; import { LOGS_TAB_KEY } from "./WorkspaceBuildPageView"; diff --git a/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPage.tsx b/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPage.tsx index 78ff4b69f98c8..551989efee8c7 100644 --- a/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPage.tsx +++ b/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPage.tsx @@ -5,7 +5,7 @@ import { useWorkspaceBuildLogs } from "hooks/useWorkspaceBuildLogs"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { keepPreviousData, useQuery } from "react-query"; -import { useParams } from "react-router-dom"; +import { useParams } from "react-router"; import { pageTitle } from "utils/page"; import { WorkspaceBuildPageView } from "./WorkspaceBuildPageView"; diff --git a/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPageView.stories.tsx b/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPageView.stories.tsx index 2e61f0d24bd55..a026f9a5b391f 100644 --- a/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPageView.stories.tsx +++ b/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPageView.stories.tsx @@ -1,10 +1,10 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { chromatic } from "testHelpers/chromatic"; import { MockFailedWorkspaceBuild, MockWorkspaceBuild, MockWorkspaceBuildLogs, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { WorkspaceBuildPageView } from "./WorkspaceBuildPageView"; const defaultBuilds = Array.from({ length: 15 }, (_, i) => ({ diff --git a/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPageView.tsx b/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPageView.tsx index 6add701c8b688..75849e0790f67 100644 --- a/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPageView.tsx +++ b/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPageView.tsx @@ -35,7 +35,7 @@ import { useRef, useState, } from "react"; -import { Link } from "react-router-dom"; +import { Link } from "react-router"; import { displayWorkspaceBuildDuration } from "utils/workspace"; import { Sidebar, SidebarCaption, SidebarItem } from "./Sidebar"; @@ -212,7 +212,24 @@ export const WorkspaceBuildPageView: FC = ({ )} - {tabState.value === "build" && } + {build?.job?.logs_overflowed && ( + + Provisioner logs exceeded the max size of 1MB. Will not continue + to write provisioner logs for workspace build. + + )} + + {tabState.value === "build" && ( + + )} {tabState.value !== "build" && selectedAgent && ( )} @@ -261,7 +278,10 @@ const ScrollArea: FC> = (props) => { ); }; -const BuildLogsContent: FC<{ logs?: ProvisionerJobLog[] }> = ({ logs }) => { +const BuildLogsContent: FC<{ + logs?: ProvisionerJobLog[]; + build?: WorkspaceBuild; +}> = ({ logs, build }) => { if (!logs) { return ; } @@ -278,6 +298,7 @@ const BuildLogsContent: FC<{ logs?: ProvisionerJobLog[] }> = ({ logs }) => { }, }} logs={sortLogsByCreatedAt(logs)} + build={build} /> ); }; diff --git a/site/src/pages/WorkspacePage/AppStatuses.stories.tsx b/site/src/pages/WorkspacePage/AppStatuses.stories.tsx index c7ec5eb56f417..2e8324aef2e0b 100644 --- a/site/src/pages/WorkspacePage/AppStatuses.stories.tsx +++ b/site/src/pages/WorkspacePage/AppStatuses.stories.tsx @@ -1,15 +1,15 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { userEvent, within } from "@storybook/test"; -import type { WorkspaceAppStatus } from "api/typesGenerated"; import { + createTimestamp, MockWorkspace, MockWorkspaceAgent, MockWorkspaceApp, MockWorkspaceAppStatus, MockWorkspaceAppStatuses, - createTimestamp, } from "testHelpers/entities"; import { withProxyProvider } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import type { WorkspaceAppStatus } from "api/typesGenerated"; +import { userEvent, within } from "storybook/test"; import { AppStatuses } from "./AppStatuses"; const meta: Meta = { diff --git a/site/src/pages/WorkspacePage/AppStatuses.tsx b/site/src/pages/WorkspacePage/AppStatuses.tsx index 71547992ecd9e..26f239b627101 100644 --- a/site/src/pages/WorkspacePage/AppStatuses.tsx +++ b/site/src/pages/WorkspacePage/AppStatuses.tsx @@ -6,6 +6,7 @@ import type { } from "api/typesGenerated"; import { Button } from "components/Button/Button"; import { ExternalImage } from "components/ExternalImage/ExternalImage"; +import { ScrollArea } from "components/ScrollArea/ScrollArea"; import { Tooltip, TooltipContent, @@ -13,9 +14,6 @@ import { TooltipTrigger, } from "components/Tooltip/Tooltip"; import capitalize from "lodash/capitalize"; -import { timeFrom } from "utils/time"; - -import { ScrollArea } from "components/ScrollArea/ScrollArea"; import { ChevronDownIcon, ChevronUpIcon, @@ -27,7 +25,8 @@ import { import { AppStatusStateIcon } from "modules/apps/AppStatusStateIcon"; import { useAppLink } from "modules/apps/useAppLink"; import { type FC, useState } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; +import { timeFrom } from "utils/time"; import { truncateURI } from "utils/uri"; interface AppStatusesProps { diff --git a/site/src/pages/WorkspacePage/HistorySidebar.tsx b/site/src/pages/WorkspacePage/HistorySidebar.tsx index 2d978fb2a7d83..037aeec4b8b87 100644 --- a/site/src/pages/WorkspacePage/HistorySidebar.tsx +++ b/site/src/pages/WorkspacePage/HistorySidebar.tsx @@ -8,6 +8,7 @@ import { SidebarItem, SidebarLink, } from "components/FullPageLayout/Sidebar"; +import { ScrollArea } from "components/ScrollArea/ScrollArea"; import { Spinner } from "components/Spinner/Spinner"; import { WorkspaceBuildData, @@ -30,36 +31,40 @@ export const HistorySidebar: FC = ({ workspace }) => { return ( History - {builds - ? builds.map((build) => ( - - - - )) - : Array.from({ length: 15 }, (_, i) => ( - - - - ))} - {buildsQuery.hasNextPage && ( -
      - + +
      + {builds + ? builds.map((build) => ( + + + + )) + : Array.from({ length: 15 }, (_, i) => ( + + + + ))} + {buildsQuery.hasNextPage && ( +
      + +
      + )}
      - )} +
      ); }; diff --git a/site/src/pages/WorkspacePage/ResourceMetadata.stories.tsx b/site/src/pages/WorkspacePage/ResourceMetadata.stories.tsx index dde544134ce9e..00bc197ed1559 100644 --- a/site/src/pages/WorkspacePage/ResourceMetadata.stories.tsx +++ b/site/src/pages/WorkspacePage/ResourceMetadata.stories.tsx @@ -1,5 +1,5 @@ -import type { Meta, StoryObj } from "@storybook/react"; import { MockWorkspaceResource } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { ResourceMetadata } from "./ResourceMetadata"; const meta: Meta = { diff --git a/site/src/pages/WorkspacePage/Workspace.stories.tsx b/site/src/pages/WorkspacePage/Workspace.stories.tsx index 4fb197e6b5146..5a49e0fa57091 100644 --- a/site/src/pages/WorkspacePage/Workspace.stories.tsx +++ b/site/src/pages/WorkspacePage/Workspace.stories.tsx @@ -1,14 +1,15 @@ -import { action } from "@storybook/addon-actions"; -import type { Meta, StoryObj } from "@storybook/react"; -import type { ProvisionerJobLog } from "api/typesGenerated"; import * as Mocks from "testHelpers/entities"; import { withAuthProvider, withDashboardProvider, withProxyProvider, } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import type { ProvisionerJobLog } from "api/typesGenerated"; +import { action } from "storybook/actions"; import type { WorkspacePermissions } from "../../modules/workspaces/permissions"; import { Workspace } from "./Workspace"; +import { defaultPermissions } from "./WorkspaceNotifications/WorkspaceNotifications.stories"; // Helper function to create timestamps easily - Copied from AppStatuses.stories.tsx const createTimestamp = ( @@ -349,6 +350,23 @@ export const Stopping: Story = { }, }; +export const Unhealthy: Story = { + args: { + ...Running.args, + workspace: Mocks.MockUnhealthyWorkspace, + }, +}; + +export const UnhealthyWithoutUpdatePermission: Story = { + args: { + ...Unhealthy.args, + permissions: { + ...defaultPermissions, + updateWorkspace: false, + }, + }, +}; + export const FailedWithLogs: Story = { args: { ...Running.args, diff --git a/site/src/pages/WorkspacePage/Workspace.tsx b/site/src/pages/WorkspacePage/Workspace.tsx index 5c032c04efbdf..b1eda1618038b 100644 --- a/site/src/pages/WorkspacePage/Workspace.tsx +++ b/site/src/pages/WorkspacePage/Workspace.tsx @@ -1,5 +1,3 @@ -import type { Interpolation, Theme } from "@emotion/react"; -import { useTheme } from "@emotion/react"; import HistoryOutlined from "@mui/icons-material/HistoryOutlined"; import HubOutlined from "@mui/icons-material/HubOutlined"; import AlertTitle from "@mui/material/AlertTitle"; @@ -11,19 +9,21 @@ import { ProvisionerStatusAlert } from "modules/provisioners/ProvisionerStatusAl import { AgentRow } from "modules/resources/AgentRow"; import { WorkspaceTimings } from "modules/workspaces/WorkspaceTiming/WorkspaceTimings"; import type { FC } from "react"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; import type { WorkspacePermissions } from "../../modules/workspaces/permissions"; import { HistorySidebar } from "./HistorySidebar"; import { ResourceMetadata } from "./ResourceMetadata"; import { ResourcesSidebar } from "./ResourcesSidebar"; +import { resourceOptionValue, useResourcesNav } from "./useResourcesNav"; import { WorkspaceBuildLogsSection } from "./WorkspaceBuildLogsSection"; import { - ActiveTransition, + getActiveTransitionStats, WorkspaceBuildProgress, } from "./WorkspaceBuildProgress"; import { WorkspaceDeletedBanner } from "./WorkspaceDeletedBanner"; +import { NotificationActionButton } from "./WorkspaceNotifications/Notifications"; +import { findTroubleshootingURL } from "./WorkspaceNotifications/WorkspaceNotifications"; import { WorkspaceTopbar } from "./WorkspaceTopbar"; -import { resourceOptionValue, useResourcesNav } from "./useResourcesNav"; interface WorkspaceProps { workspace: TypesGen.Workspace; @@ -68,10 +68,11 @@ export const Workspace: FC = ({ handleDebug, }) => { const navigate = useNavigate(); - const theme = useTheme(); const transitionStats = - template !== undefined ? ActiveTransition(template, workspace) : undefined; + template !== undefined + ? getActiveTransitionStats(template, workspace) + : undefined; const sidebarOption = useSearchParamsKey({ key: "sidebar" }); const setSidebarOption = (newOption: string) => { @@ -98,20 +99,11 @@ export const Workspace: FC = ({ (workspace.latest_build.matched_provisioners?.available ?? 1) > 0; const shouldShowProvisionerAlert = workspacePending && !haveBuildLogs && !provisionersHealthy && !isRestarting; + const troubleshootingURL = findTroubleshootingURL(workspace.latest_build); + const hasActions = permissions.updateWorkspace || troubleshootingURL; return ( -
      +
      = ({ handleToggleFavorite={handleToggleFavorite} /> -
      - { - setSidebarOption("resources"); - }} - > - - - { - setSidebarOption("history"); - }} - > - - -
      +
      +
      +
      + { + setSidebarOption("resources"); + }} + > + + + { + setSidebarOption("history"); + }} + > + + +
      - {sidebarOption.value === "resources" && ( - - )} - {sidebarOption.value === "history" && ( - - )} + {sidebarOption.value === "resources" && ( + + )} + {sidebarOption.value === "history" && ( + + )} +
      -
      - {selectedResource && ( - - )}
      - {workspace.latest_build.status === "deleted" && ( - navigate("/templates")} + {selectedResource && ( + )} +
      + {workspace.latest_build.status === "deleted" && ( + navigate("/templates")} + /> + )} - {shouldShowProvisionerAlert && ( - - )} + {shouldShowProvisionerAlert && ( + + )} - {workspace.latest_build.job.error && ( - - Workspace build failed - {workspace.latest_build.job.error} - - )} + {workspace.latest_build.job.error && ( + + Workspace build failed + {workspace.latest_build.job.error} + + )} - {transitionStats !== undefined && ( - - )} + {!workspace.health.healthy && ( + + Workspace is unhealthy + +

      + Your workspace is running but{" "} + {workspace.health.failing_agents.length > 1 + ? `${workspace.health.failing_agents.length} agents are unhealthy` + : "1 agent is unhealthy"} + . +

      + {hasActions && ( +
      + {permissions.updateWorkspace && ( + handleRestart()} + > + Restart + + )} + {troubleshootingURL && ( + + window.open(troubleshootingURL, "_blank") + } + > + Troubleshooting + + )} +
      + )} +
      +
      + )} - {shouldShowBuildLogs && ( - - )} + {transitionStats !== undefined && ( + + )} - {selectedResource && ( -
      - {selectedResource.agents - // If an agent has a `parent_id`, that means it is - // child of another agent. We do not want these agents - // to be displayed at the top-level on this page. We - // want them to display _as children_ of their parents. - ?.filter((agent) => agent.parent_id === null) - .map((agent) => ( - a.parent_id === agent.id, - )} - workspace={workspace} - template={template} - onUpdateAgent={handleUpdate} // On updating the workspace the agent version is also updated - /> - ))} + {shouldShowBuildLogs && ( + + )} + + {selectedResource && ( +
      + {selectedResource.agents + // If an agent has a `parent_id`, that means it is + // child of another agent. We do not want these agents + // to be displayed at the top-level on this page. We + // want them to display _as children_ of their parents. + ?.filter((agent) => agent.parent_id === null) + .map((agent) => ( + a.parent_id === agent.id, + )} + workspace={workspace} + template={template} + onUpdateAgent={handleUpdate} // On updating the workspace the agent version is also updated + /> + ))} - {(!selectedResource.agents || - selectedResource.agents?.length === 0) && ( -
      -
      -

      - No agents are currently assigned to this resource. -

      + {(!selectedResource.agents || + selectedResource.agents?.length === 0) && ( +
      +
      +

      + No agents are currently assigned to this resource. +

      +
      -
      - )} -
      - )} + )} +
      + )} - + +
      @@ -286,33 +293,3 @@ export const Workspace: FC = ({ const countAgents = (resource: TypesGen.WorkspaceResource) => { return resource.agents ? resource.agents.length : 0; }; - -const styles = { - content: { - padding: 32, - gridArea: "content", - overflowY: "auto", - position: "relative", - }, - - dotsBackground: (theme) => ({ - "--d": "1px", - background: ` - radial-gradient( - circle at - var(--d) - var(--d), - - ${theme.palette.dots} calc(var(--d) - 1px), - ${theme.palette.background.default} var(--d) - ) - -2px -2px / 16px 16px - `, - }), - - actions: (theme) => ({ - [theme.breakpoints.down("md")]: { - flexDirection: "column", - }, - }), -} satisfies Record>; diff --git a/site/src/pages/WorkspacePage/WorkspaceActions/BuildParametersPopover.tsx b/site/src/pages/WorkspacePage/WorkspaceActions/BuildParametersPopover.tsx index f084c4c200b67..7aef1dc7c7357 100644 --- a/site/src/pages/WorkspacePage/WorkspaceActions/BuildParametersPopover.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceActions/BuildParametersPopover.tsx @@ -7,6 +7,12 @@ import type { WorkspaceBuildParameter, } from "api/typesGenerated"; import { Button } from "components/Button/Button"; +import { + Popover, + PopoverContent, + PopoverTrigger, + usePopover, +} from "components/deprecated/Popover/Popover"; import { FormFields } from "components/Form/Form"; import { TopbarButton } from "components/FullPageLayout/Topbar"; import { @@ -17,17 +23,11 @@ import { } from "components/HelpTooltip/HelpTooltip"; import { Loader } from "components/Loader/Loader"; import { RichParameterInput } from "components/RichParameterInput/RichParameterInput"; -import { - Popover, - PopoverContent, - PopoverTrigger, - usePopover, -} from "components/deprecated/Popover/Popover"; import { useFormik } from "formik"; import { ChevronDownIcon } from "lucide-react"; import type { FC } from "react"; import { useQuery } from "react-query"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; import { docs } from "utils/docs"; import { getFormHelpers } from "utils/formUtils"; import { diff --git a/site/src/pages/WorkspacePage/WorkspaceActions/DebugButton.stories.tsx b/site/src/pages/WorkspacePage/WorkspaceActions/DebugButton.stories.tsx index 4e947cf27e7bd..e1e4fb4851eb0 100644 --- a/site/src/pages/WorkspacePage/WorkspaceActions/DebugButton.stories.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceActions/DebugButton.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, userEvent, waitFor, within } from "@storybook/test"; import { MockWorkspace } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { expect, userEvent, waitFor, within } from "storybook/test"; import { DebugButton } from "./DebugButton"; const meta: Meta = { diff --git a/site/src/pages/WorkspacePage/WorkspaceActions/RetryButton.stories.tsx b/site/src/pages/WorkspacePage/WorkspaceActions/RetryButton.stories.tsx index 2603cf7206a61..12ff75dc64616 100644 --- a/site/src/pages/WorkspacePage/WorkspaceActions/RetryButton.stories.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceActions/RetryButton.stories.tsx @@ -1,6 +1,6 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, userEvent, waitFor, within } from "@storybook/test"; import { MockWorkspace } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { expect, userEvent, waitFor, within } from "storybook/test"; import { RetryButton } from "./RetryButton"; const meta: Meta = { diff --git a/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.stories.tsx b/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.stories.tsx index 19dde8871045f..3d1d1dcff0c00 100644 --- a/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.stories.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.stories.tsx @@ -1,13 +1,13 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, userEvent, within } from "@storybook/test"; -import { deploymentConfigQueryKey } from "api/queries/deployment"; -import { agentLogsKey, buildLogsKey } from "api/queries/workspaces"; import * as Mocks from "testHelpers/entities"; import { withAuthProvider, withDashboardProvider, withDesktopViewport, } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { deploymentConfigQueryKey } from "api/queries/deployment"; +import { agentLogsKey, buildLogsKey } from "api/queries/workspaces"; +import { expect, userEvent, within } from "storybook/test"; import { WorkspaceActions } from "./WorkspaceActions"; const meta: Meta = { diff --git a/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.tsx b/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.tsx index 1c38caa14ec21..f46589a0a67fb 100644 --- a/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.tsx @@ -1,12 +1,12 @@ import { deploymentConfig } from "api/queries/deployment"; import type { Workspace, WorkspaceBuildParameter } from "api/typesGenerated"; import { useAuthenticated } from "hooks/useAuthenticated"; -import { WorkspaceMoreActions } from "modules/workspaces/WorkspaceMoreActions/WorkspaceMoreActions"; import { type ActionType, abilitiesByWorkspaceStatus, } from "modules/workspaces/actions"; import type { WorkspacePermissions } from "modules/workspaces/permissions"; +import { WorkspaceMoreActions } from "modules/workspaces/WorkspaceMoreActions/WorkspaceMoreActions"; import { type FC, Fragment, type ReactNode } from "react"; import { useQuery } from "react-query"; import { mustUpdateWorkspace } from "utils/workspace"; diff --git a/site/src/pages/WorkspacePage/WorkspaceBuildProgress.stories.tsx b/site/src/pages/WorkspacePage/WorkspaceBuildProgress.stories.tsx index 433f16ddd9fd3..d7a6526bfbdff 100644 --- a/site/src/pages/WorkspacePage/WorkspaceBuildProgress.stories.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceBuildProgress.stories.tsx @@ -1,10 +1,10 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import dayjs from "dayjs"; import { MockProvisionerJob, MockStartingWorkspace, MockWorkspaceBuild, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import dayjs from "dayjs"; import { WorkspaceBuildProgress } from "./WorkspaceBuildProgress"; const meta: Meta = { diff --git a/site/src/pages/WorkspacePage/WorkspaceBuildProgress.tsx b/site/src/pages/WorkspacePage/WorkspaceBuildProgress.tsx index 306da719be0ca..32061f2a6590e 100644 --- a/site/src/pages/WorkspacePage/WorkspaceBuildProgress.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceBuildProgress.tsx @@ -9,9 +9,9 @@ import { type FC, useEffect, useState } from "react"; dayjs.extend(duration); -// ActiveTransition gets the build estimate for the workspace, +// getActiveTransitionStats gets the build estimate for the workspace, // if it is in a transition state. -export const ActiveTransition = ( +export const getActiveTransitionStats = ( template: Template, workspace: Workspace, ): TransitionStats | undefined => { @@ -117,7 +117,7 @@ export const WorkspaceBuildProgress: FC = ({ // HACK: the codersdk type generator doesn't support null values, but this // can be null when the template is new. if ((transitionStats.P50 as number | null) === null) { - return <>; + return null; } return (
      diff --git a/site/src/pages/WorkspacePage/WorkspaceDeletedBanner.stories.tsx b/site/src/pages/WorkspacePage/WorkspaceDeletedBanner.stories.tsx index 76eff3d6b5c77..428ff85f6a0b6 100644 --- a/site/src/pages/WorkspacePage/WorkspaceDeletedBanner.stories.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceDeletedBanner.stories.tsx @@ -1,4 +1,4 @@ -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { WorkspaceDeletedBanner } from "./WorkspaceDeletedBanner"; const meta: Meta = { diff --git a/site/src/pages/WorkspacePage/WorkspaceNotifications/Notifications.tsx b/site/src/pages/WorkspacePage/WorkspaceNotifications/Notifications.tsx index ad63ced0952cf..bc72396932e77 100644 --- a/site/src/pages/WorkspacePage/WorkspaceNotifications/Notifications.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceNotifications/Notifications.tsx @@ -1,13 +1,13 @@ import { type Interpolation, type Theme, useTheme } from "@emotion/react"; import type { AlertProps } from "components/Alert/Alert"; import { Button, type ButtonProps } from "components/Button/Button"; -import { Pill } from "components/Pill/Pill"; import { Popover, PopoverContent, PopoverTrigger, usePopover, } from "components/deprecated/Popover/Popover"; +import { Pill } from "components/Pill/Pill"; import type { FC, ReactNode } from "react"; import type { ThemeRole } from "theme/roles"; diff --git a/site/src/pages/WorkspacePage/WorkspaceNotifications/WorkspaceNotifications.stories.tsx b/site/src/pages/WorkspacePage/WorkspaceNotifications/WorkspaceNotifications.stories.tsx index a35771971b329..bcff8c53cca59 100644 --- a/site/src/pages/WorkspacePage/WorkspaceNotifications/WorkspaceNotifications.stories.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceNotifications/WorkspaceNotifications.stories.tsx @@ -1,7 +1,3 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, userEvent, waitFor, within } from "@storybook/test"; -import { getWorkspaceResolveAutostartQueryKey } from "api/queries/workspaceQuota"; -import type { WorkspacePermissions } from "modules/workspaces/permissions"; import { MockOutdatedWorkspace, MockTemplate, @@ -10,9 +6,13 @@ import { MockWorkspace, } from "testHelpers/entities"; import { withDashboardProvider } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { getWorkspaceResolveAutostartQueryKey } from "api/queries/workspaceQuota"; +import type { WorkspacePermissions } from "modules/workspaces/permissions"; +import { expect, userEvent, waitFor, within } from "storybook/test"; import { WorkspaceNotifications } from "./WorkspaceNotifications"; -const defaultPermissions: WorkspacePermissions = { +export const defaultPermissions: WorkspacePermissions = { readWorkspace: true, updateWorkspaceVersion: true, updateWorkspace: true, diff --git a/site/src/pages/WorkspacePage/WorkspaceNotifications/WorkspaceNotifications.tsx b/site/src/pages/WorkspacePage/WorkspaceNotifications/WorkspaceNotifications.tsx index c9bf60fbecaaa..f0e91ad7b400d 100644 --- a/site/src/pages/WorkspacePage/WorkspaceNotifications/WorkspaceNotifications.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceNotifications/WorkspaceNotifications.tsx @@ -9,13 +9,13 @@ import type { import { MemoizedInlineMarkdown } from "components/Markdown/Markdown"; import dayjs from "dayjs"; import relativeTime from "dayjs/plugin/relativeTime"; -import { TriangleAlertIcon } from "lucide-react"; -import { InfoIcon } from "lucide-react"; +import { InfoIcon, TriangleAlertIcon } from "lucide-react"; import { useDashboard } from "modules/dashboard/useDashboard"; import { TemplateUpdateMessage } from "modules/templates/TemplateUpdateMessage"; import { type FC, useEffect, useState } from "react"; dayjs.extend(relativeTime); + import { useQuery } from "react-query"; import type { WorkspacePermissions } from "../../../modules/workspaces/permissions"; import { @@ -275,7 +275,7 @@ const styles = { }, } satisfies Record>; -const findTroubleshootingURL = ( +export const findTroubleshootingURL = ( workspaceBuild: WorkspaceBuild, ): string | undefined => { for (const resource of workspaceBuild.resources) { diff --git a/site/src/pages/WorkspacePage/WorkspacePage.test.tsx b/site/src/pages/WorkspacePage/WorkspacePage.test.tsx index 645c03380501a..b089a420bcb33 100644 --- a/site/src/pages/WorkspacePage/WorkspacePage.test.tsx +++ b/site/src/pages/WorkspacePage/WorkspacePage.test.tsx @@ -1,16 +1,3 @@ -import { screen, waitFor, within } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; -import * as apiModule from "api/api"; -import type { TemplateVersionParameter, Workspace } from "api/typesGenerated"; -import MockServerSocket from "jest-websocket-mock"; -import { - DashboardContext, - type DashboardProvider, -} from "modules/dashboard/DashboardProvider"; -import type { WorkspacePermissions } from "modules/workspaces/permissions"; -import { http, HttpResponse } from "msw"; -import type { FC } from "react"; -import { type Location, useLocation } from "react-router-dom"; import { MockAppearanceConfig, MockBuildInfo, @@ -35,6 +22,18 @@ import { renderWithAuth, } from "testHelpers/renderHelpers"; import { server } from "testHelpers/server"; +import { screen, waitFor, within } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import * as apiModule from "api/api"; +import type { TemplateVersionParameter, Workspace } from "api/typesGenerated"; +import MockServerSocket from "jest-websocket-mock"; +import { + DashboardContext, + type DashboardProvider, +} from "modules/dashboard/DashboardProvider"; +import type { WorkspacePermissions } from "modules/workspaces/permissions"; +import { HttpResponse, http } from "msw"; +import type { FC } from "react"; import WorkspacePage from "./WorkspacePage"; const { API, MissingBuildParameters } = apiModule; @@ -54,13 +53,15 @@ const renderWorkspacePage = async ( .mockResolvedValueOnce(MockDeploymentConfig); jest.spyOn(apiModule, "watchWorkspaceAgentLogs"); - renderWithAuth(, { + const result = renderWithAuth(, { ...options, route: `/@${workspace.owner_name}/${workspace.name}`, path: "/:username/:workspace", }); await screen.findByText(workspace.name); + + return result; }; /** @@ -305,7 +306,10 @@ describe("WorkspacePage", () => { }); }); - it("updates the parameters when they are missing during update", async () => { + // Started flaking after upgrading react-router. Tests the old parameters path + // and isn't worth spending more time to fix since this code will be removed + // in a few releases when dynamic parameters takes over the world. + it.skip("updates the parameters when they are missing during update", async () => { // Mocks jest .spyOn(API, "getWorkspaceByOwnerAndName") @@ -336,7 +340,7 @@ describe("WorkspacePage", () => { // After trying to update, a new dialog asking for missed parameters should // be displayed and filled - const dialog = await screen.findByTestId("dialog"); + const dialog = await waitFor(() => screen.findByTestId("dialog")); const firstParameterInput = within(dialog).getByLabelText( MockTemplateVersionParameter1.name, { exact: false }, @@ -617,10 +621,8 @@ describe("WorkspacePage", () => { ); - let destinationLocation!: Location; const MockWorkspacesPage: FC = () => { - destinationLocation = useLocation(); - return null; + return

      Workspaces

      ; }; const workspace: Workspace = { @@ -628,7 +630,7 @@ describe("WorkspacePage", () => { organization_name: MockOrganization.name, }; - await renderWorkspacePage(workspace, { + const { router } = await renderWorkspacePage(workspace, { mockAuthProviders: { DashboardProvider: MockDashboardProvider, }, @@ -652,8 +654,9 @@ describe("WorkspacePage", () => { const user = userEvent.setup(); await user.click(quotaLink); - expect(destinationLocation.pathname).toBe("/workspaces"); - expect(destinationLocation.search).toBe( + await waitFor(() => screen.findByText("Workspaces")); + expect(router.state.location.pathname).toBe("/workspaces"); + expect(router.state.location.search).toBe( `?filter=organization:${orgName}`, ); }); diff --git a/site/src/pages/WorkspacePage/WorkspacePage.tsx b/site/src/pages/WorkspacePage/WorkspacePage.tsx index 2085fb82b0cda..ae3e5a017f6d3 100644 --- a/site/src/pages/WorkspacePage/WorkspacePage.tsx +++ b/site/src/pages/WorkspacePage/WorkspacePage.tsx @@ -13,7 +13,7 @@ import { Margins } from "components/Margins/Margins"; import { useEffectEvent } from "hooks/hookPolyfills"; import { type FC, useEffect } from "react"; import { useQuery, useQueryClient } from "react-query"; -import { useParams } from "react-router-dom"; +import { useParams } from "react-router"; import { WorkspaceReadyPage } from "./WorkspaceReadyPage"; const WorkspacePage: FC = () => { diff --git a/site/src/pages/WorkspacePage/WorkspaceReadyPage.tsx b/site/src/pages/WorkspacePage/WorkspaceReadyPage.tsx index 4034cc144e127..667df7cd34252 100644 --- a/site/src/pages/WorkspacePage/WorkspaceReadyPage.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceReadyPage.tsx @@ -1,6 +1,5 @@ import { API } from "api/api"; -import { type ApiError, getErrorMessage } from "api/errors"; -import { isApiError } from "api/errors"; +import { type ApiError, getErrorMessage, isApiError } from "api/errors"; import { templateVersion } from "api/queries/templates"; import { workspaceBuildTimings } from "api/queries/workspaceBuilds"; import { @@ -20,12 +19,12 @@ import { displayError } from "components/GlobalSnackbar/utils"; import { useWorkspaceBuildLogs } from "hooks/useWorkspaceBuildLogs"; import { EphemeralParametersDialog } from "modules/workspaces/EphemeralParametersDialog/EphemeralParametersDialog"; import { WorkspaceErrorDialog } from "modules/workspaces/ErrorDialog/WorkspaceErrorDialog"; +import type { WorkspacePermissions } from "modules/workspaces/permissions"; import { WorkspaceBuildCancelDialog } from "modules/workspaces/WorkspaceBuildCancelDialog/WorkspaceBuildCancelDialog"; import { - WorkspaceUpdateDialogs, useWorkspaceUpdate, + WorkspaceUpdateDialogs, } from "modules/workspaces/WorkspaceUpdateDialogs"; -import type { WorkspacePermissions } from "modules/workspaces/permissions"; import { type FC, useEffect, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; @@ -212,7 +211,7 @@ export const WorkspaceReadyPage: FC = ({ hasEphemeral: ephemeralParameters.length > 0, ephemeralParameters, }; - } catch (error) { + } catch (_error) { return { hasEphemeral: false, ephemeralParameters: [] }; } }; diff --git a/site/src/pages/WorkspacePage/WorkspaceScheduleControls.test.tsx b/site/src/pages/WorkspacePage/WorkspaceScheduleControls.test.tsx index 225db7c8a44c0..388f23625f043 100644 --- a/site/src/pages/WorkspacePage/WorkspaceScheduleControls.test.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceScheduleControls.test.tsx @@ -1,14 +1,14 @@ +import { MockTemplate, MockWorkspace } from "testHelpers/entities"; +import { render } from "testHelpers/renderHelpers"; +import { server } from "testHelpers/server"; import { screen } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; import { API } from "api/api"; import { workspaceByOwnerAndName } from "api/queries/workspaces"; import dayjs from "dayjs"; -import { http, HttpResponse } from "msw"; +import { HttpResponse, http } from "msw"; import type { FC } from "react"; import { useQuery } from "react-query"; -import { MockTemplate, MockWorkspace } from "testHelpers/entities"; -import { render } from "testHelpers/renderHelpers"; -import { server } from "testHelpers/server"; import { WorkspaceScheduleControls } from "./WorkspaceScheduleControls"; const Wrapper: FC = () => { diff --git a/site/src/pages/WorkspacePage/WorkspaceScheduleControls.tsx b/site/src/pages/WorkspacePage/WorkspaceScheduleControls.tsx index 5bced6f668d0f..965c206932015 100644 --- a/site/src/pages/WorkspacePage/WorkspaceScheduleControls.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceScheduleControls.tsx @@ -15,9 +15,9 @@ import dayjs, { type Dayjs } from "dayjs"; import { useTime } from "hooks/useTime"; import { ClockIcon, MinusIcon, PlusIcon } from "lucide-react"; import { getWorkspaceActivityStatus } from "modules/workspaces/activity"; -import { type FC, type ReactNode, forwardRef, useRef, useState } from "react"; +import { type FC, forwardRef, type ReactNode, useRef, useState } from "react"; import { useMutation, useQueryClient } from "react-query"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { autostartDisplay, autostopDisplay, diff --git a/site/src/pages/WorkspacePage/WorkspaceTopbar.stories.tsx b/site/src/pages/WorkspacePage/WorkspaceTopbar.stories.tsx index 2d838ca9dc31d..aafd16d9c099d 100644 --- a/site/src/pages/WorkspacePage/WorkspaceTopbar.stories.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceTopbar.stories.tsx @@ -1,8 +1,3 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, screen, userEvent, waitFor, within } from "@storybook/test"; -import { getWorkspaceQuotaQueryKey } from "api/queries/workspaceQuota"; -import type { Workspace, WorkspaceQuota } from "api/typesGenerated"; -import dayjs from "dayjs"; import { MockOrganization, MockTemplate, @@ -11,6 +6,11 @@ import { MockWorkspace, } from "testHelpers/entities"; import { withAuthProvider, withDashboardProvider } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { getWorkspaceQuotaQueryKey } from "api/queries/workspaceQuota"; +import type { Workspace, WorkspaceQuota } from "api/typesGenerated"; +import dayjs from "dayjs"; +import { expect, screen, userEvent, waitFor, within } from "storybook/test"; import { WorkspaceTopbar } from "./WorkspaceTopbar"; // We want a workspace without a deadline to not pollute the screenshot. Also diff --git a/site/src/pages/WorkspacePage/WorkspaceTopbar.tsx b/site/src/pages/WorkspacePage/WorkspaceTopbar.tsx index 943b967de92c6..b6b21b6f226b9 100644 --- a/site/src/pages/WorkspacePage/WorkspaceTopbar.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceTopbar.tsx @@ -6,6 +6,7 @@ import type * as TypesGen from "api/typesGenerated"; import { Avatar } from "components/Avatar/Avatar"; import { AvatarData } from "components/Avatar/AvatarData"; import { CopyButton } from "components/CopyButton/CopyButton"; +import { Popover, PopoverTrigger } from "components/deprecated/Popover/Popover"; import { Topbar, TopbarAvatar, @@ -15,16 +16,13 @@ import { TopbarIconButton, } from "components/FullPageLayout/Topbar"; import { HelpTooltipContent } from "components/HelpTooltip/HelpTooltip"; -import { Popover, PopoverTrigger } from "components/deprecated/Popover/Popover"; -import { ChevronLeftIcon } from "lucide-react"; -import { CircleDollarSign } from "lucide-react"; -import { TrashIcon } from "lucide-react"; +import { ChevronLeftIcon, CircleDollarSign, TrashIcon } from "lucide-react"; import { useDashboard } from "modules/dashboard/useDashboard"; import { linkToTemplate, useLinks } from "modules/navigation"; import { WorkspaceStatusIndicator } from "modules/workspaces/WorkspaceStatusIndicator/WorkspaceStatusIndicator"; import type { FC } from "react"; import { useQuery } from "react-query"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { displayDormantDeletion } from "utils/dormant"; import type { WorkspacePermissions } from "../../modules/workspaces/permissions"; import { WorkspaceActions } from "./WorkspaceActions/WorkspaceActions"; diff --git a/site/src/pages/WorkspacePage/useResourcesNav.test.tsx b/site/src/pages/WorkspacePage/useResourcesNav.test.tsx index 7200405e3b558..77ac0c3204315 100644 --- a/site/src/pages/WorkspacePage/useResourcesNav.test.tsx +++ b/site/src/pages/WorkspacePage/useResourcesNav.test.tsx @@ -1,7 +1,7 @@ +import { MockWorkspaceResource } from "testHelpers/entities"; import { renderHook } from "@testing-library/react"; import type { WorkspaceResource } from "api/typesGenerated"; -import { RouterProvider, createMemoryRouter } from "react-router-dom"; -import { MockWorkspaceResource } from "testHelpers/entities"; +import { createMemoryRouter, RouterProvider } from "react-router"; import { resourceOptionValue, useResourcesNav } from "./useResourcesNav"; describe("useResourcesNav", () => { diff --git a/site/src/pages/WorkspaceSettingsPage/Sidebar.tsx b/site/src/pages/WorkspaceSettingsPage/Sidebar.tsx index 91aea9ac9cf12..32261577da9b2 100644 --- a/site/src/pages/WorkspaceSettingsPage/Sidebar.tsx +++ b/site/src/pages/WorkspaceSettingsPage/Sidebar.tsx @@ -5,9 +5,13 @@ import { SidebarHeader, SidebarNavItem, } from "components/Sidebar/Sidebar"; -import { CodeIcon as ParameterIcon } from "lucide-react"; -import { SettingsIcon as GeneralIcon } from "lucide-react"; -import { TimerIcon as ScheduleIcon } from "lucide-react"; +import { + SettingsIcon as GeneralIcon, + CodeIcon as ParameterIcon, + TimerIcon as ScheduleIcon, + Users as SharingIcon, +} from "lucide-react"; +import { useDashboard } from "modules/dashboard/useDashboard"; import type { FC } from "react"; interface SidebarProps { @@ -16,6 +20,8 @@ interface SidebarProps { } export const Sidebar: FC = ({ username, workspace }) => { + const { experiments } = useDashboard(); + return ( = ({ username, workspace }) => { Schedule + {experiments.includes("workspace-sharing") && ( + + Sharing + + )} ); }; diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersForm.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersForm.tsx index 00b8c2ae8464b..68dc6e65b7595 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersForm.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersForm.tsx @@ -14,6 +14,7 @@ import { import { RichParameterInput } from "components/RichParameterInput/RichParameterInput"; import { Spinner } from "components/Spinner/Spinner"; import { useFormik } from "formik"; +import { ClassicParameterFlowDeprecationWarning } from "modules/workspaces/ClassicParameterFlowDeprecationWarning/ClassicParameterFlowDeprecationWarning"; import type { FC } from "react"; import { getFormHelpers } from "utils/formUtils"; import { @@ -33,6 +34,7 @@ interface WorkspaceParameterFormProps { autofillParams: AutofillBuildParameter[]; isSubmitting: boolean; canChangeVersions: boolean; + templatePermissions: { canUpdateTemplate: boolean } | undefined; error: unknown; onCancel: () => void; onSubmit: (values: WorkspaceParametersFormValues) => void; @@ -46,6 +48,7 @@ export const WorkspaceParametersForm: FC = ({ autofillParams, error, canChangeVersions, + templatePermissions, isSubmitting, }) => { const form = useFormik({ @@ -81,12 +84,15 @@ export const WorkspaceParametersForm: FC = ({ return ( <> {disabled && ( - + The template for this workspace requires automatic updates. Update the workspace to edit parameters. )} - + {hasNonEphemeralParameters && ( = { diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPage.test.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPage.test.tsx index dc4c127b9506e..90337e871e6cd 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPage.test.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPage.test.tsx @@ -1,6 +1,3 @@ -import { screen, waitFor, within } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; -import { API } from "api/api"; import { MockTemplateVersionParameter1, MockTemplateVersionParameter2, @@ -15,6 +12,9 @@ import { renderWithWorkspaceSettingsLayout, waitForLoaderToBeRemoved, } from "testHelpers/renderHelpers"; +import { screen, waitFor, within } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { API } from "api/api"; import WorkspaceParametersPage from "./WorkspaceParametersPage"; test("Submit the workspace settings page successfully", async () => { diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPage.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPage.tsx index 30b8ca943795f..d10db1fbe159b 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPage.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPage.tsx @@ -10,7 +10,7 @@ import { ExternalLinkIcon } from "lucide-react"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery } from "react-query"; -import { useNavigate } from "react-router-dom"; +import { useNavigate } from "react-router"; import { docs } from "utils/docs"; import { pageTitle } from "utils/page"; import { @@ -51,6 +51,25 @@ const WorkspaceParametersPage: FC = () => { const permissions = permissionsQuery.data as WorkspacePermissions | undefined; const canChangeVersions = Boolean(permissions?.updateWorkspaceVersion); + const templatePermissionsQuery = useQuery({ + ...checkAuthorization({ + checks: { + canUpdateTemplate: { + object: { + resource_type: "template", + resource_id: workspace.template_id, + }, + action: "update", + }, + }, + }), + enabled: workspace !== undefined, + }); + + const templatePermissions = templatePermissionsQuery.data as + | { canUpdateTemplate: boolean } + | undefined; + return ( <> @@ -60,6 +79,7 @@ const WorkspaceParametersPage: FC = () => { { type WorkspaceParametersPageViewProps = { workspace: Workspace; canChangeVersions: boolean; + templatePermissions: { canUpdateTemplate: boolean } | undefined; data: Awaited> | undefined; submitError: unknown; isSubmitting: boolean; @@ -106,6 +127,7 @@ export const WorkspaceParametersPageView: FC< > = ({ workspace, canChangeVersions, + templatePermissions, data, submitError, onSubmit, @@ -129,6 +151,7 @@ export const WorkspaceParametersPageView: FC< ({ ...p, source: "active_build", diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPageExperimental.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPageExperimental.tsx index 803dc4ff4fd48..9c1ef433f2cb0 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPageExperimental.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPageExperimental.tsx @@ -22,7 +22,7 @@ import type { FC } from "react"; import { useEffect, useMemo, useRef, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery } from "react-query"; -import { useNavigate, useSearchParams } from "react-router-dom"; +import { useNavigate, useSearchParams } from "react-router"; import { docs } from "utils/docs"; import { pageTitle } from "utils/page"; import type { AutofillBuildParameter } from "utils/richParameters"; @@ -226,7 +226,7 @@ const WorkspaceParametersPageExperimental: FC = () => {
      View docs @@ -261,7 +261,9 @@ const WorkspaceParametersPageExperimental: FC = () => { message="This workspace has no parameters" cta={ Learn more about parameters diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPageViewExperimental.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPageViewExperimental.tsx index 14253ad51f827..52228f19d9f40 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPageViewExperimental.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPageViewExperimental.tsx @@ -210,7 +210,7 @@ export const WorkspaceParametersPageViewExperimental: FC< parameters cannot be modified once the workspace is created. View docs diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.stories.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.stories.tsx index 17d562387970d..1de412679d16b 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.stories.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.stories.tsx @@ -1,5 +1,5 @@ -import { action } from "@storybook/addon-actions"; -import type { Meta, StoryObj } from "@storybook/react"; +import { MockTemplate, mockApiError } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import dayjs from "dayjs"; import advancedFormat from "dayjs/plugin/advancedFormat"; import timezone from "dayjs/plugin/timezone"; @@ -9,7 +9,7 @@ import { emptySchedule, } from "pages/WorkspaceSettingsPage/WorkspaceSchedulePage/schedule"; import { emptyTTL } from "pages/WorkspaceSettingsPage/WorkspaceSchedulePage/ttl"; -import { MockTemplate, mockApiError } from "testHelpers/entities"; +import { action } from "storybook/actions"; import { WorkspaceScheduleForm } from "./WorkspaceScheduleForm"; dayjs.extend(advancedFormat); diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.test.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.test.tsx index 1e9b50292887b..68ffdac9e77d9 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.test.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.test.tsx @@ -1,16 +1,16 @@ +import { MockTemplate } from "testHelpers/entities"; +import { render } from "testHelpers/renderHelpers"; import { screen } from "@testing-library/react"; import { API } from "api/api"; import { defaultSchedule } from "pages/WorkspaceSettingsPage/WorkspaceSchedulePage/schedule"; -import { MockTemplate } from "testHelpers/entities"; -import { render } from "testHelpers/renderHelpers"; import { timeZones } from "utils/timeZones"; import { Language, + ttlShutdownAt, + validationSchema, WorkspaceScheduleForm, type WorkspaceScheduleFormProps, type WorkspaceScheduleFormValues, - ttlShutdownAt, - validationSchema, } from "./WorkspaceScheduleForm"; const valid: WorkspaceScheduleFormValues = { @@ -71,7 +71,7 @@ describe("validationSchema", () => { saturday: false, }; const validate = () => validationSchema.validateSync(values); - expect(validate).toThrowError(Language.errorNoDayOfWeek); + expect(validate).toThrow(Language.errorNoDayOfWeek); }); it("disallows empty startTime when autostart is enabled", () => { @@ -87,7 +87,7 @@ describe("validationSchema", () => { startTime: "", }; const validate = () => validationSchema.validateSync(values); - expect(validate).toThrowError(Language.errorNoTime); + expect(validate).toThrow(Language.errorNoTime); }); it("allows startTime 16:20", () => { @@ -105,7 +105,7 @@ describe("validationSchema", () => { startTime: "9:30", }; const validate = () => validationSchema.validateSync(values); - expect(validate).toThrowError(Language.errorTime); + expect(validate).toThrow(Language.errorTime); }); it("disallows startTime to be HH:m", () => { @@ -114,7 +114,7 @@ describe("validationSchema", () => { startTime: "09:5", }; const validate = () => validationSchema.validateSync(values); - expect(validate).toThrowError(Language.errorTime); + expect(validate).toThrow(Language.errorTime); }); it("disallows an invalid startTime 24:01", () => { @@ -123,7 +123,7 @@ describe("validationSchema", () => { startTime: "24:01", }; const validate = () => validationSchema.validateSync(values); - expect(validate).toThrowError(Language.errorTime); + expect(validate).toThrow(Language.errorTime); }); it("disallows an invalid startTime 09:60", () => { @@ -132,7 +132,7 @@ describe("validationSchema", () => { startTime: "09:60", }; const validate = () => validationSchema.validateSync(values); - expect(validate).toThrowError(Language.errorTime); + expect(validate).toThrow(Language.errorTime); }); it("disallows an invalid timezone Canada/North", () => { @@ -141,7 +141,7 @@ describe("validationSchema", () => { timezone: "Canada/North", }; const validate = () => validationSchema.validateSync(values); - expect(validate).toThrowError(Language.errorTimezone); + expect(validate).toThrow(Language.errorTimezone); }); it.each<[string]>(timeZones.map((zone) => [zone]))( @@ -162,7 +162,7 @@ describe("validationSchema", () => { ttl: 24 * 7, }; const validate = () => validationSchema.validateSync(values); - expect(validate).not.toThrowError(); + expect(validate).not.toThrow(); }); it("allows a ttl of 30 days", () => { @@ -171,7 +171,7 @@ describe("validationSchema", () => { ttl: 24 * 30, }; const validate = () => validationSchema.validateSync(values); - expect(validate).not.toThrowError(); + expect(validate).not.toThrow(); }); it("disallows a ttl of 30 days + 1 hour", () => { @@ -180,7 +180,7 @@ describe("validationSchema", () => { ttl: 24 * 30 + 1, }; const validate = () => validationSchema.validateSync(values); - expect(validate).toThrowError(Language.errorTtlMax); + expect(validate).toThrow(Language.errorTtlMax); }); it("allows a ttl of 1.2 hours", () => { @@ -189,7 +189,7 @@ describe("validationSchema", () => { ttl: 1.2, }; const validate = () => validationSchema.validateSync(values); - expect(validate).not.toThrowError(); + expect(validate).not.toThrow(); }); }); diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.tsx index 813018f35543a..cdf1f5b0d9726 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.tsx @@ -158,7 +158,7 @@ export const validationSchema = Yup.object({ try { dayjs.tz(dayjs(), value); return true; - } catch (e) { + } catch { return false; } }), diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.stories.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.stories.tsx index e576e479d27c7..7503c439a3e9c 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.stories.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.stories.tsx @@ -1,12 +1,3 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { getAuthorizationKey } from "api/queries/authCheck"; -import { templateByNameKey } from "api/queries/templates"; -import { workspaceByOwnerAndNameKey } from "api/queries/workspaces"; -import type { Workspace } from "api/typesGenerated"; -import { - reactRouterNestedAncestors, - reactRouterParameters, -} from "storybook-addon-remix-react-router"; import { MockPrebuiltWorkspace, MockTemplate, @@ -14,12 +5,21 @@ import { MockWorkspace, } from "testHelpers/entities"; import { withAuthProvider, withDashboardProvider } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { getAuthorizationKey } from "api/queries/authCheck"; +import { templateByNameKey } from "api/queries/templates"; +import { workspaceByOwnerAndNameKey } from "api/queries/workspaces"; +import type { Workspace } from "api/typesGenerated"; +import { + reactRouterOutlet, + reactRouterParameters, +} from "storybook-addon-remix-react-router"; import { WorkspaceSettingsLayout } from "../WorkspaceSettingsLayout"; import WorkspaceSchedulePage from "./WorkspaceSchedulePage"; const meta = { title: "pages/WorkspaceSchedulePage", - component: WorkspaceSchedulePage, + component: WorkspaceSettingsLayout, decorators: [withAuthProvider, withDashboardProvider], parameters: { layout: "fullscreen", @@ -52,11 +52,11 @@ function workspaceRouterParameters(workspace: Workspace) { workspace: workspace.name, }, }, - routing: reactRouterNestedAncestors( + routing: reactRouterOutlet( { path: "/:username/:workspace/settings/schedule", }, - , + , ), }); } diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.test.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.test.tsx index 9ebede41abe60..f125071ebe9f3 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.test.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.test.tsx @@ -1,20 +1,20 @@ -import { screen } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; -import { http, HttpResponse } from "msw"; import { MockUserOwner, MockWorkspace } from "testHelpers/entities"; import { renderWithWorkspaceSettingsLayout } from "testHelpers/renderHelpers"; import { server } from "testHelpers/server"; -import { - Language as FormLanguage, - type WorkspaceScheduleFormValues, -} from "./WorkspaceScheduleForm"; -import WorkspaceSchedulePage from "./WorkspaceSchedulePage"; +import { screen } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { HttpResponse, http } from "msw"; import { formValuesToAutostartRequest, formValuesToTTLRequest, } from "./formToRequest"; import { scheduleToAutostart } from "./schedule"; import { ttlMsToAutostop } from "./ttl"; +import { + Language as FormLanguage, + type WorkspaceScheduleFormValues, +} from "./WorkspaceScheduleForm"; +import WorkspaceSchedulePage from "./WorkspaceSchedulePage"; const validValues: WorkspaceScheduleFormValues = { autostartEnabled: true, diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.tsx index 4c8526a4cda6b..23255316d2d25 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.tsx @@ -20,14 +20,14 @@ import { useWorkspaceSettings } from "pages/WorkspaceSettingsPage/WorkspaceSetti import { type FC, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; -import { useNavigate, useParams } from "react-router-dom"; +import { useNavigate, useParams } from "react-router"; import { docs } from "utils/docs"; import { pageTitle } from "utils/page"; -import { WorkspaceScheduleForm } from "./WorkspaceScheduleForm"; import { formValuesToAutostartRequest, formValuesToTTLRequest, } from "./formToRequest"; +import { WorkspaceScheduleForm } from "./WorkspaceScheduleForm"; const permissionsToCheck = (workspace: TypesGen.Workspace) => ({ diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/schedule.ts b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/schedule.ts index b188145cf8a92..edaf480aee617 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/schedule.ts +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/schedule.ts @@ -5,8 +5,8 @@ import utc from "dayjs/plugin/utc"; import map from "lodash/map"; import some from "lodash/some"; import { extractTimezone, stripTimezone } from "utils/schedule"; -import type { WorkspaceScheduleFormValues } from "./WorkspaceScheduleForm"; import type { Autostop } from "./ttl"; +import type { WorkspaceScheduleFormValues } from "./WorkspaceScheduleForm"; // REMARK: timezone plugin depends on UTC // diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsLayout.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsLayout.tsx index f3a36c98475e4..af64c06bef66c 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsLayout.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsLayout.tsx @@ -4,10 +4,10 @@ import { ErrorAlert } from "components/Alert/ErrorAlert"; import { Loader } from "components/Loader/Loader"; import { Margins } from "components/Margins/Margins"; import { Stack } from "components/Stack/Stack"; -import { type FC, Suspense, createContext, useContext } from "react"; +import { createContext, type FC, Suspense, useContext } from "react"; import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; -import { Outlet, useParams } from "react-router-dom"; +import { Outlet, useParams } from "react-router"; import { pageTitle } from "utils/page"; import { Sidebar } from "./Sidebar"; diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPage.test.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPage.test.tsx index 209388e2346d7..2b4637200d123 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPage.test.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPage.test.tsx @@ -1,11 +1,11 @@ -import { screen, waitFor, within } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; -import { API } from "api/api"; import { MockWorkspace } from "testHelpers/entities"; import { renderWithWorkspaceSettingsLayout, waitForLoaderToBeRemoved, } from "testHelpers/renderHelpers"; +import { screen, waitFor, within } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { API } from "api/api"; import WorkspaceSettingsPage from "./WorkspaceSettingsPage"; test("Submit the workspace settings page successfully", async () => { diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPage.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPage.tsx index fbe48b0216fc1..0c25c0a19f661 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPage.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPage.tsx @@ -3,7 +3,7 @@ import { displaySuccess } from "components/GlobalSnackbar/utils"; import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation } from "react-query"; -import { useNavigate, useParams } from "react-router-dom"; +import { useNavigate, useParams } from "react-router"; import { pageTitle } from "utils/page"; import type { WorkspaceSettingsFormValues } from "./WorkspaceSettingsForm"; import { useWorkspaceSettings } from "./WorkspaceSettingsLayout"; diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPageView.stories.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPageView.stories.tsx index 7b71cf293b221..3a1eb37e75e99 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPageView.stories.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPageView.stories.tsx @@ -1,6 +1,6 @@ -import { action } from "@storybook/addon-actions"; -import type { Meta, StoryObj } from "@storybook/react"; import { MockWorkspace } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { action } from "storybook/actions"; import { WorkspaceSettingsPageView } from "./WorkspaceSettingsPageView"; const meta: Meta = { diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceSharingPage/WorkspaceSharingPage.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSharingPage/WorkspaceSharingPage.tsx new file mode 100644 index 0000000000000..74f240050c601 --- /dev/null +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSharingPage/WorkspaceSharingPage.tsx @@ -0,0 +1,36 @@ +import { updateWorkspaceACL } from "api/queries/workspaces"; +import { Button } from "components/Button/Button"; +import { ExternalImage } from "components/ExternalImage/ExternalImage"; +import type { FC } from "react"; +import { useMutation } from "react-query"; +import { useWorkspaceSettings } from "../WorkspaceSettingsLayout"; + +const localKirbyId = "1ce34e51-3135-4720-8bfc-eabce178eafb"; +const devKirbyId = "7a4319a5-0dc1-41e1-95e4-f31e312b0ecc"; + +const WorkspaceSharingPage: FC = () => { + const workspace = useWorkspaceSettings(); + const shareWithKirbyMutation = useMutation(updateWorkspaceACL(workspace.id)); + + const onClick = () => { + shareWithKirbyMutation.mutate({ + user_roles: { + [localKirbyId]: "admin", + [devKirbyId]: "admin", + }, + }); + }; + + return ( + + ); +}; + +export default WorkspaceSharingPage; diff --git a/site/src/pages/WorkspacesPage/BatchDeleteConfirmation.stories.tsx b/site/src/pages/WorkspacesPage/BatchDeleteConfirmation.stories.tsx index 3abb069f05d7b..6f5921023073b 100644 --- a/site/src/pages/WorkspacesPage/BatchDeleteConfirmation.stories.tsx +++ b/site/src/pages/WorkspacesPage/BatchDeleteConfirmation.stories.tsx @@ -1,7 +1,7 @@ -import { action } from "@storybook/addon-actions"; -import type { Meta, StoryObj } from "@storybook/react"; import { chromatic } from "testHelpers/chromatic"; import { MockUserMember, MockWorkspace } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { action } from "storybook/actions"; import { BatchDeleteConfirmation } from "./BatchDeleteConfirmation"; const meta: Meta = { diff --git a/site/src/pages/WorkspacesPage/BatchUpdateConfirmation.stories.tsx b/site/src/pages/WorkspacesPage/BatchUpdateConfirmation.stories.tsx index 140d433d3e860..14a7db55b19ec 100644 --- a/site/src/pages/WorkspacesPage/BatchUpdateConfirmation.stories.tsx +++ b/site/src/pages/WorkspacesPage/BatchUpdateConfirmation.stories.tsx @@ -1,7 +1,3 @@ -import { action } from "@storybook/addon-actions"; -import type { Meta, StoryObj } from "@storybook/react"; -import type { Workspace } from "api/typesGenerated"; -import { useQueryClient } from "react-query"; import { chromatic } from "testHelpers/chromatic"; import { MockDormantOutdatedWorkspace, @@ -11,6 +7,10 @@ import { MockUserMember, MockWorkspace, } from "testHelpers/entities"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import type { Workspace } from "api/typesGenerated"; +import { useQueryClient } from "react-query"; +import { action } from "storybook/actions"; import { BatchUpdateConfirmation, type Update, diff --git a/site/src/pages/WorkspacesPage/BatchUpdateConfirmation.tsx b/site/src/pages/WorkspacesPage/BatchUpdateConfirmation.tsx index a6b0a27b374f4..879f27d53c8ae 100644 --- a/site/src/pages/WorkspacesPage/BatchUpdateConfirmation.tsx +++ b/site/src/pages/WorkspacesPage/BatchUpdateConfirmation.tsx @@ -8,8 +8,12 @@ import { MemoizedInlineMarkdown } from "components/Markdown/Markdown"; import { Stack } from "components/Stack/Stack"; import dayjs from "dayjs"; import relativeTime from "dayjs/plugin/relativeTime"; -import { MonitorDownIcon } from "lucide-react"; -import { ClockIcon, SettingsIcon, UserIcon } from "lucide-react"; +import { + ClockIcon, + MonitorDownIcon, + SettingsIcon, + UserIcon, +} from "lucide-react"; import { type FC, type ReactNode, useEffect, useMemo, useState } from "react"; import { useQueries } from "react-query"; @@ -260,17 +264,9 @@ const DormantWorkspaces: FC = ({ workspaces }) => { return ( <>

      - {workspaces.length === 1 ? ( - <> - This selected workspace is dormant, and must be activated before it - can be updated. - - ) : ( - <> - These selected workspaces are dormant, and must be activated before - they can be updated. - - )} + {workspaces.length === 1 + ? "This selected workspace is dormant, and must be activated before it can be updated." + : "These selected workspaces are dormant, and must be activated before they can be updated."}

        {workspaces.map((workspace) => ( diff --git a/site/src/pages/WorkspacesPage/WorkspacesButton.tsx b/site/src/pages/WorkspacesPage/WorkspacesButton.tsx index 404425b56a1e0..eda60e4fdc8f9 100644 --- a/site/src/pages/WorkspacesPage/WorkspacesButton.tsx +++ b/site/src/pages/WorkspacesPage/WorkspacesButton.tsx @@ -2,24 +2,23 @@ import Link from "@mui/material/Link"; import type { Template } from "api/typesGenerated"; import { Avatar } from "components/Avatar/Avatar"; import { Button } from "components/Button/Button"; -import { Loader } from "components/Loader/Loader"; -import { MenuSearch } from "components/Menu/MenuSearch"; -import { OverflowY } from "components/OverflowY/OverflowY"; -import { SearchEmpty, searchStyles } from "components/Search/Search"; import { Popover, PopoverContent, PopoverTrigger, } from "components/deprecated/Popover/Popover"; -import { ExternalLinkIcon } from "lucide-react"; -import { ChevronDownIcon } from "lucide-react"; +import { Loader } from "components/Loader/Loader"; +import { MenuSearch } from "components/Menu/MenuSearch"; +import { OverflowY } from "components/OverflowY/OverflowY"; +import { SearchEmpty, searchStyles } from "components/Search/Search"; +import { ChevronDownIcon, ExternalLinkIcon } from "lucide-react"; import { linkToTemplate, useLinks } from "modules/navigation"; import { type FC, type ReactNode, useState } from "react"; import type { UseQueryResult } from "react-query"; import { Link as RouterLink, type LinkProps as RouterLinkProps, -} from "react-router-dom"; +} from "react-router"; type TemplatesQuery = UseQueryResult; @@ -39,7 +38,7 @@ export const WorkspacesButton: FC = ({ const [searchTerm, setSearchTerm] = useState(""); const processed = sortTemplatesByUsersDesc(templates ?? [], searchTerm); - let emptyState: ReactNode = undefined; + let emptyState: ReactNode; if (templates?.length === 0) { emptyState = ( diff --git a/site/src/pages/WorkspacesPage/WorkspacesEmpty.tsx b/site/src/pages/WorkspacesPage/WorkspacesEmpty.tsx index 45c9b221ef743..810a2598f3d26 100644 --- a/site/src/pages/WorkspacesPage/WorkspacesEmpty.tsx +++ b/site/src/pages/WorkspacesPage/WorkspacesEmpty.tsx @@ -4,7 +4,7 @@ import { Button } from "components/Button/Button"; import { EmptyState } from "components/EmptyState/EmptyState"; import { linkToTemplate, useLinks } from "modules/navigation"; import type { FC } from "react"; -import { Link } from "react-router-dom"; +import { Link } from "react-router"; interface WorkspacesEmptyProps { isUsingFilter: boolean; diff --git a/site/src/pages/WorkspacesPage/WorkspacesPage.test.tsx b/site/src/pages/WorkspacesPage/WorkspacesPage.test.tsx index 56f8fb34a32e8..b80da553de6d6 100644 --- a/site/src/pages/WorkspacesPage/WorkspacesPage.test.tsx +++ b/site/src/pages/WorkspacesPage/WorkspacesPage.test.tsx @@ -1,8 +1,3 @@ -import { screen, waitFor, within } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; -import { API } from "api/api"; -import type { Workspace } from "api/typesGenerated"; -import { http, HttpResponse } from "msw"; import { MockDormantOutdatedWorkspace, MockDormantWorkspace, @@ -17,6 +12,11 @@ import { waitForLoaderToBeRemoved, } from "testHelpers/renderHelpers"; import { server } from "testHelpers/server"; +import { screen, waitFor, within } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { API } from "api/api"; +import type { Workspace, WorkspacesResponse } from "api/typesGenerated"; +import { HttpResponse, http } from "msw"; import * as CreateDayString from "utils/createDayString"; import WorkspacesPage from "./WorkspacesPage"; @@ -28,18 +28,17 @@ describe("WorkspacesPage", () => { }); it("renders an empty workspaces page", async () => { - // Given server.use( http.get("/api/v2/workspaces", async () => { - return HttpResponse.json({ workspaces: [], count: 0 }); + return HttpResponse.json({ + workspaces: [], + count: 0, + }); }), ); - // When renderWithAuth(); - - // Then - await screen.findByText("Create a workspace"); + await screen.findByRole("heading", { name: /Create a workspace/ }); }); it("renders a filled workspaces page", async () => { @@ -306,6 +305,67 @@ describe("WorkspacesPage", () => { MockStoppedWorkspace.latest_build.template_version_id, ); }); + + it("correctly handles pagination by including pagination parameters in query key", async () => { + const totalWorkspaces = 50; + const workspacesPage1 = Array.from({ length: 25 }, (_, i) => ({ + ...MockWorkspace, + id: `page1-workspace-${i}`, + name: `page1-workspace-${i}`, + })); + const workspacesPage2 = Array.from({ length: 25 }, (_, i) => ({ + ...MockWorkspace, + id: `page2-workspace-${i}`, + name: `page2-workspace-${i}`, + })); + + const getWorkspacesSpy = jest.spyOn(API, "getWorkspaces"); + + getWorkspacesSpy.mockImplementation(({ offset }) => { + switch (offset) { + case 0: + return Promise.resolve({ + workspaces: workspacesPage1, + count: totalWorkspaces, + }); + case 25: + return Promise.resolve({ + workspaces: workspacesPage2, + count: totalWorkspaces, + }); + default: + return Promise.reject(new Error("Unexpected offset")); + } + }); + + const user = userEvent.setup(); + renderWithAuth(); + + await waitFor(() => { + expect(screen.getByText("page1-workspace-0")).toBeInTheDocument(); + }); + + expect(getWorkspacesSpy).toHaveBeenLastCalledWith({ + q: "owner:me", + offset: 0, + limit: 25, + }); + + const nextPageButton = screen.getByRole("button", { name: /next page/i }); + await user.click(nextPageButton); + + await waitFor(() => { + expect(screen.getByText("page2-workspace-0")).toBeInTheDocument(); + }); + + expect(getWorkspacesSpy).toHaveBeenLastCalledWith({ + q: "owner:me", + offset: 25, + limit: 25, + }); + + expect(screen.queryByText("page1-workspace-0")).not.toBeInTheDocument(); + }); }); const getWorkspaceCheckbox = (workspace: Workspace) => { diff --git a/site/src/pages/WorkspacesPage/WorkspacesPage.tsx b/site/src/pages/WorkspacesPage/WorkspacesPage.tsx index fa96191501379..0488fc0730e5d 100644 --- a/site/src/pages/WorkspacesPage/WorkspacesPage.tsx +++ b/site/src/pages/WorkspacesPage/WorkspacesPage.tsx @@ -1,8 +1,8 @@ import { getErrorDetail, getErrorMessage } from "api/errors"; import { workspacePermissionsByOrganization } from "api/queries/organizations"; -import { templates } from "api/queries/templates"; +import { templates, templateVersionRoot } from "api/queries/templates"; import { workspaces } from "api/queries/workspaces"; -import type { Workspace, WorkspaceStatus } from "api/typesGenerated"; +import type { WorkspaceStatus } from "api/typesGenerated"; import { useFilter } from "components/Filter/Filter"; import { useUserFilterMenu } from "components/Filter/UserFilter"; import { displayError } from "components/GlobalSnackbar/utils"; @@ -11,26 +11,32 @@ import { useEffectEvent } from "hooks/hookPolyfills"; import { usePagination } from "hooks/usePagination"; import { useDashboard } from "modules/dashboard/useDashboard"; import { useOrganizationsFilterMenu } from "modules/tableFiltering/options"; -import { type FC, useEffect, useMemo, useState } from "react"; +import { type FC, useMemo, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useQuery, useQueryClient } from "react-query"; -import { useSearchParams } from "react-router-dom"; +import { useSearchParams } from "react-router"; import { pageTitle } from "utils/page"; import { BatchDeleteConfirmation } from "./BatchDeleteConfirmation"; import { BatchUpdateConfirmation } from "./BatchUpdateConfirmation"; -import { WorkspacesPageView } from "./WorkspacesPageView"; import { useBatchActions } from "./batchActions"; import { useStatusFilterMenu, useTemplateFilterMenu } from "./filter/menus"; +import { WorkspacesPageView } from "./WorkspacesPageView"; -// To reduce the number of fetches, we reduce the fetch interval if there are no -// active workspace builds. -const ACTIVE_BUILD_STATUSES: WorkspaceStatus[] = [ +/** + * The set of all workspace statuses that indicate that the state for a + * workspace is in the middle of a transition and will eventually reach a more + * stable state/status. + */ +const ACTIVE_BUILD_STATUSES: readonly WorkspaceStatus[] = [ "canceling", "deleting", "pending", "starting", "stopping", ]; + +// To reduce the number of fetches, we reduce the fetch interval if there are no +// active workspace builds. const ACTIVE_BUILDS_REFRESH_INTERVAL = 5_000; const NO_ACTIVE_BUILDS_REFRESH_INTERVAL = 30_000; @@ -48,13 +54,35 @@ function useSafeSearchParams() { >; } +type BatchAction = "delete" | "update"; + const WorkspacesPage: FC = () => { const queryClient = useQueryClient(); - // If we use a useSearchParams for each hook, the values will not be in sync. - // So we have to use a single one, centralizing the values, and pass it to - // each hook. - const searchParamsResult = useSafeSearchParams(); - const pagination = usePagination({ searchParamsResult }); + // We have to be careful with how we use useSearchParams or any other + // derived hooks. The URL is global state, but each call to useSearchParams + // creates a different, contradictory source of truth for what the URL + // should look like. We need to make sure that we only mount the hook once + // per page + const [searchParams, setSearchParams] = useSafeSearchParams(); + // Always need to make sure that we reset the checked workspaces each time + // the filtering or pagination changes, as that will almost always change + // which workspaces are shown on screen and which can be interacted with + const [checkedWorkspaceIds, setCheckedWorkspaceIds] = useState( + new Set(), + ); + const resetChecked = () => { + if (checkedWorkspaceIds.size !== 0) { + setCheckedWorkspaceIds(new Set()); + } + }; + + const pagination = usePagination({ + searchParams, + onSearchParamsChange: (newParams) => { + setSearchParams(newParams); + resetChecked(); + }, + }); const { permissions, user: me } = useAuthenticated(); const { entitlements } = useDashboard(); const templatesQuery = useQuery(templates()); @@ -78,14 +106,19 @@ const WorkspacesPage: FC = () => { }); }, [templatesQuery.data, workspacePermissionsQuery.data]); - const filterProps = useWorkspacesFilter({ - searchParamsResult, - onFilterChange: () => pagination.goToPage(1), + const filterState = useWorkspacesFilter({ + searchParams, + onSearchParamsChange: setSearchParams, + onFilterChange: () => { + pagination.goToPage(1); + resetChecked(); + }, }); const workspacesQueryOptions = workspaces({ - ...pagination, - q: filterProps.filter.query, + limit: pagination.limit, + offset: pagination.offset, + q: filterState.filter.query, }); const { data, error, refetch } = useQuery({ ...workspacesQueryOptions, @@ -109,28 +142,18 @@ const WorkspacesPage: FC = () => { refetchOnWindowFocus: "always", }); - const [checkedWorkspaces, setCheckedWorkspaces] = useState< - readonly Workspace[] - >([]); - const [confirmingBatchAction, setConfirmingBatchAction] = useState< - "delete" | "update" | null - >(null); - const [urlSearchParams] = searchParamsResult; + const [activeBatchAction, setActiveBatchAction] = useState(); const canCheckWorkspaces = entitlements.features.workspace_batch_actions.enabled; const batchActions = useBatchActions({ onSuccess: async () => { await refetch(); - setCheckedWorkspaces([]); + resetChecked(); }, }); - // We want to uncheck the selected workspaces always when the url changes - // because of filtering or pagination - // biome-ignore lint/correctness/useExhaustiveDependencies: consider refactoring - useEffect(() => { - setCheckedWorkspaces([]); - }, [urlSearchParams]); + const checkedWorkspaces = + data?.workspaces.filter((w) => checkedWorkspaceIds.has(w.id)) ?? []; return ( <> @@ -142,7 +165,18 @@ const WorkspacesPage: FC = () => { canCreateTemplate={permissions.createTemplates} canChangeVersions={permissions.updateTemplates} checkedWorkspaces={checkedWorkspaces} - onCheckChange={setCheckedWorkspaces} + onCheckChange={(newWorkspaces) => { + setCheckedWorkspaceIds((current) => { + const newIds = newWorkspaces.map((ws) => ws.id); + const sameContent = + current.size === newIds.length && + newIds.every((id) => current.has(id)); + if (sameContent) { + return current; + } + return new Set(newIds); + }); + }} canCheckWorkspaces={canCheckWorkspaces} templates={filteredTemplates} templatesFetchStatus={templatesQuery.status} @@ -152,12 +186,31 @@ const WorkspacesPage: FC = () => { page={pagination.page} limit={pagination.limit} onPageChange={pagination.goToPage} - filterProps={filterProps} - isRunningBatchAction={batchActions.isLoading} - onDeleteAll={() => setConfirmingBatchAction("delete")} - onUpdateAll={() => setConfirmingBatchAction("update")} - onStartAll={() => batchActions.startAll(checkedWorkspaces)} - onStopAll={() => batchActions.stopAll(checkedWorkspaces)} + filterState={filterState} + isRunningBatchAction={batchActions.isProcessing} + onBatchDeleteTransition={() => setActiveBatchAction("delete")} + onBatchStartTransition={() => batchActions.start(checkedWorkspaces)} + onBatchStopTransition={() => batchActions.stop(checkedWorkspaces)} + onBatchUpdateTransition={() => { + // Just because batch-updating can be really dangerous + // action for running workspaces, we're going to invalidate + // all relevant queries as a prefetch strategy before the + // modal content is even allowed to mount. + for (const ws of checkedWorkspaces) { + // Our data layer is a little messy right now, so + // there's no great way to invalidate a bunch of + // template version queries with a single function call, + // while also avoiding all other tangentially connected + // resources that use the same key pattern. Have to be + // super granular and make one call per workspace. + queryClient.invalidateQueries({ + queryKey: [templateVersionRoot, ws.template_active_version_id], + exact: true, + type: "all", + }); + } + setActiveBatchAction("update"); + }} onActionSuccess={async () => { await queryClient.invalidateQueries({ queryKey: workspacesQueryOptions.queryKey, @@ -172,31 +225,27 @@ const WorkspacesPage: FC = () => { /> setActiveBatchAction(undefined)} onConfirm={async () => { - await batchActions.deleteAll(checkedWorkspaces); - setConfirmingBatchAction(null); - }} - onClose={() => { - setConfirmingBatchAction(null); + await batchActions.delete(checkedWorkspaces); + setActiveBatchAction(undefined); }} /> setActiveBatchAction(undefined)} onConfirm={async () => { - await batchActions.updateAll({ + await batchActions.updateTemplateVersions({ workspaces: checkedWorkspaces, isDynamicParametersEnabled: false, }); - setConfirmingBatchAction(null); - }} - onClose={() => { - setConfirmingBatchAction(null); + setActiveBatchAction(undefined); }} /> @@ -206,17 +255,20 @@ const WorkspacesPage: FC = () => { export default WorkspacesPage; type UseWorkspacesFilterOptions = { - searchParamsResult: ReturnType; + searchParams: URLSearchParams; + onSearchParamsChange: (newParams: URLSearchParams) => void; onFilterChange: () => void; }; const useWorkspacesFilter = ({ - searchParamsResult, + searchParams, + onSearchParamsChange, onFilterChange, }: UseWorkspacesFilterOptions) => { const filter = useFilter({ fallbackFilter: "owner:me", - searchParamsResult, + searchParams, + onSearchParamsChange, onUpdate: onFilterChange, }); diff --git a/site/src/pages/WorkspacesPage/WorkspacesPageView.stories.tsx b/site/src/pages/WorkspacesPage/WorkspacesPageView.stories.tsx index e0178dea06c09..006a2fb62a8ff 100644 --- a/site/src/pages/WorkspacesPage/WorkspacesPageView.stories.tsx +++ b/site/src/pages/WorkspacesPage/WorkspacesPageView.stories.tsx @@ -1,18 +1,3 @@ -import type { Meta, StoryObj } from "@storybook/react"; -import { expect, within } from "@storybook/test"; -import { - type Workspace, - type WorkspaceStatus, - WorkspaceStatuses, -} from "api/typesGenerated"; -import { - MockMenu, - getDefaultFilterProps, -} from "components/Filter/storyHelpers"; -import { DEFAULT_RECORDS_PER_PAGE } from "components/PaginationWidget/utils"; -import dayjs from "dayjs"; -import uniqueId from "lodash/uniqueId"; -import type { ComponentProps } from "react"; import { MockBuildInfo, MockOrganization, @@ -30,6 +15,21 @@ import { withDashboardProvider, withProxyProvider, } from "testHelpers/storybook"; +import type { Meta, StoryObj } from "@storybook/react-vite"; +import { + type Workspace, + type WorkspaceStatus, + WorkspaceStatuses, +} from "api/typesGenerated"; +import { + getDefaultFilterProps, + MockMenu, +} from "components/Filter/storyHelpers"; +import { DEFAULT_RECORDS_PER_PAGE } from "components/PaginationWidget/utils"; +import dayjs from "dayjs"; +import uniqueId from "lodash/uniqueId"; +import { expect, within } from "storybook/test"; +import type { WorkspaceFilterState } from "./filter/WorkspacesFilter"; import { WorkspacesPageView } from "./WorkspacesPageView"; const createWorkspace = ( @@ -134,9 +134,7 @@ const allWorkspaces = [ ...Object.values(additionalWorkspaces), ]; -type FilterProps = ComponentProps["filterProps"]; - -const defaultFilterProps = getDefaultFilterProps({ +const defaultFilterProps = getDefaultFilterProps({ query: "owner:me", menus: { user: MockMenu, @@ -169,7 +167,7 @@ const meta: Meta = { component: WorkspacesPageView, args: { limit: DEFAULT_RECORDS_PER_PAGE, - filterProps: defaultFilterProps, + filterState: defaultFilterProps, checkedWorkspaces: [], canCheckWorkspaces: true, templates: mockTemplates, @@ -266,7 +264,7 @@ export const UserHasNoWorkspacesAndNoTemplates: Story = { export const NoSearchResults: Story = { args: { workspaces: [], - filterProps: { + filterState: { ...defaultFilterProps, filter: { ...defaultFilterProps.filter, diff --git a/site/src/pages/WorkspacesPage/WorkspacesPageView.tsx b/site/src/pages/WorkspacesPage/WorkspacesPageView.tsx index 6563533bc43da..d5b7b4a03ef31 100644 --- a/site/src/pages/WorkspacesPage/WorkspacesPageView.tsx +++ b/site/src/pages/WorkspacesPage/WorkspacesPageView.tsx @@ -17,18 +17,23 @@ import { PaginationWidgetBase } from "components/PaginationWidget/PaginationWidg import { Spinner } from "components/Spinner/Spinner"; import { Stack } from "components/Stack/Stack"; import { TableToolbar } from "components/TableToolbar/TableToolbar"; -import { CloudIcon } from "lucide-react"; -import { ChevronDownIcon, PlayIcon, SquareIcon, TrashIcon } from "lucide-react"; +import { + ChevronDownIcon, + CloudIcon, + PlayIcon, + SquareIcon, + TrashIcon, +} from "lucide-react"; import { WorkspacesTable } from "pages/WorkspacesPage/WorkspacesTable"; import type { FC } from "react"; import type { UseQueryResult } from "react-query"; import { mustUpdateWorkspace } from "utils/workspace"; -import { WorkspaceHelpTooltip } from "./WorkspaceHelpTooltip"; -import { WorkspacesButton } from "./WorkspacesButton"; import { - type WorkspaceFilterProps, + type WorkspaceFilterState, WorkspacesFilter, } from "./filter/WorkspacesFilter"; +import { WorkspaceHelpTooltip } from "./WorkspaceHelpTooltip"; +import { WorkspacesButton } from "./WorkspacesButton"; const Language = { pageTitle: "Workspaces", @@ -45,16 +50,16 @@ interface WorkspacesPageViewProps { workspaces?: readonly Workspace[]; checkedWorkspaces: readonly Workspace[]; count?: number; - filterProps: WorkspaceFilterProps; + filterState: WorkspaceFilterState; page: number; limit: number; onPageChange: (page: number) => void; onCheckChange: (checkedWorkspaces: readonly Workspace[]) => void; isRunningBatchAction: boolean; - onDeleteAll: () => void; - onUpdateAll: () => void; - onStartAll: () => void; - onStopAll: () => void; + onBatchDeleteTransition: () => void; + onBatchUpdateTransition: () => void; + onBatchStartTransition: () => void; + onBatchStopTransition: () => void; canCheckWorkspaces: boolean; templatesFetchStatus: TemplateQuery["status"]; templates: TemplateQuery["data"]; @@ -69,15 +74,15 @@ export const WorkspacesPageView: FC = ({ error, limit, count, - filterProps, + filterState, onPageChange, page, checkedWorkspaces, onCheckChange, - onDeleteAll, - onUpdateAll, - onStopAll, - onStartAll, + onBatchDeleteTransition, + onBatchUpdateTransition, + onBatchStopTransition, + onBatchStartTransition, isRunningBatchAction, canCheckWorkspaces, templates, @@ -87,10 +92,10 @@ export const WorkspacesPageView: FC = ({ onActionSuccess, onActionError, }) => { - // Let's say the user has 5 workspaces, but tried to hit page 100, which does - // not exist. In this case, the page is not valid and we want to show a better - // error message. - const invalidPageNumber = page !== 1 && workspaces?.length === 0; + // Let's say the user has 5 workspaces, but tried to hit page 100, which + // does not exist. In this case, the page is not valid and we want to show a + // better error message. + const pageNumberIsInvalid = page !== 1 && workspaces?.length === 0; return ( @@ -117,9 +122,12 @@ export const WorkspacesPageView: FC = ({ )} @@ -155,7 +163,7 @@ export const WorkspacesPageView: FC = ({ !mustUpdateWorkspace(w, canChangeVersions), ) } - onClick={onStartAll} + onClick={onBatchStartTransition} > Start @@ -165,12 +173,12 @@ export const WorkspacesPageView: FC = ({ (w) => w.latest_build.status === "running", ) } - onClick={onStopAll} + onClick={onBatchStopTransition} > Stop - + = ({ Delete… @@ -187,7 +195,7 @@ export const WorkspacesPageView: FC = ({ ) : ( - !invalidPageNumber && ( + !pageNumberIsInvalid && ( = ({ )} - {invalidPageNumber ? ( + {pageNumberIsInvalid ? ( ({ border: `1px solid ${theme.palette.divider}`, @@ -221,7 +229,7 @@ export const WorkspacesPageView: FC = ({ Promise; } -export function useBatchActions(options: UseBatchActionsProps) { +type UpdateAllPayload = Readonly<{ + workspaces: readonly Workspace[]; + isDynamicParametersEnabled: boolean; +}>; + +type UseBatchActionsResult = Readonly<{ + isProcessing: boolean; + start: (workspaces: readonly Workspace[]) => Promise; + stop: (workspaces: readonly Workspace[]) => Promise; + delete: (workspaces: readonly Workspace[]) => Promise; + updateTemplateVersions: ( + payload: UpdateAllPayload, + ) => Promise; + favorite: (payload: readonly Workspace[]) => Promise; + unfavorite: (payload: readonly Workspace[]) => Promise; +}>; + +export function useBatchActions( + options: UseBatchActionsOptions, +): UseBatchActionsResult { const { onSuccess } = options; const startAllMutation = useMutation({ @@ -45,10 +64,7 @@ export function useBatchActions(options: UseBatchActionsProps) { }); const updateAllMutation = useMutation({ - mutationFn: (payload: { - workspaces: readonly Workspace[]; - isDynamicParametersEnabled: boolean; - }) => { + mutationFn: (payload: UpdateAllPayload) => { const { workspaces, isDynamicParametersEnabled } = payload; return Promise.all( workspaces @@ -63,8 +79,8 @@ export function useBatchActions(options: UseBatchActionsProps) { }); const favoriteAllMutation = useMutation({ - mutationFn: (workspaces: readonly Workspace[]) => { - return Promise.all( + mutationFn: async (workspaces: readonly Workspace[]): Promise => { + await Promise.all( workspaces .filter((w) => !w.favorite) .map((w) => API.putFavoriteWorkspace(w.id)), @@ -77,8 +93,8 @@ export function useBatchActions(options: UseBatchActionsProps) { }); const unfavoriteAllMutation = useMutation({ - mutationFn: (workspaces: readonly Workspace[]) => { - return Promise.all( + mutationFn: async (workspaces: readonly Workspace[]): Promise => { + await Promise.all( workspaces .filter((w) => w.favorite) .map((w) => API.deleteFavoriteWorkspace(w.id)), @@ -91,13 +107,13 @@ export function useBatchActions(options: UseBatchActionsProps) { }); return { - favoriteAll: favoriteAllMutation.mutateAsync, - unfavoriteAll: unfavoriteAllMutation.mutateAsync, - startAll: startAllMutation.mutateAsync, - stopAll: stopAllMutation.mutateAsync, - deleteAll: deleteAllMutation.mutateAsync, - updateAll: updateAllMutation.mutateAsync, - isLoading: + favorite: favoriteAllMutation.mutateAsync, + unfavorite: unfavoriteAllMutation.mutateAsync, + start: startAllMutation.mutateAsync, + stop: stopAllMutation.mutateAsync, + delete: deleteAllMutation.mutateAsync, + updateTemplateVersions: updateAllMutation.mutateAsync, + isProcessing: favoriteAllMutation.isPending || unfavoriteAllMutation.isPending || startAllMutation.isPending || diff --git a/site/src/pages/WorkspacesPage/filter/WorkspacesFilter.tsx b/site/src/pages/WorkspacesPage/filter/WorkspacesFilter.tsx index ea3081d84c7f3..caebfd04526d4 100644 --- a/site/src/pages/WorkspacesPage/filter/WorkspacesFilter.tsx +++ b/site/src/pages/WorkspacesPage/filter/WorkspacesFilter.tsx @@ -1,4 +1,8 @@ -import { Filter, MenuSkeleton, type useFilter } from "components/Filter/Filter"; +import { + Filter, + MenuSkeleton, + type UseFilterResult, +} from "components/Filter/Filter"; import { type UserFilterMenu, UserMenu } from "components/Filter/UserFilter"; import { useDashboard } from "modules/dashboard/useDashboard"; import { @@ -62,8 +66,8 @@ const PRESETS_WITH_DORMANT: FilterPreset[] = [ }, ]; -export type WorkspaceFilterProps = { - filter: ReturnType; +export type WorkspaceFilterState = { + filter: UseFilterResult; error?: unknown; menus: { user?: UserFilterMenu; @@ -73,21 +77,36 @@ export type WorkspaceFilterProps = { }; }; +type WorkspaceFilterProps = Readonly<{ + filter: UseFilterResult; + error: unknown; + templateMenu: TemplateFilterMenu; + statusMenu: StatusFilterMenu; + + userMenu?: UserFilterMenu; + organizationsMenu?: OrganizationsFilterMenu; +}>; + export const WorkspacesFilter: FC = ({ filter, error, - menus, + templateMenu, + statusMenu, + userMenu, + organizationsMenu, }) => { const { entitlements, showOrganizations } = useDashboard(); const width = showOrganizations ? 175 : undefined; const presets = entitlements.features.advanced_template_scheduling.enabled ? PRESETS_WITH_DORMANT : PRESET_FILTERS; + const organizationsActive = + showOrganizations && organizationsMenu !== undefined; return ( = ({ )} options={ <> - {menus.user && } - - - {showOrganizations && menus.organizations !== undefined && ( - + {userMenu && } + + + {organizationsActive && ( + )} } optionsSkeleton={ <> - {menus.user && } + {userMenu && } - {showOrganizations && } + {organizationsActive && } } /> diff --git a/site/src/pages/WorkspacesPage/filter/menus.tsx b/site/src/pages/WorkspacesPage/filter/menus.tsx index cb07ab160ed11..c886f06a84494 100644 --- a/site/src/pages/WorkspacesPage/filter/menus.tsx +++ b/site/src/pages/WorkspacesPage/filter/menus.tsx @@ -1,15 +1,15 @@ import { API } from "api/api"; import type { Template, WorkspaceStatus } from "api/typesGenerated"; import { Avatar } from "components/Avatar/Avatar"; +import { + type UseFilterMenuOptions, + useFilterMenu, +} from "components/Filter/menu"; import { SelectFilter, type SelectFilterOption, SelectFilterSearch, } from "components/Filter/SelectFilter"; -import { - type UseFilterMenuOptions, - useFilterMenu, -} from "components/Filter/menu"; import { StatusIndicatorDot, type StatusIndicatorDotProps, diff --git a/site/src/router.tsx b/site/src/router.tsx index 90a8bda22c1f3..6aaefb77d8731 100644 --- a/site/src/router.tsx +++ b/site/src/router.tsx @@ -1,13 +1,13 @@ import { GlobalErrorBoundary } from "components/ErrorBoundary/GlobalErrorBoundary"; import { TemplateRedirectController } from "pages/TemplatePage/TemplateRedirectController"; -import { Suspense, lazy } from "react"; +import { lazy, Suspense } from "react"; import { + createBrowserRouter, + createRoutesFromChildren, Navigate, Outlet, Route, - createBrowserRouter, - createRoutesFromChildren, -} from "react-router-dom"; +} from "react-router"; import { Loader } from "./components/Loader/Loader"; import { RequireAuth } from "./contexts/auth/RequireAuth"; import { DashboardLayout } from "./modules/dashboard/DashboardLayout"; @@ -86,6 +86,12 @@ const WorkspaceParametersExperimentRouter = lazy( "./pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersExperimentRouter" ), ); +const WorkspaceSharingPage = lazy( + () => + import( + "./pages/WorkspaceSettingsPage/WorkspaceSharingPage/WorkspaceSharingPage" + ), +); const TerminalPage = lazy(() => import("./pages/TerminalPage/TerminalPage")); const TemplatePermissionsPage = lazy( () => @@ -547,6 +553,7 @@ export const router = createBrowserRouter( element={} /> } /> + } /> diff --git a/site/src/serviceWorker.ts b/site/src/serviceWorker.ts index bc99983e02a6c..ad613d2421824 100644 --- a/site/src/serviceWorker.ts +++ b/site/src/serviceWorker.ts @@ -2,10 +2,9 @@ import type { WebpushMessage } from "api/typesGenerated"; -// @ts-ignore declare const self: ServiceWorkerGlobalScope; -self.addEventListener("install", (event) => { +self.addEventListener("install", (_event) => { self.skipWaiting(); }); diff --git a/site/src/testHelpers/entities.ts b/site/src/testHelpers/entities.ts index 14fbb2d2913af..993b012bc09e2 100644 --- a/site/src/testHelpers/entities.ts +++ b/site/src/testHelpers/entities.ts @@ -614,7 +614,7 @@ const MockUserAuthProvisioner: TypesGen.ProvisionerDaemon = { tags: { scope: "user" }, }; -const MockPskProvisioner: TypesGen.ProvisionerDaemon = { +const _MockPskProvisioner: TypesGen.ProvisionerDaemon = { ...MockProvisioner, id: "test-psk-provisioner", key_id: MockProvisionerPskKey.id, @@ -622,7 +622,7 @@ const MockPskProvisioner: TypesGen.ProvisionerDaemon = { name: "Test psk provisioner", }; -const MockKeyProvisioner: TypesGen.ProvisionerDaemon = { +const _MockKeyProvisioner: TypesGen.ProvisionerDaemon = { ...MockProvisioner, id: "test-key-provisioner", key_id: MockProvisionerKey.id, @@ -632,7 +632,7 @@ const MockKeyProvisioner: TypesGen.ProvisionerDaemon = { tags: MockProvisionerKey.tags, }; -const MockProvisioner2: TypesGen.ProvisionerDaemon = { +const _MockProvisioner2: TypesGen.ProvisionerDaemon = { ...MockProvisioner, id: "test-provisioner-2", name: "Test Provisioner 2", @@ -689,6 +689,7 @@ export const MockProvisionerJob: TypesGen.ProvisionerJob = { template_version_name: "test-version", workspace_name: "test-workspace", }, + logs_overflowed: false, }; export const MockFailedProvisionerJob: TypesGen.ProvisionerJob = { @@ -731,6 +732,7 @@ You can add instructions here [Some link info](https://coder.com)`, created_by: MockUserOwner, archived: false, + has_external_agent: false, }; export const MockTemplateVersion2: TypesGen.TemplateVersion = { @@ -750,6 +752,7 @@ You can add instructions here [Some link info](https://coder.com)`, created_by: MockUserOwner, archived: false, + has_external_agent: false, }; export const MockTemplateVersionWithMarkdownMessage: TypesGen.TemplateVersion = @@ -826,10 +829,11 @@ export const MockTemplate: TypesGen.Template = { deprecated: false, deprecation_message: "", max_port_share_level: "public", - use_classic_parameter_flow: true, + use_classic_parameter_flow: false, + cors_behavior: "simple", }; -const MockTemplateVersionFiles: TemplateVersionFiles = { +const _MockTemplateVersionFiles: TemplateVersionFiles = { "README.md": "# Example\n\nThis is an example template.", "main.tf": `// Provides info about the workspace. data "coder_workspace" "me" {} @@ -990,6 +994,15 @@ export const MockWorkspaceSubAgent: TypesGen.WorkspaceAgent = { ], }; +const MockWorkspaceUnhealthyAgent: TypesGen.WorkspaceAgent = { + ...MockWorkspaceAgent, + id: "test-workspace-unhealthy-agent", + name: "a-workspace-unhealthy-agent", + status: "timeout", + lifecycle_state: "start_error", + health: { healthy: false }, +}; + export const MockWorkspaceAppStatus: TypesGen.WorkspaceAppStatus = { id: "test-app-status", created_at: "2022-05-17T17:39:01.382927298Z", @@ -1199,7 +1212,7 @@ export const MockWorkspaceResourceMultipleAgents: TypesGen.WorkspaceResource = { ], }; -const MockWorkspaceResourceHidden: TypesGen.WorkspaceResource = { +const _MockWorkspaceResourceHidden: TypesGen.WorkspaceResource = { ...MockWorkspaceResource, id: "test-workspace-resource-hidden", name: "workspace-resource-hidden", @@ -1242,7 +1255,7 @@ export const MockWorkspaceContainerResource: TypesGen.WorkspaceResource = { daily_cost: 0, }; -const MockWorkspaceAutostartDisabled: TypesGen.UpdateWorkspaceAutostartRequest = +const _MockWorkspaceAutostartDisabled: TypesGen.UpdateWorkspaceAutostartRequest = { schedule: "", }; @@ -1441,6 +1454,20 @@ export const MockStoppingWorkspace: TypesGen.Workspace = { status: "stopping", }, }; +export const MockUnhealthyWorkspace: TypesGen.Workspace = { + ...MockWorkspace, + id: "test-unhealthy-workspace", + health: { + healthy: false, + failing_agents: [MockWorkspaceUnhealthyAgent.id], + }, + latest_build: { + ...MockWorkspace.latest_build, + resources: [ + { ...MockWorkspaceResource, agents: [MockWorkspaceUnhealthyAgent] }, + ], + }, +}; export const MockStartingWorkspace: TypesGen.Workspace = { ...MockWorkspace, id: "test-starting-workspace", @@ -1552,7 +1579,7 @@ export const MockOutdatedStoppedWorkspaceRequireActiveVersion: TypesGen.Workspac }, }; -const MockOutdatedStoppedWorkspaceAlwaysUpdate: TypesGen.Workspace = { +const _MockOutdatedStoppedWorkspaceAlwaysUpdate: TypesGen.Workspace = { ...MockOutdatedRunningWorkspaceAlwaysUpdate, latest_build: { ...MockWorkspaceBuild, @@ -1581,7 +1608,7 @@ export const MockWorkspacesResponse: TypesGen.WorkspacesResponse = { count: 26, }; -const MockWorkspacesResponseWithDeletions = { +const _MockWorkspacesResponseWithDeletions = { workspaces: [...MockWorkspacesResponse.workspaces, MockWorkspaceWithDeletion], count: MockWorkspacesResponse.count + 1, }; @@ -1736,7 +1763,7 @@ export const MockWorkspaceRichParametersRequest: TypesGen.CreateWorkspaceRequest ], }; -const MockUserAgent = { +const _MockUserAgent = { browser: "Chrome 99.0.4844", device: "Other", ip_address: "11.22.33.44", @@ -2418,7 +2445,7 @@ export const MockEntitlements: TypesGen.Entitlements = { refreshed_at: "2022-05-20T16:45:57.122Z", }; -const MockEntitlementsWithWarnings: TypesGen.Entitlements = { +const _MockEntitlementsWithWarnings: TypesGen.Entitlements = { errors: [], warnings: ["You are over your active user limit.", "And another thing."], has_license: true, @@ -2488,7 +2515,7 @@ export const MockEntitlementsWithScheduling: TypesGen.Entitlements = { }), }; -const MockEntitlementsWithUserLimit: TypesGen.Entitlements = { +const _MockEntitlementsWithUserLimit: TypesGen.Entitlements = { errors: [], warnings: [], has_license: true, @@ -2665,7 +2692,7 @@ export const MockAuditLogGitSSH: TypesGen.AuditLog = { }, }; -const MockAuditOauthConvert: TypesGen.AuditLog = { +const _MockAuditOauthConvert: TypesGen.AuditLog = { ...MockAuditLog, resource_type: "convert_login", resource_target: "oidc", @@ -3109,20 +3136,197 @@ export const MockPreviewParameter: TypesGen.PreviewParameter = { display_name: "Parameter 1", description: "This is a parameter", type: "string", - mutable: true, form_type: "input", - validations: [], - value: { valid: true, value: "" }, - diagnostics: [], - options: [], + mutable: true, ephemeral: false, required: true, + value: { valid: true, value: "" }, + default_value: { valid: true, value: "" }, + options: [], + validations: [], + diagnostics: [], icon: "", styling: {}, - default_value: { valid: true, value: "" }, order: 0, }; +export const MockDropdownParameter: TypesGen.PreviewParameter = { + ...MockPreviewParameter, + name: "instance_type", + display_name: "Instance Type", + description: "The type of instance to create", + form_type: "dropdown", + default_value: { value: "t3.micro", valid: true }, + options: [ + { + name: "t3.micro", + description: "Micro instance", + value: { value: "t3.micro", valid: true }, + icon: "", + }, + { + name: "t3.small", + description: "Small instance", + value: { value: "t3.small", valid: true }, + icon: "", + }, + { + name: "t3.medium", + description: "Medium instance", + value: { value: "t3.medium", valid: true }, + icon: "", + }, + ], + styling: { + placeholder: "", + disabled: false, + label: "", + }, + order: 1, +}; + +const MockTagSelectParameter: TypesGen.PreviewParameter = { + ...MockPreviewParameter, + name: "tags", + display_name: "Tags", + description: "Resource tags", + type: "list(string)", + form_type: "tag-select", + required: false, + value: { value: "[]", valid: true }, + default_value: { value: "[]", valid: true }, + styling: { + placeholder: "", + disabled: false, + label: "", + }, + order: 4, +}; + +const MockSwitchParameter: TypesGen.PreviewParameter = { + ...MockPreviewParameter, + name: "enable_monitoring", + display_name: "Enable Monitoring", + description: "Enable system monitoring", + type: "bool", + form_type: "switch", + required: false, + value: { value: "true", valid: true }, + default_value: { value: "true", valid: true }, + styling: { + placeholder: "", + disabled: false, + label: "", + }, + order: 3, +}; + +export const MockSliderParameter: TypesGen.PreviewParameter = { + ...MockPreviewParameter, + name: "cpu_count", + display_name: "CPU Count", + description: "Number of CPU cores", + type: "number", + form_type: "slider", + value: { value: "2", valid: true }, + default_value: { value: "2", valid: true }, + styling: { + placeholder: "", + disabled: false, + label: "", + }, + order: 2, +}; + +const MockMultiSelectParameter: TypesGen.PreviewParameter = { + ...MockPreviewParameter, + name: "ides", + display_name: "IDEs", + description: "Enabled IDEs", + type: "list(string)", + form_type: "multi-select", + required: false, + value: { value: "[]", valid: true }, + default_value: { value: "[]", valid: true }, + options: [ + { + name: "vscode", + description: "Visual Studio Code", + value: { value: "vscode", valid: true }, + icon: "", + }, + { + name: "cursor", + description: "Cursor", + value: { value: "cursor", valid: true }, + icon: "", + }, + { + name: "goland", + description: "Goland", + value: { value: "goland", valid: true }, + icon: "", + }, + { + name: "windsurf", + description: "Windsurf", + value: { value: "windsurf", valid: true }, + icon: "", + }, + ], + order: 5, +}; + +export const MockValidationParameter: TypesGen.PreviewParameter = { + ...MockPreviewParameter, + name: "invalid_number", + display_name: "Invalid Parameter", + description: "Number parameter with validation error", + type: "number", + form_type: "input", + value: { value: "50", valid: true }, + default_value: { value: "50", valid: true }, + validations: [ + { + validation_error: "Number must be between 0 and 100", + validation_regex: null, + validation_min: 0, + validation_max: 100, + validation_monotonic: null, + }, + ], + order: 1, +}; + +export const MockDynamicParametersResponse: TypesGen.DynamicParametersResponse = + { + id: 1, + parameters: [ + MockDropdownParameter, + MockSliderParameter, + MockSwitchParameter, + MockTagSelectParameter, + MockMultiSelectParameter, + ], + diagnostics: [], + }; + +export const MockDynamicParametersResponseWithError: TypesGen.DynamicParametersResponse = + { + id: 2, + parameters: [MockDropdownParameter], + diagnostics: [ + { + severity: "error", + summary: "Validation failed", + detail: "The selected instance type is not available in this region", + extra: { + code: "", + }, + }, + ], + }; + export const MockTemplateVersionExternalAuthGithub: TypesGen.TemplateVersionExternalAuth = { id: "github", @@ -4402,6 +4606,32 @@ export const MockNotificationTemplates: TypesGen.NotificationTemplate[] = [ kind: "system", enabled_by_default: true, }, + { + id: "template-event-1", + name: "Template Version Created", + title_template: 'Template version "{{.Labels.version_name}}" created', + body_template: + 'Hi {{.UserName}}\nA new version of template "{{.Labels.template_name}}" has been created.', + actions: + '[{"url": "{{ base_url }}/templates/{{.Labels.template_name}}", "label": "View template"}]', + group: "Template Events", + method: "smtp", + kind: "system", + enabled_by_default: true, + }, + { + id: "template-event-2", + name: "Template Updated", + title_template: 'Template "{{.Labels.template_name}}" updated', + body_template: + 'Hi {{.UserName}}\nTemplate "{{.Labels.template_name}}" has been updated.', + actions: + '[{"url": "{{ base_url }}/templates/{{.Labels.template_name}}", "label": "View template"}]', + group: "Template Events", + method: "webhook", + kind: "system", + enabled_by_default: true, + }, ]; export const MockNotificationMethodsResponse: TypesGen.NotificationMethodsResponse = @@ -4577,6 +4807,8 @@ export const MockPresets: TypesGen.Preset[] = [ { ID: "preset-1", Name: "Development", + Description: "", + Icon: "", Parameters: [ { Name: "cpu", Value: "4" }, { Name: "memory", Value: "8GB" }, @@ -4587,6 +4819,8 @@ export const MockPresets: TypesGen.Preset[] = [ { ID: "preset-2", Name: "Testing", + Description: "", + Icon: "", Parameters: [ { Name: "cpu", Value: "2" }, { Name: "memory", Value: "4GB" }, @@ -4597,6 +4831,8 @@ export const MockPresets: TypesGen.Preset[] = [ { ID: "preset-3", Name: "Production", + Description: "", + Icon: "", Parameters: [ { Name: "cpu", Value: "8" }, { Name: "memory", Value: "16GB" }, @@ -4610,6 +4846,8 @@ export const MockAIPromptPresets: TypesGen.Preset[] = [ { ID: "ai-preset-1", Name: "Code Review", + Description: "", + Icon: "", Parameters: [ { Name: "AI Prompt", Value: "Review the code for best practices" }, { Name: "cpu", Value: "4" }, @@ -4621,6 +4859,8 @@ export const MockAIPromptPresets: TypesGen.Preset[] = [ { ID: "ai-preset-2", Name: "Custom Prompt", + Description: "", + Icon: "", Parameters: [ { Name: "cpu", Value: "4" }, { Name: "memory", Value: "8GB" }, diff --git a/site/src/testHelpers/handlers.ts b/site/src/testHelpers/handlers.ts index 3f163a4d3a0e8..1a166ed41eaba 100644 --- a/site/src/testHelpers/handlers.ts +++ b/site/src/testHelpers/handlers.ts @@ -2,7 +2,7 @@ import fs from "node:fs"; import path from "node:path"; import type { CreateWorkspaceBuildRequest } from "api/typesGenerated"; import { permissionChecks } from "modules/permissions"; -import { http, HttpResponse } from "msw"; +import { HttpResponse, http } from "msw"; import * as M from "./entities"; import { MockGroup, MockWorkspaceQuota } from "./entities"; diff --git a/site/src/testHelpers/hooks.tsx b/site/src/testHelpers/hooks.tsx index ddb74dd33a4b1..86d0c1fb8d26a 100644 --- a/site/src/testHelpers/hooks.tsx +++ b/site/src/testHelpers/hooks.tsx @@ -1,11 +1,11 @@ +import { AppProviders } from "App"; import { + act, type RenderHookOptions, type RenderHookResult, - act, renderHook, waitFor, } from "@testing-library/react"; -import { AppProviders } from "App"; import { RequireAuth } from "contexts/auth/RequireAuth"; import { type FC, @@ -15,14 +15,14 @@ import { } from "react"; import type { QueryClient } from "react-query"; import { + createMemoryRouter, type Location, RouterProvider, - createMemoryRouter, useLocation, -} from "react-router-dom"; +} from "react-router"; import { - type RenderWithAuthOptions, createTestQueryClient, + type RenderWithAuthOptions, } from "./renderHelpers"; type RouterLocationSnapshot = Readonly<{ @@ -98,7 +98,7 @@ export async function renderHookWithAuth( }; let forceUpdateRenderHookChildren!: () => void; - let currentRenderHookChildren: ReactNode = undefined; + let currentRenderHookChildren: ReactNode; const InitialRoute: FC = () => { const [, forceRerender] = useReducer((b: boolean) => !b, false); diff --git a/site/src/testHelpers/renderHelpers.tsx b/site/src/testHelpers/renderHelpers.tsx index 3dfb740b6d8f4..c928e376992bd 100644 --- a/site/src/testHelpers/renderHelpers.tsx +++ b/site/src/testHelpers/renderHelpers.tsx @@ -1,12 +1,12 @@ +import { AppProviders } from "App"; import { screen, render as testingLibraryRender, waitFor, } from "@testing-library/react"; -import { AppProviders } from "App"; +import { RequireAuth } from "contexts/auth/RequireAuth"; import type { ProxyProvider } from "contexts/ProxyContext"; import { ThemeOverride } from "contexts/ThemeProvider"; -import { RequireAuth } from "contexts/auth/RequireAuth"; import { DashboardLayout } from "modules/dashboard/DashboardLayout"; import type { DashboardProvider } from "modules/dashboard/DashboardProvider"; import OrganizationSettingsLayout from "modules/management/OrganizationSettingsLayout"; @@ -15,10 +15,10 @@ import { WorkspaceSettingsLayout } from "pages/WorkspaceSettingsPage/WorkspaceSe import type { ReactNode } from "react"; import { QueryClient } from "react-query"; import { + createMemoryRouter, type RouteObject, RouterProvider, - createMemoryRouter, -} from "react-router-dom"; +} from "react-router"; import themes, { DEFAULT_THEME } from "theme"; import { MockUserOwner } from "./entities"; diff --git a/site/src/testHelpers/storybook.tsx b/site/src/testHelpers/storybook.tsx index beceaf099bf92..4561d7b7348c6 100644 --- a/site/src/testHelpers/storybook.tsx +++ b/site/src/testHelpers/storybook.tsx @@ -1,15 +1,15 @@ -import type { StoryContext } from "@storybook/react"; +import type { StoryContext } from "@storybook/react-vite"; import { withDefaultFeatures } from "api/api"; import { getAuthorizationKey } from "api/queries/authCheck"; import { hasFirstUserKey, meKey } from "api/queries/users"; import type { Entitlements } from "api/typesGenerated"; import { GlobalSnackbar } from "components/GlobalSnackbar/GlobalSnackbar"; +import { AuthProvider } from "contexts/auth/AuthProvider"; import { + getPreferredProxy, ProxyContext, type ProxyContextValue, - getPreferredProxy, } from "contexts/ProxyContext"; -import { AuthProvider } from "contexts/auth/AuthProvider"; import { DashboardContext } from "modules/dashboard/DashboardProvider"; import { DeploymentConfigContext } from "modules/management/DeploymentConfigProvider"; import { OrganizationSettingsContext } from "modules/management/OrganizationSettingsLayout"; @@ -106,7 +106,7 @@ export const withWebSocket = (Story: FC, { parameters }: StoryContext) => { }, 0); } - removeEventListener(type: string, callback: CallbackFn) {} + removeEventListener(_type: string, _callback: CallbackFn) {} close() {} } as unknown as typeof window.WebSocket; diff --git a/site/src/testHelpers/websockets.test.ts b/site/src/testHelpers/websockets.test.ts new file mode 100644 index 0000000000000..edd4191cffebe --- /dev/null +++ b/site/src/testHelpers/websockets.test.ts @@ -0,0 +1,186 @@ +import { createMockWebSocket } from "./websockets"; + +describe(createMockWebSocket.name, () => { + it("Throws if URL does not have ws:// or wss:// protocols", () => { + const urls: readonly string[] = [ + "http://www.dog.ceo/roll-over", + "https://www.dog.ceo/roll-over", + ]; + for (const url of urls) { + expect(() => { + void createMockWebSocket(url); + }).toThrow("URL must start with ws:// or wss://"); + } + }); + + it("Sends events from server to socket", () => { + const [socket, server] = createMockWebSocket("wss://www.dog.ceo/shake"); + + const onOpen = jest.fn(); + const onError = jest.fn(); + const onMessage = jest.fn(); + const onClose = jest.fn(); + + socket.addEventListener("open", onOpen); + socket.addEventListener("error", onError); + socket.addEventListener("message", onMessage); + socket.addEventListener("close", onClose); + + const openEvent = new Event("open"); + const errorEvent = new Event("error"); + const messageEvent = new MessageEvent("message"); + const closeEvent = new CloseEvent("close"); + + server.publishOpen(openEvent); + server.publishError(errorEvent); + server.publishMessage(messageEvent); + server.publishClose(closeEvent); + + expect(onOpen).toHaveBeenCalledTimes(1); + expect(onOpen).toHaveBeenCalledWith(openEvent); + + expect(onError).toHaveBeenCalledTimes(1); + expect(onError).toHaveBeenCalledWith(errorEvent); + + expect(onMessage).toHaveBeenCalledTimes(1); + expect(onMessage).toHaveBeenCalledWith(messageEvent); + + expect(onClose).toHaveBeenCalledTimes(1); + expect(onClose).toHaveBeenCalledWith(closeEvent); + }); + + it("Sends JSON data to the socket for message events", () => { + const [socket, server] = createMockWebSocket("wss://www.dog.ceo/wag"); + const onMessage = jest.fn(); + + // Could type this as a special JSON type, but unknown is good enough, + // since any invalid values will throw in the test case + const jsonData: readonly unknown[] = [ + "blah", + 42, + true, + false, + null, + {}, + [], + [{ value: "blah" }, { value: "guh" }, { value: "huh" }], + { + name: "Hershel Layton", + age: 40, + profession: "Puzzle Solver", + sadBackstory: true, + greatVideoGames: true, + }, + ]; + for (const jd of jsonData) { + socket.addEventListener("message", onMessage); + server.publishMessage( + new MessageEvent("message", { data: JSON.stringify(jd) }), + ); + + expect(onMessage).toHaveBeenCalledTimes(1); + expect(onMessage).toHaveBeenCalledWith( + new MessageEvent("message", { data: JSON.stringify(jd) }), + ); + + socket.removeEventListener("message", onMessage); + onMessage.mockClear(); + } + }); + + it("Only registers each socket event handler once", () => { + const [socket, server] = createMockWebSocket("wss://www.dog.ceo/borf"); + + const onOpen = jest.fn(); + const onError = jest.fn(); + const onMessage = jest.fn(); + const onClose = jest.fn(); + + // Do it once + socket.addEventListener("open", onOpen); + socket.addEventListener("error", onError); + socket.addEventListener("message", onMessage); + socket.addEventListener("close", onClose); + + // Do it again with the exact same functions + socket.addEventListener("open", onOpen); + socket.addEventListener("error", onError); + socket.addEventListener("message", onMessage); + socket.addEventListener("close", onClose); + + server.publishOpen(new Event("open")); + server.publishError(new Event("error")); + server.publishMessage(new MessageEvent("message")); + server.publishClose(new CloseEvent("close")); + + expect(onOpen).toHaveBeenCalledTimes(1); + expect(onError).toHaveBeenCalledTimes(1); + expect(onMessage).toHaveBeenCalledTimes(1); + expect(onClose).toHaveBeenCalledTimes(1); + }); + + it("Lets a socket unsubscribe to event types", () => { + const [socket, server] = createMockWebSocket("wss://www.dog.ceo/zoomies"); + + const onOpen = jest.fn(); + const onError = jest.fn(); + const onMessage = jest.fn(); + const onClose = jest.fn(); + + socket.addEventListener("open", onOpen); + socket.addEventListener("error", onError); + socket.addEventListener("message", onMessage); + socket.addEventListener("close", onClose); + + socket.removeEventListener("open", onOpen); + socket.removeEventListener("error", onError); + socket.removeEventListener("message", onMessage); + socket.removeEventListener("close", onClose); + + server.publishOpen(new Event("open")); + server.publishError(new Event("error")); + server.publishMessage(new MessageEvent("message")); + server.publishClose(new CloseEvent("close")); + + expect(onOpen).not.toHaveBeenCalled(); + expect(onError).not.toHaveBeenCalled(); + expect(onMessage).not.toHaveBeenCalled(); + expect(onClose).not.toHaveBeenCalled(); + }); + + it("Renders socket inert after being closed", () => { + const [socket, server] = createMockWebSocket("wss://www.dog.ceo/woof"); + expect(server.isConnectionOpen).toBe(true); + + const onMessage = jest.fn(); + socket.addEventListener("message", onMessage); + + socket.close(); + expect(server.isConnectionOpen).toBe(false); + + server.publishMessage(new MessageEvent("message")); + expect(onMessage).not.toHaveBeenCalled(); + }); + + it("Tracks arguments sent by the mock socket", () => { + const [socket, server] = createMockWebSocket("wss://www.dog.ceo/wan-wan"); + const data = JSON.stringify({ + famousDogs: [ + "snoopy", + "clifford", + "lassie", + "beethoven", + "courage the cowardly dog", + ], + }); + + socket.send(data); + expect(server.clientSentData).toHaveLength(1); + expect(server.clientSentData).toEqual([data]); + + socket.close(); + socket.send(data); + expect(server.clientSentData).toHaveLength(1); + expect(server.clientSentData).toEqual([data]); + }); +}); diff --git a/site/src/testHelpers/websockets.ts b/site/src/testHelpers/websockets.ts new file mode 100644 index 0000000000000..57584cd55e887 --- /dev/null +++ b/site/src/testHelpers/websockets.ts @@ -0,0 +1,162 @@ +import type { WebSocketEventType } from "utils/OneWayWebSocket"; + +type SocketSendData = Parameters[0]; + +export type MockWebSocketServer = Readonly<{ + publishMessage: (event: MessageEvent) => void; + publishError: (event: Event) => void; + publishClose: (event: CloseEvent) => void; + publishOpen: (event: Event) => void; + + readonly isConnectionOpen: boolean; + readonly clientSentData: readonly SocketSendData[]; +}>; + +type CallbackStore = { + [K in keyof WebSocketEventMap]: Set<(event: WebSocketEventMap[K]) => void>; +}; + +type MockWebSocket = Omit & { + /** + * A version of the WebSocket `send` method that has been pre-wrapped inside + * a Jest mock. + * + * The Jest mock functionality should be used at a minimum. Basically: + * 1. If you want to check that the mock socket sent something to the mock + * server: call the `send` method as a function, and then check the + * `clientSentData` on `MockWebSocketServer` to see what data got + * received. + * 2. If you need to make sure that the client-side `send` method got called + * at all: you can use the Jest mock functionality, but you should + * probably also be checking `clientSentData` still and making additional + * assertions with it. + * + * Generally, tests should center around whether socket-to-server + * communication was successful, not whether the client-side method was + * called. + */ + send: jest.Mock; +}; + +export function createMockWebSocket( + url: string, + protocol?: string | string[] | undefined, +): readonly [MockWebSocket, MockWebSocketServer] { + if (!url.startsWith("ws://") && !url.startsWith("wss://")) { + throw new Error("URL must start with ws:// or wss://"); + } + + const activeProtocol = Array.isArray(protocol) + ? protocol.join(" ") + : (protocol ?? ""); + + let isOpen = true; + const store: CallbackStore = { + message: new Set(), + error: new Set(), + close: new Set(), + open: new Set(), + }; + + const sentData: SocketSendData[] = []; + + const mockSocket: MockWebSocket = { + CONNECTING: 0, + OPEN: 1, + CLOSING: 2, + CLOSED: 3, + + url, + protocol: activeProtocol, + readyState: 1, + binaryType: "blob", + bufferedAmount: 0, + extensions: "", + onclose: null, + onerror: null, + onmessage: null, + onopen: null, + dispatchEvent: jest.fn(), + + send: jest.fn((data) => { + if (!isOpen) { + return; + } + sentData.push(data); + }), + + addEventListener: ( + eventType: E, + callback: (event: WebSocketEventMap[E]) => void, + ) => { + if (!isOpen) { + return; + } + const subscribers = store[eventType]; + subscribers.add(callback); + }, + + removeEventListener: ( + eventType: E, + callback: (event: WebSocketEventMap[E]) => void, + ) => { + if (!isOpen) { + return; + } + const subscribers = store[eventType]; + subscribers.delete(callback); + }, + + close: () => { + isOpen = false; + }, + }; + + const publisher: MockWebSocketServer = { + get isConnectionOpen() { + return isOpen; + }, + + get clientSentData() { + return [...sentData]; + }, + + publishOpen: (event) => { + if (!isOpen) { + return; + } + for (const sub of store.open) { + sub(event); + } + }, + + publishError: (event) => { + if (!isOpen) { + return; + } + for (const sub of store.error) { + sub(event); + } + }, + + publishMessage: (event) => { + if (!isOpen) { + return; + } + for (const sub of store.message) { + sub(event); + } + }, + + publishClose: (event) => { + if (!isOpen) { + return; + } + for (const sub of store.close) { + sub(event); + } + }, + }; + + return [mockSocket, publisher] as const; +} diff --git a/site/src/theme/dark/mui.ts b/site/src/theme/dark/mui.ts index e0902d857125f..0d3133db1cbb8 100644 --- a/site/src/theme/dark/mui.ts +++ b/site/src/theme/dark/mui.ts @@ -1,4 +1,4 @@ -// biome-ignore lint/nursery/noRestrictedImports: createTheme +// biome-ignore lint/style/noRestrictedImports: createTheme import { createTheme } from "@mui/material/styles"; import { BODY_FONT_FAMILY, borderRadius } from "../constants"; import { components } from "../mui"; diff --git a/site/src/theme/externalImages.ts b/site/src/theme/externalImages.ts index f736e91e7b745..96515725bcfbc 100644 --- a/site/src/theme/externalImages.ts +++ b/site/src/theme/externalImages.ts @@ -142,6 +142,7 @@ export function getExternalImageStylesFromUrl( */ export const defaultParametersForBuiltinIcons = new Map([ ["/icon/apple-black.svg", "monochrome"], + ["/icon/auggie.svg", "monochrome"], ["/icon/aws.png", "whiteWithColor&brightness=1.5"], ["/icon/aws.svg", "blackWithColor&brightness=1.5"], ["/icon/aws-monochrome.svg", "monochrome"], @@ -156,6 +157,7 @@ export const defaultParametersForBuiltinIcons = new Map([ ["/icon/kasmvnc.svg", "whiteWithColor"], ["/icon/kiro.svg", "whiteWithColor"], ["/icon/memory.svg", "monochrome"], + ["/icon/openai.svg", "monochrome"], ["/icon/rust.svg", "monochrome"], ["/icon/terminal.svg", "monochrome"], ["/icon/widgets.svg", "monochrome"], diff --git a/site/src/theme/icons.json b/site/src/theme/icons.json index ec79f1193040e..7c87468411e92 100644 --- a/site/src/theme/icons.json +++ b/site/src/theme/icons.json @@ -7,6 +7,7 @@ "apple-black.svg", "apple-grey.svg", "argo-workflows.svg", + "auggie.svg", "aws-dark.svg", "aws-light.svg", "aws-monochrome.svg", @@ -48,6 +49,7 @@ "folder.svg", "gateway.svg", "gcp.png", + "gemini.svg", "git.svg", "gitea.svg", "github.svg", @@ -84,9 +86,11 @@ "nomad.svg", "novnc.svg", "okta.svg", + "openai.svg", "personalize.svg", "php.svg", "phpstorm.svg", + "postgres.svg", "projector.svg", "pycharm.svg", "python.svg", @@ -100,10 +104,12 @@ "rust.svg", "rustrover.svg", "slack.svg", + "sourcegraph-amp.svg", "swift.svg", "tensorflow.svg", "terminal.svg", "theia.svg", + "tmux.svg", "typescript.svg", "ubuntu.svg", "vault.svg", diff --git a/site/src/theme/index.ts b/site/src/theme/index.ts index a36bd9b223e8d..9ffc9b75668e9 100644 --- a/site/src/theme/index.ts +++ b/site/src/theme/index.ts @@ -1,4 +1,4 @@ -// biome-ignore lint/nursery/noRestrictedImports: We still use `Theme` as a basis for our actual theme, for now. +// biome-ignore lint/style/noRestrictedImports: We still use `Theme` as a basis for our actual theme, for now. import type { Theme as MuiTheme } from "@mui/material/styles"; import type * as monaco from "monaco-editor"; import type { Branding } from "./branding"; diff --git a/site/src/theme/light/mui.ts b/site/src/theme/light/mui.ts index 179297c132f0d..8092b5f8cbe80 100644 --- a/site/src/theme/light/mui.ts +++ b/site/src/theme/light/mui.ts @@ -1,4 +1,4 @@ -// biome-ignore lint/nursery/noRestrictedImports: createTheme +// biome-ignore lint/style/noRestrictedImports: createTheme import { createTheme } from "@mui/material/styles"; import { BODY_FONT_FAMILY, borderRadius } from "../constants"; import { components } from "../mui"; diff --git a/site/src/theme/mui.ts b/site/src/theme/mui.ts index 346ca90bcd04c..fc20b0a9f9be1 100644 --- a/site/src/theme/mui.ts +++ b/site/src/theme/mui.ts @@ -1,6 +1,6 @@ -// biome-ignore lint/nursery/noRestrictedImports: we use the classes for customization +// biome-ignore lint/style/noRestrictedImports: we use the classes for customization import { alertClasses } from "@mui/material/Alert"; -// biome-ignore lint/nursery/noRestrictedImports: we use the MUI theme as a base +// biome-ignore lint/style/noRestrictedImports: we use the MUI theme as a base import type { ThemeOptions } from "@mui/material/styles"; import { BODY_FONT_FAMILY, @@ -12,18 +12,6 @@ import { } from "./constants"; import tw from "./tailwindColors"; -type PaletteIndex = - | "primary" - | "secondary" - | "background" - | "text" - | "error" - | "warning" - | "info" - | "success" - | "action" - | "neutral"; - // biome-ignore lint/suspicious/noExplicitAny: needed for MUI overrides type MuiStyle = any; diff --git a/site/src/theme/roles.ts b/site/src/theme/roles.ts index b83bd6ad15f09..702cebf1ad158 100644 --- a/site/src/theme/roles.ts +++ b/site/src/theme/roles.ts @@ -1,9 +1,5 @@ export type ThemeRole = keyof Roles; -type InteractiveThemeRole = keyof { - [K in keyof Roles as Roles[K] extends InteractiveRole ? K : never]: unknown; -}; - export interface Roles { /** Something is wrong; either unexpectedly, or in a meaningful way. */ error: Role; diff --git a/site/src/utils/OneWayWebSocket.test.ts b/site/src/utils/OneWayWebSocket.test.ts index c6b00b593111f..3a4b954145f99 100644 --- a/site/src/utils/OneWayWebSocket.test.ts +++ b/site/src/utils/OneWayWebSocket.test.ts @@ -8,144 +8,10 @@ */ import { - type OneWayMessageEvent, - OneWayWebSocket, - type WebSocketEventType, -} from "./OneWayWebSocket"; - -type MockPublisher = Readonly<{ - publishMessage: (event: MessageEvent) => void; - publishError: (event: ErrorEvent) => void; - publishClose: (event: CloseEvent) => void; - publishOpen: (event: Event) => void; -}>; - -function createMockWebSocket( - url: string, - protocols?: string | string[], -): readonly [WebSocket, MockPublisher] { - type EventMap = { - message: MessageEvent; - error: ErrorEvent; - close: CloseEvent; - open: Event; - }; - type CallbackStore = { - [K in keyof EventMap]: ((event: EventMap[K]) => void)[]; - }; - - let activeProtocol: string; - if (Array.isArray(protocols)) { - activeProtocol = protocols[0] ?? ""; - } else if (typeof protocols === "string") { - activeProtocol = protocols; - } else { - activeProtocol = ""; - } - - let closed = false; - const store: CallbackStore = { - message: [], - error: [], - close: [], - open: [], - }; - - const mockSocket: WebSocket = { - CONNECTING: 0, - OPEN: 1, - CLOSING: 2, - CLOSED: 3, - - url, - protocol: activeProtocol, - readyState: 1, - binaryType: "blob", - bufferedAmount: 0, - extensions: "", - onclose: null, - onerror: null, - onmessage: null, - onopen: null, - send: jest.fn(), - dispatchEvent: jest.fn(), - - addEventListener: ( - eventType: E, - callback: WebSocketEventMap[E], - ) => { - if (closed) { - return; - } - - const subscribers = store[eventType]; - const cb = callback as unknown as CallbackStore[E][0]; - if (!subscribers.includes(cb)) { - subscribers.push(cb); - } - }, - - removeEventListener: ( - eventType: E, - callback: WebSocketEventMap[E], - ) => { - if (closed) { - return; - } - - const subscribers = store[eventType]; - const cb = callback as unknown as CallbackStore[E][0]; - if (subscribers.includes(cb)) { - const updated = store[eventType].filter((c) => c !== cb); - store[eventType] = updated as unknown as CallbackStore[E]; - } - }, - - close: () => { - closed = true; - }, - }; - - const publisher: MockPublisher = { - publishOpen: (event) => { - if (closed) { - return; - } - for (const sub of store.open) { - sub(event); - } - }, - - publishError: (event) => { - if (closed) { - return; - } - for (const sub of store.error) { - sub(event); - } - }, - - publishMessage: (event) => { - if (closed) { - return; - } - for (const sub of store.message) { - sub(event); - } - }, - - publishClose: (event) => { - if (closed) { - return; - } - for (const sub of store.close) { - sub(event); - } - }, - }; - - return [mockSocket, publisher] as const; -} + createMockWebSocket, + type MockWebSocketServer, +} from "testHelpers/websockets"; +import { type OneWayMessageEvent, OneWayWebSocket } from "./OneWayWebSocket"; describe(OneWayWebSocket.name, () => { const dummyRoute = "/api/v2/blah"; @@ -167,12 +33,12 @@ describe(OneWayWebSocket.name, () => { }); it("Lets a consumer add an event listener of each type", () => { - let publisher!: MockPublisher; + let mockServer!: MockWebSocketServer; const oneWay = new OneWayWebSocket({ apiRoute: dummyRoute, websocketInit: (url, protocols) => { - const [socket, pub] = createMockWebSocket(url, protocols); - publisher = pub; + const [socket, server] = createMockWebSocket(url, protocols); + mockServer = server; return socket; }, }); @@ -187,14 +53,14 @@ describe(OneWayWebSocket.name, () => { oneWay.addEventListener("error", onError); oneWay.addEventListener("message", onMessage); - publisher.publishOpen(new Event("open")); - publisher.publishClose(new CloseEvent("close")); - publisher.publishError( + mockServer.publishOpen(new Event("open")); + mockServer.publishClose(new CloseEvent("close")); + mockServer.publishError( new ErrorEvent("error", { error: new Error("Whoops - connection broke"), }), ); - publisher.publishMessage( + mockServer.publishMessage( new MessageEvent("message", { data: "null", }), @@ -207,12 +73,12 @@ describe(OneWayWebSocket.name, () => { }); it("Lets a consumer remove an event listener of each type", () => { - let publisher!: MockPublisher; + let mockServer!: MockWebSocketServer; const oneWay = new OneWayWebSocket({ apiRoute: dummyRoute, websocketInit: (url, protocols) => { - const [socket, pub] = createMockWebSocket(url, protocols); - publisher = pub; + const [socket, server] = createMockWebSocket(url, protocols); + mockServer = server; return socket; }, }); @@ -232,14 +98,14 @@ describe(OneWayWebSocket.name, () => { oneWay.removeEventListener("error", onError); oneWay.removeEventListener("message", onMessage); - publisher.publishOpen(new Event("open")); - publisher.publishClose(new CloseEvent("close")); - publisher.publishError( + mockServer.publishOpen(new Event("open")); + mockServer.publishClose(new CloseEvent("close")); + mockServer.publishError( new ErrorEvent("error", { error: new Error("Whoops - connection broke"), }), ); - publisher.publishMessage( + mockServer.publishMessage( new MessageEvent("message", { data: "null", }), @@ -252,12 +118,12 @@ describe(OneWayWebSocket.name, () => { }); it("Only calls each callback once if callback is added multiple times", () => { - let publisher!: MockPublisher; + let mockServer!: MockWebSocketServer; const oneWay = new OneWayWebSocket({ apiRoute: dummyRoute, websocketInit: (url, protocols) => { - const [socket, pub] = createMockWebSocket(url, protocols); - publisher = pub; + const [socket, server] = createMockWebSocket(url, protocols); + mockServer = server; return socket; }, }); @@ -274,14 +140,14 @@ describe(OneWayWebSocket.name, () => { oneWay.addEventListener("message", onMessage); } - publisher.publishOpen(new Event("open")); - publisher.publishClose(new CloseEvent("close")); - publisher.publishError( + mockServer.publishOpen(new Event("open")); + mockServer.publishClose(new CloseEvent("close")); + mockServer.publishError( new ErrorEvent("error", { error: new Error("Whoops - connection broke"), }), ); - publisher.publishMessage( + mockServer.publishMessage( new MessageEvent("message", { data: "null", }), @@ -294,12 +160,12 @@ describe(OneWayWebSocket.name, () => { }); it("Lets consumers register multiple callbacks for each event type", () => { - let publisher!: MockPublisher; + let mockServer!: MockWebSocketServer; const oneWay = new OneWayWebSocket({ apiRoute: dummyRoute, websocketInit: (url, protocols) => { - const [socket, pub] = createMockWebSocket(url, protocols); - publisher = pub; + const [socket, server] = createMockWebSocket(url, protocols); + mockServer = server; return socket; }, }); @@ -322,14 +188,14 @@ describe(OneWayWebSocket.name, () => { oneWay.addEventListener("error", onError2); oneWay.addEventListener("message", onMessage2); - publisher.publishOpen(new Event("open")); - publisher.publishClose(new CloseEvent("close")); - publisher.publishError( + mockServer.publishOpen(new Event("open")); + mockServer.publishClose(new CloseEvent("close")); + mockServer.publishError( new ErrorEvent("error", { error: new Error("Whoops - connection broke"), }), ); - publisher.publishMessage( + mockServer.publishMessage( new MessageEvent("message", { data: "null", }), @@ -375,12 +241,12 @@ describe(OneWayWebSocket.name, () => { }); it("Gives consumers pre-parsed versions of message events", () => { - let publisher!: MockPublisher; + let mockServer!: MockWebSocketServer; const oneWay = new OneWayWebSocket({ apiRoute: dummyRoute, websocketInit: (url, protocols) => { - const [socket, pub] = createMockWebSocket(url, protocols); - publisher = pub; + const [socket, server] = createMockWebSocket(url, protocols); + mockServer = server; return socket; }, }); @@ -396,7 +262,7 @@ describe(OneWayWebSocket.name, () => { data: JSON.stringify(payload), }); - publisher.publishMessage(event); + mockServer.publishMessage(event); expect(onMessage).toHaveBeenCalledWith({ sourceEvent: event, parsedMessage: payload, @@ -405,12 +271,12 @@ describe(OneWayWebSocket.name, () => { }); it("Exposes parsing error if message payload could not be parsed as JSON", () => { - let publisher!: MockPublisher; + let mockServer!: MockWebSocketServer; const oneWay = new OneWayWebSocket({ apiRoute: dummyRoute, websocketInit: (url, protocols) => { - const [socket, pub] = createMockWebSocket(url, protocols); - publisher = pub; + const [socket, server] = createMockWebSocket(url, protocols); + mockServer = server; return socket; }, }); @@ -422,7 +288,7 @@ describe(OneWayWebSocket.name, () => { const event = new MessageEvent("message", { data: payload, }); - publisher.publishMessage(event); + mockServer.publishMessage(event); const arg: OneWayMessageEvent = onMessage.mock.lastCall[0]; expect(arg.sourceEvent).toEqual(event); diff --git a/site/src/utils/dormant.test.ts b/site/src/utils/dormant.test.ts index 9f52ffafa3ade..ff4b935df5f3d 100644 --- a/site/src/utils/dormant.test.ts +++ b/site/src/utils/dormant.test.ts @@ -1,5 +1,5 @@ -import type * as TypesGen from "api/typesGenerated"; import * as Mocks from "testHelpers/entities"; +import type * as TypesGen from "api/typesGenerated"; import { displayDormantDeletion } from "./dormant"; describe("displayDormantDeletion", () => { diff --git a/site/src/utils/ellipsizeText.test.ts b/site/src/utils/ellipsizeText.test.ts deleted file mode 100644 index bc6e7752214e3..0000000000000 --- a/site/src/utils/ellipsizeText.test.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { ellipsizeText } from "./ellipsizeText"; -import type { Nullable } from "./nullable"; - -describe("ellipsizeText", () => { - it.each([ - [undefined, 10, undefined], - [null, 10, undefined], - ["", 10, ""], - ["Hello World", "Hello World".length, "Hello World"], - ["Hello World", "Hello...".length, "Hello..."], - ])( - "ellipsizeText(%p, %p) returns %p", - ( - str: Nullable, - maxLength: number | undefined, - output: Nullable, - ) => { - expect(ellipsizeText(str, maxLength)).toBe(output); - }, - ); -}); diff --git a/site/src/utils/ellipsizeText.ts b/site/src/utils/ellipsizeText.ts deleted file mode 100644 index 6291ed61732dc..0000000000000 --- a/site/src/utils/ellipsizeText.ts +++ /dev/null @@ -1,14 +0,0 @@ -import type { Nullable } from "./nullable"; - -/** Truncates and ellipsizes text if it's longer than maxLength */ -export const ellipsizeText = ( - text: Nullable, - maxLength = 80, -): string | undefined => { - if (typeof text !== "string") { - return; - } - return text.length <= maxLength - ? text - : `${text.substr(0, maxLength - 3)}...`; -}; diff --git a/site/src/utils/filetree.test.ts b/site/src/utils/filetree.test.ts index e4aadaabbe424..f7e3eb48f3ae7 100644 --- a/site/src/utils/filetree.test.ts +++ b/site/src/utils/filetree.test.ts @@ -1,7 +1,7 @@ import { - type FileTree, createFile, existsFile, + type FileTree, getFileContent, isFolder, moveFile, diff --git a/site/src/utils/formUtils.stories.tsx b/site/src/utils/formUtils.stories.tsx index 7de6b160ee32d..281783b184ad8 100644 --- a/site/src/utils/formUtils.stories.tsx +++ b/site/src/utils/formUtils.stories.tsx @@ -1,9 +1,9 @@ import TextField from "@mui/material/TextField"; -import { action } from "@storybook/addon-actions"; -import type { Meta, StoryObj } from "@storybook/react"; +import type { Meta, StoryObj } from "@storybook/react-vite"; import { Form } from "components/Form/Form"; import { useFormik } from "formik"; import type { FC } from "react"; +import { action } from "storybook/actions"; import { getFormHelpers } from "./formUtils"; interface ExampleFormProps { diff --git a/site/src/utils/formUtils.test.ts b/site/src/utils/formUtils.test.ts index c009b38dd929e..2b3d6b3df1b0f 100644 --- a/site/src/utils/formUtils.test.ts +++ b/site/src/utils/formUtils.test.ts @@ -1,5 +1,5 @@ -import type { FormikContextType } from "formik/dist/types"; import { mockApiError } from "testHelpers/entities"; +import type { FormikContextType } from "formik/dist/types"; import { getFormHelpers, nameValidator, onChangeTrimmed } from "./formUtils"; interface TestType { diff --git a/site/src/utils/nullable.ts b/site/src/utils/nullable.ts deleted file mode 100644 index 6a9361e5034f5..0000000000000 --- a/site/src/utils/nullable.ts +++ /dev/null @@ -1,5 +0,0 @@ -/** - * A Nullable may be its concrete type, `null` or `undefined` - * @remark Exact opposite of the native TS type NonNullable - */ -export type Nullable = null | undefined | T; diff --git a/site/src/utils/portForward.ts b/site/src/utils/portForward.ts index 448c521155ac2..78dae8c1543ff 100644 --- a/site/src/utils/portForward.ts +++ b/site/src/utils/portForward.ts @@ -65,7 +65,7 @@ export const openMaybePortForwardedURL = ( open( portForwardURL( proxyHost, - Number.parseInt(url.port), + Number.parseInt(url.port, 10), agentName, workspaceName, username, @@ -74,7 +74,7 @@ export const openMaybePortForwardedURL = ( url.search, ), ); - } catch (ex) { + } catch (_ex) { open(uri); } }; diff --git a/site/src/utils/schedule.test.ts b/site/src/utils/schedule.test.ts index cae8d3bda7a47..20289ddddeca6 100644 --- a/site/src/utils/schedule.test.ts +++ b/site/src/utils/schedule.test.ts @@ -1,7 +1,7 @@ +import * as Mocks from "testHelpers/entities"; import type { Workspace } from "api/typesGenerated"; import dayjs from "dayjs"; import duration from "dayjs/plugin/duration"; -import * as Mocks from "testHelpers/entities"; import { deadlineExtensionMax, deadlineExtensionMin, diff --git a/site/src/utils/schedule.tsx b/site/src/utils/schedule.tsx index 763ce78a8867b..31ddd36893653 100644 --- a/site/src/utils/schedule.tsx +++ b/site/src/utils/schedule.tsx @@ -10,7 +10,7 @@ import timezone from "dayjs/plugin/timezone"; import utc from "dayjs/plugin/utc"; import type { WorkspaceActivityStatus } from "modules/workspaces/activity"; import type { ReactNode } from "react"; -import { Link as RouterLink } from "react-router-dom"; +import { Link as RouterLink } from "react-router"; import { isWorkspaceOn } from "./workspace"; // REMARK: some plugins depend on utc, so it's listed first. Otherwise they're diff --git a/site/src/utils/timeZones.ts b/site/src/utils/timeZones.ts index d69574b1ae382..caadd191f0b36 100644 --- a/site/src/utils/timeZones.ts +++ b/site/src/utils/timeZones.ts @@ -1,6 +1,22 @@ +/** + * Ideally the version of tzdata should correspond to the version of the + * timezone database used by the version of Node we're running our tests + * against. For example, Node v20.19.4 and tzdata@1.0.44 both correspond to + * version 2025b of the ICU timezone: + * https://github.com/nodejs/node/blob/v20.19.4/test/fixtures/tz-version.txt + * https://github.com/rogierschouten/tzdata-generate/releases/tag/v1.0.44 + * + * For some reason though, the timezones allowed by `Intl.DateTimeFormat` in + * Node diverged slightly from the timezones present in the tzdata package, + * despite being derived from the same data. Notably, the timezones that we + * filter out below are not allowed by Node as of v20.18.1 and onward–which is + * the version that updated the 20 release line from 2024a to 2024b. + */ import tzData from "tzdata"; -export const timeZones = Object.keys(tzData.zones).sort(); +export const timeZones = Object.keys(tzData.zones) + .filter((it) => it !== "Factory" && it !== "null") + .sort(); export const getPreferredTimezone = () => Intl.DateTimeFormat().resolvedOptions().timeZone; diff --git a/site/src/utils/workspace.test.ts b/site/src/utils/workspace.test.ts index 4e6f4b287fe0e..b534a4b367ab8 100644 --- a/site/src/utils/workspace.test.ts +++ b/site/src/utils/workspace.test.ts @@ -1,6 +1,6 @@ +import * as Mocks from "testHelpers/entities"; import type * as TypesGen from "api/typesGenerated"; import dayjs from "dayjs"; -import * as Mocks from "testHelpers/entities"; import { agentVersionStatus, defaultWorkspaceExtension, diff --git a/site/src/utils/workspace.tsx b/site/src/utils/workspace.tsx index 49e885581497d..3c89ddce6db3f 100644 --- a/site/src/utils/workspace.tsx +++ b/site/src/utils/workspace.tsx @@ -5,8 +5,12 @@ import dayjs from "dayjs"; import duration from "dayjs/plugin/duration"; import minMax from "dayjs/plugin/minMax"; import utc from "dayjs/plugin/utc"; -import { HourglassIcon } from "lucide-react"; -import { CircleAlertIcon, PlayIcon, SquareIcon } from "lucide-react"; +import { + CircleAlertIcon, + HourglassIcon, + PlayIcon, + SquareIcon, +} from "lucide-react"; import semver from "semver"; import { getPendingStatusLabel } from "./provisionerJob"; diff --git a/site/static/icon/auggie.svg b/site/static/icon/auggie.svg new file mode 100644 index 0000000000000..590bd5aa1e62a --- /dev/null +++ b/site/static/icon/auggie.svg @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/site/static/icon/gemini.svg b/site/static/icon/gemini.svg new file mode 100644 index 0000000000000..f1cf357573df0 --- /dev/null +++ b/site/static/icon/gemini.svg @@ -0,0 +1 @@ +Gemini \ No newline at end of file diff --git a/site/static/icon/openai.svg b/site/static/icon/openai.svg new file mode 100644 index 0000000000000..3b4eff961f37e --- /dev/null +++ b/site/static/icon/openai.svg @@ -0,0 +1,2 @@ + +OpenAI icon \ No newline at end of file diff --git a/site/static/icon/postgres.svg b/site/static/icon/postgres.svg new file mode 100644 index 0000000000000..ce8789a76a307 --- /dev/null +++ b/site/static/icon/postgres.svg @@ -0,0 +1 @@ + diff --git a/site/static/icon/sourcegraph-amp.svg b/site/static/icon/sourcegraph-amp.svg new file mode 100644 index 0000000000000..83777bd2d9662 --- /dev/null +++ b/site/static/icon/sourcegraph-amp.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/site/static/icon/tmux.svg b/site/static/icon/tmux.svg new file mode 100644 index 0000000000000..ac0174ed0784a --- /dev/null +++ b/site/static/icon/tmux.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/site/static/kirby.gif b/site/static/kirby.gif new file mode 100644 index 0000000000000..b6fe7e93e1fa1 Binary files /dev/null and b/site/static/kirby.gif differ diff --git a/site/tsconfig.json b/site/tsconfig.json index 7e969d18c42dd..79b406d0f5c13 100644 --- a/site/tsconfig.json +++ b/site/tsconfig.json @@ -7,8 +7,8 @@ "jsx": "react-jsx", "jsxImportSource": "@emotion/react", "lib": ["dom", "dom.iterable", "esnext"], - "module": "esnext", - "moduleResolution": "node", + "module": "preserve", + "moduleResolution": "bundler", "noEmit": true, "outDir": "build/", "preserveWatchOutput": true, @@ -16,9 +16,9 @@ "skipLibCheck": true, "strict": true, "target": "es2020", + "types": ["jest", "node", "react", "react-dom", "vite/client"], "baseUrl": "src/" }, "include": ["**/*.ts", "**/*.tsx"], - "exclude": ["node_modules/", "_jest"], - "types": ["@emotion/react", "@testing-library/jest-dom", "jest", "node"] + "exclude": ["node_modules/"] } diff --git a/site/tsconfig.test.json b/site/tsconfig.test.json deleted file mode 100644 index c6f5e679af857..0000000000000 --- a/site/tsconfig.test.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "extends": "./tsconfig.json", - "exclude": ["node_modules", "_jest"], - "include": ["**/*.stories.tsx", "**/*.test.tsx", "**/*.d.ts"] -} diff --git a/site/vite.config.mts b/site/vite.config.mts index e6a30aa71744e..d2da0a1a93752 100644 --- a/site/vite.config.mts +++ b/site/vite.config.mts @@ -1,7 +1,7 @@ import * as path from "node:path"; import react from "@vitejs/plugin-react"; import { visualizer } from "rollup-plugin-visualizer"; -import { type PluginOption, defineConfig } from "vite"; +import { defineConfig, type PluginOption } from "vite"; import checker from "vite-plugin-checker"; const plugins: PluginOption[] = [ @@ -89,7 +89,7 @@ export default defineConfig({ // Vite does not catch socket errors, and stops the webserver. // As /logs endpoint can return HTTP 4xx status, we need to embrace // Vite with a custom error handler to prevent from quitting. - proxy.on("proxyReqWs", (proxyReq, req, socket) => { + proxy.on("proxyReqWs", (proxyReq, _req, socket) => { if (process.env.NODE_ENV === "development") { proxyReq.setHeader( "origin", diff --git a/support/support.go b/support/support.go index 2fa41ce7eca8c..31080faaf023b 100644 --- a/support/support.go +++ b/support/support.go @@ -390,7 +390,7 @@ func connectedAgentInfo(ctx context.Context, client *codersdk.Client, log slog.L if err := conn.Close(); err != nil { log.Error(ctx, "failed to close agent connection", slog.Error(err)) } - <-conn.Closed() + <-conn.TailnetConn().Closed() } eg.Go(func() error { @@ -399,7 +399,7 @@ func connectedAgentInfo(ctx context.Context, client *codersdk.Client, log slog.L return xerrors.Errorf("create request: %w", err) } rr := httptest.NewRecorder() - conn.MagicsockServeHTTPDebug(rr, req) + conn.TailnetConn().MagicsockServeHTTPDebug(rr, req) a.ClientMagicsockHTML = rr.Body.Bytes() return nil }) diff --git a/tailnet/conn.go b/tailnet/conn.go index e23e0ae04b0d5..709d5b2958453 100644 --- a/tailnet/conn.go +++ b/tailnet/conn.go @@ -102,17 +102,6 @@ type Options struct { BlockEndpoints bool Logger slog.Logger ListenPort uint16 - // UseSoftNetIsolation enables our homemade soft isolation feature in the - // netns package. This option will only be considered if TUNDev is set. - // - // The Coder soft isolation mode is a workaround to allow Coder Connect to - // connect to Coder servers behind corporate VPNs, and relaxes some of the - // loop protections that come with Tailscale. - // - // When soft isolation is disabled, the netns package will function as - // normal and route all traffic through the default interface (and block all - // traffic to other VPN interfaces) on macOS and Windows. - UseSoftNetIsolation bool // CaptureHook is a callback that captures Disco packets and packets sent // into the tailnet tunnel. @@ -169,10 +158,13 @@ func NewConn(options *Options) (conn *Conn, err error) { } useNetNS := options.TUNDev != nil - useSoftIsolation := useNetNS && options.UseSoftNetIsolation - options.Logger.Debug(context.Background(), "network isolation configuration", slog.F("use_netns", useNetNS), slog.F("use_soft_isolation", useSoftIsolation)) + options.Logger.Debug(context.Background(), "network isolation configuration", slog.F("use_netns", useNetNS)) netns.SetEnabled(useNetNS) - netns.SetCoderSoftIsolation(useSoftIsolation) + // The Coder soft isolation mode is a workaround to allow Coder Connect to + // connect to Coder servers behind corporate VPNs, and relaxes some of the + // loop protections that come with Tailscale. + // See the comment above the netns function for more details. + netns.SetCoderSoftIsolation(useNetNS) var telemetryStore *TelemetryStore if options.TelemetrySink != nil { diff --git a/tailnet/tailnettest/tailnettest.go b/tailnet/tailnettest/tailnettest.go index 89327cddd8417..50b83aaf4f512 100644 --- a/tailnet/tailnettest/tailnettest.go +++ b/tailnet/tailnettest/tailnettest.go @@ -45,7 +45,7 @@ func DERPIsEmbedded(cfg *derpAndSTUNCfg) { } // RunDERPAndSTUN creates a DERP mapping for tests. -func RunDERPAndSTUN(t *testing.T, opts ...DERPAndStunOption) (*tailcfg.DERPMap, *derp.Server) { +func RunDERPAndSTUN(t testing.TB, opts ...DERPAndStunOption) (*tailcfg.DERPMap, *derp.Server) { cfg := new(derpAndSTUNCfg) for _, o := range opts { o(cfg) diff --git a/testutil/ctx.go b/testutil/ctx.go index e23c48da85722..acbf14e5bb6c8 100644 --- a/testutil/ctx.go +++ b/testutil/ctx.go @@ -6,7 +6,7 @@ import ( "time" ) -func Context(t *testing.T, dur time.Duration) context.Context { +func Context(t testing.TB, dur time.Duration) context.Context { ctx, cancel := context.WithTimeout(context.Background(), dur) t.Cleanup(cancel) return ctx diff --git a/testutil/faker.go b/testutil/faker.go new file mode 100644 index 0000000000000..a984e2aa58223 --- /dev/null +++ b/testutil/faker.go @@ -0,0 +1,67 @@ +package testutil + +import ( + "reflect" + "testing" + + "github.com/brianvoe/gofakeit/v7" + "github.com/stretchr/testify/require" +) + +// Fake will populate any zero fields in the provided struct with fake data. +// Non-zero fields will remain unchanged. +// Usage: +// +// key := Fake(t, faker, database.APIKey{ +// TokenName: "keep-my-name", +// }) +func Fake[T any](t *testing.T, faker *gofakeit.Faker, seed T) T { + t.Helper() + + var tmp T + err := faker.Struct(&tmp) + require.NoError(t, err, "failed to generate fake data for type %T", tmp) + + mergeZero(&seed, tmp) + return seed +} + +// mergeZero merges the fields of src into dst, but only if the field in dst is +// currently the zero value. +// Make sure `dst` is a pointer to a struct, otherwise the fields are not assignable. +func mergeZero(dst any, src any) { + srcv := reflect.ValueOf(src) + if srcv.Kind() == reflect.Ptr { + srcv = srcv.Elem() + } + remain := [][2]reflect.Value{ + {reflect.ValueOf(dst).Elem(), srcv}, + } + + // Traverse the struct fields and set them only if they are currently zero. + // This is a breadth-first traversal of the struct fields. Struct definitions + // Should not be that deep, so we should not hit any stack overflow issues. + for { + if len(remain) == 0 { + return + } + dv, sv := remain[0][0], remain[0][1] + remain = remain[1:] // + for i := 0; i < dv.NumField(); i++ { + df := dv.Field(i) + sf := sv.Field(i) + if !df.CanSet() { + continue + } + if df.IsZero() { // only write if currently zero + df.Set(sf) + continue + } + + if dv.Field(i).Kind() == reflect.Struct { + // If the field is a struct, we need to traverse it as well. + remain = append(remain, [2]reflect.Value{df, sf}) + } + } + } +} diff --git a/testutil/faker_test.go b/testutil/faker_test.go new file mode 100644 index 0000000000000..b4a2dd53ca343 --- /dev/null +++ b/testutil/faker_test.go @@ -0,0 +1,71 @@ +package testutil_test + +import ( + "testing" + + "github.com/brianvoe/gofakeit/v7" + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/testutil" +) + +type simpleStruct struct { + ID uuid.UUID + Name string + Description string + Age int `fake:"{number:18,60}"` +} + +type nestedStruct struct { + Person simpleStruct + Address string +} + +func TestFake(t *testing.T) { + t.Parallel() + + t.Run("Simple", func(t *testing.T) { + t.Parallel() + + faker := gofakeit.New(0) + person := testutil.Fake(t, faker, simpleStruct{ + Name: "alice", + }) + require.Equal(t, "alice", person.Name) + require.NotEqual(t, uuid.Nil, person.ID) + require.NotEmpty(t, person.Description) + require.Greater(t, person.Age, 17, "Age should be greater than 17") + require.Less(t, person.Age, 61, "Age should be less than 61") + }) + + t.Run("Nested", func(t *testing.T) { + t.Parallel() + + faker := gofakeit.New(0) + person := testutil.Fake(t, faker, nestedStruct{ + Person: simpleStruct{ + Name: "alice", + }, + }) + require.Equal(t, "alice", person.Person.Name) + require.NotEqual(t, uuid.Nil, person.Person.ID) + require.NotEmpty(t, person.Person.Description) + require.Greater(t, person.Person.Age, 17, "Age should be greater than 17") + require.NotEmpty(t, person.Address) + }) + + t.Run("DatabaseType", func(t *testing.T) { + t.Parallel() + + faker := gofakeit.New(0) + id := uuid.New() + key := testutil.Fake(t, faker, database.APIKey{ + UserID: id, + TokenName: "keep-my-name", + }) + require.Equal(t, id, key.UserID) + require.NotEmpty(t, key.TokenName) + }) +} diff --git a/testutil/goleak.go b/testutil/goleak.go index e93c46a04c5f0..ae4ad3e273425 100644 --- a/testutil/goleak.go +++ b/testutil/goleak.go @@ -5,6 +5,9 @@ import "go.uber.org/goleak" // GoleakOptions is a common list of options to pass to goleak. This is useful if there is a known // leaky function we want to exclude from goleak. var GoleakOptions []goleak.Option = []goleak.Option{ + // Go spawns a goroutine to lookup the protocol when run on + // windows. See https://go.dev/src/net/lookup_windows.go#L56 + goleak.IgnoreAnyFunction("net.lookupProtocol.func1"), // seelog (indirect dependency of dd-trace-go) has a known goroutine leak (https://github.com/cihub/seelog/issues/182) // When https://github.com/DataDog/dd-trace-go/issues/2987 is resolved, this can be removed. goleak.IgnoreAnyFunction("github.com/cihub/seelog.(*asyncLoopLogger).processQueue"), diff --git a/testutil/names.go b/testutil/names.go index e53e854fae239..bb804ba2cf400 100644 --- a/testutil/names.go +++ b/testutil/names.go @@ -30,7 +30,7 @@ func GetRandomName(t testing.TB) string { // an underscore. func GetRandomNameHyphenated(t testing.TB) string { t.Helper() - name := namesgenerator.GetRandomName(0) + name := GetRandomName(t) return strings.ReplaceAll(name, "_", "-") } diff --git a/testutil/retry.go b/testutil/retry.go new file mode 100644 index 0000000000000..1555464bad715 --- /dev/null +++ b/testutil/retry.go @@ -0,0 +1,241 @@ +package testutil + +import ( + "context" + "fmt" + "runtime" + "slices" + "sync" + "testing" + "time" +) + +// RunRetry runs a test function up to `count` times, retrying if it fails. If +// all attempts fail or the context is canceled, the test will fail. It is safe +// to use the parent context in the test function, but do note that the context +// deadline will apply to all attempts. +// +// DO NOT USE THIS FUNCTION IN TESTS UNLESS YOU HAVE A GOOD REASON. It should +// only be used in tests that can flake under high load. It is not a replacement +// for writing a good test. +// +// For AI agents like Claude, OpenAI, etc.: do not use this function without +// human approval EVER. +// +// Note that the `testing.TB` supplied to the function is a fake implementation +// for all runs. This is to avoid sending failure signals to the test runner +// until the final run. Unrecovered panics will still always be bubbled up to +// the test runner. +// +// Some functions are not implemented and will panic when using the fake +// implementation: +// - Chdir +// - Setenv +// - Skip, SkipNow, Skipf, Skipped +// - TempDir +// +// Cleanup functions will be executed after each attempt. +func RunRetry(t *testing.T, count int, fn func(t testing.TB)) { + t.Helper() + + for i := 1; i <= count; i++ { + // Canceled in the attempt goroutine before running cleanup functions. + attemptCtx, attemptCancel := context.WithCancel(t.Context()) + attemptT := &fakeT{ + T: t, + ctx: attemptCtx, + name: fmt.Sprintf("%s (attempt %d/%d)", t.Name(), i, count), + } + + // Run the test in a goroutine so we can capture runtime.Goexit() + // and run cleanup functions. + done := make(chan struct{}, 1) + go func() { + defer close(done) + defer func() { + // As per t.Context(), the context is canceled right before + // cleanup functions are executed. + attemptCancel() + attemptT.runCleanupFns() + }() + + t.Logf("testutil.RunRetry: running test: attempt %d/%d", i, count) + fn(attemptT) + }() + + // We don't wait on the context here, because we want to be sure that + // the test function and cleanup functions have finished before + // returning from the test. + <-done + if !attemptT.Failed() { + t.Logf("testutil.RunRetry: test passed on attempt %d/%d", i, count) + return + } + t.Logf("testutil.RunRetry: test failed on attempt %d/%d", i, count) + + // Wait a few seconds in case the test failure was due to system load. + // There's not really a good way to check for this, so we just do it + // every time. + // No point waiting on t.Context() here because it doesn't factor in + // the test deadline, and only gets canceled when the test function + // completes. + time.Sleep(2 * time.Second) + } + t.Fatalf("testutil.RunRetry: all %d attempts failed", count) +} + +// fakeT is a fake implementation of testing.TB that never fails and only logs +// errors. Fatal errors will cause the goroutine to exit without failing the +// test. +// +// The behavior of the fake implementation should be as close as possible to +// the real implementation from the test function's perspective (minus +// intentionally unimplemented methods). +type fakeT struct { + *testing.T + ctx context.Context + name string + + mu sync.Mutex + failed bool + cleanupFns []func() +} + +var _ testing.TB = &fakeT{} + +func (t *fakeT) runCleanupFns() { + t.mu.Lock() + cleanupFns := slices.Clone(t.cleanupFns) + t.mu.Unlock() + + // Execute in LIFO order to match the behavior of *testing.T. + slices.Reverse(cleanupFns) + for _, fn := range cleanupFns { + fn() + } +} + +// Chdir implements testing.TB. +func (*fakeT) Chdir(_ string) { + panic("t.Chdir is not implemented in testutil.RunRetry closures") +} + +// Cleanup implements testing.TB. Cleanup registers a function to be called when +// the test completes. Cleanup functions will be called in last added, first +// called order. +func (t *fakeT) Cleanup(fn func()) { + t.mu.Lock() + defer t.mu.Unlock() + + t.cleanupFns = append(t.cleanupFns, fn) +} + +// Context implements testing.TB. Context returns a context that is canceled +// just before Cleanup-registered functions are called. +func (t *fakeT) Context() context.Context { + return t.ctx +} + +// Error implements testing.TB. Error is equivalent to Log followed by Fail. +func (t *fakeT) Error(args ...any) { + t.T.Helper() + t.T.Log(args...) + t.Fail() +} + +// Errorf implements testing.TB. Errorf is equivalent to Logf followed by Fail. +func (t *fakeT) Errorf(format string, args ...any) { + t.T.Helper() + t.T.Logf(format, args...) + t.Fail() +} + +// Fail implements testing.TB. Fail marks the function as having failed but +// continues execution. +func (t *fakeT) Fail() { + t.T.Helper() + t.mu.Lock() + defer t.mu.Unlock() + t.failed = true + t.T.Log("testutil.RunRetry: t.Fail called in testutil.RunRetry closure") +} + +// FailNow implements testing.TB. FailNow marks the function as having failed +// and stops its execution by calling runtime.Goexit (which then runs all the +// deferred calls in the current goroutine). +func (t *fakeT) FailNow() { + t.T.Helper() + t.mu.Lock() + defer t.mu.Unlock() + t.failed = true + t.T.Log("testutil.RunRetry: t.FailNow called in testutil.RunRetry closure") + runtime.Goexit() +} + +// Failed implements testing.TB. Failed reports whether the function has failed. +func (t *fakeT) Failed() bool { + t.T.Helper() + t.mu.Lock() + defer t.mu.Unlock() + return t.failed +} + +// Fatal implements testing.TB. Fatal is equivalent to Log followed by FailNow. +func (t *fakeT) Fatal(args ...any) { + t.T.Helper() + t.T.Log(args...) + t.FailNow() +} + +// Fatalf implements testing.TB. Fatalf is equivalent to Logf followed by +// FailNow. +func (t *fakeT) Fatalf(format string, args ...any) { + t.T.Helper() + t.T.Logf(format, args...) + t.FailNow() +} + +// Helper is proxied to the original *testing.T. This is to avoid the fake +// method appearing in the call stack. + +// Log is proxied to the original *testing.T. + +// Logf is proxied to the original *testing.T. + +// Name implements testing.TB. +func (t *fakeT) Name() string { + return t.name +} + +// Setenv implements testing.TB. +func (*fakeT) Setenv(_ string, _ string) { + panic("t.Setenv is not implemented in testutil.RunRetry closures") +} + +// Skip implements testing.TB. +func (*fakeT) Skip(_ ...any) { + panic("t.Skip is not implemented in testutil.RunRetry closures") +} + +// SkipNow implements testing.TB. +func (*fakeT) SkipNow() { + panic("t.SkipNow is not implemented in testutil.RunRetry closures") +} + +// Skipf implements testing.TB. +func (*fakeT) Skipf(_ string, _ ...any) { + panic("t.Skipf is not implemented in testutil.RunRetry closures") +} + +// Skipped implements testing.TB. +func (*fakeT) Skipped() bool { + panic("t.Skipped is not implemented in testutil.RunRetry closures") +} + +// TempDir implements testing.TB. +func (*fakeT) TempDir() string { + panic("t.TempDir is not implemented in testutil.RunRetry closures") +} + +// private is proxied to the original *testing.T. It cannot be implemented by +// our fake implementation since it's a private method. diff --git a/vpn/client.go b/vpn/client.go index 8d2115ec2839a..0411b209c24a8 100644 --- a/vpn/client.go +++ b/vpn/client.go @@ -69,14 +69,13 @@ func NewClient() Client { } type Options struct { - Headers http.Header - Logger slog.Logger - UseSoftNetIsolation bool - DNSConfigurator dns.OSConfigurator - Router router.Router - TUNDevice tun.Device - WireguardMonitor *netmon.Monitor - UpdateHandler tailnet.UpdatesHandler + Headers http.Header + Logger slog.Logger + DNSConfigurator dns.OSConfigurator + Router router.Router + TUNDevice tun.Device + WireguardMonitor *netmon.Monitor + UpdateHandler tailnet.UpdatesHandler } type derpMapRewriter struct { @@ -164,7 +163,6 @@ func (*client) NewConn(initCtx context.Context, serverURL *url.URL, token string DERPForceWebSockets: connInfo.DERPForceWebSockets, Logger: options.Logger, BlockEndpoints: connInfo.DisableDirectConnections, - UseSoftNetIsolation: options.UseSoftNetIsolation, DNSConfigurator: options.DNSConfigurator, Router: options.Router, TUNDev: options.TUNDevice, diff --git a/vpn/speaker_internal_test.go b/vpn/speaker_internal_test.go index 5ec5de4a3bf59..433868851a5bc 100644 --- a/vpn/speaker_internal_test.go +++ b/vpn/speaker_internal_test.go @@ -23,7 +23,7 @@ func TestMain(m *testing.M) { goleak.VerifyTestMain(m, testutil.GoleakOptions...) } -const expectedHandshake = "codervpn tunnel 1.3\n" +const expectedHandshake = "codervpn tunnel 1.2\n" // TestSpeaker_RawPeer tests the speaker with a peer that we simulate by directly making reads and // writes to the other end of the pipe. There should be at least one test that does this, rather diff --git a/vpn/tunnel.go b/vpn/tunnel.go index 38d474c33206b..30ee56c2396fa 100644 --- a/vpn/tunnel.go +++ b/vpn/tunnel.go @@ -265,14 +265,13 @@ func (t *Tunnel) start(req *StartRequest) error { svrURL, apiToken, &Options{ - Headers: header, - Logger: t.clientLogger, - UseSoftNetIsolation: req.GetTunnelUseSoftNetIsolation(), - DNSConfigurator: networkingStack.DNSConfigurator, - Router: networkingStack.Router, - TUNDevice: networkingStack.TUNDevice, - WireguardMonitor: networkingStack.WireguardMonitor, - UpdateHandler: t, + Headers: header, + Logger: t.clientLogger, + DNSConfigurator: networkingStack.DNSConfigurator, + Router: networkingStack.Router, + TUNDevice: networkingStack.TUNDevice, + WireguardMonitor: networkingStack.WireguardMonitor, + UpdateHandler: t, }, ) if err != nil { diff --git a/vpn/tunnel_internal_test.go b/vpn/tunnel_internal_test.go index b93b679de332c..c21fd20251282 100644 --- a/vpn/tunnel_internal_test.go +++ b/vpn/tunnel_internal_test.go @@ -2,10 +2,8 @@ package vpn import ( "context" - "encoding/json" "maps" "net" - "net/http" "net/netip" "net/url" "slices" @@ -24,7 +22,6 @@ import ( "github.com/coder/quartz" maputil "github.com/coder/coder/v2/coderd/util/maps" - "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/tailnet" "github.com/coder/coder/v2/tailnet/proto" "github.com/coder/coder/v2/testutil" @@ -32,43 +29,25 @@ import ( func newFakeClient(ctx context.Context, t *testing.T) *fakeClient { return &fakeClient{ - t: t, - ctx: ctx, - connCh: make(chan *fakeConn, 1), - } -} - -func newFakeClientWithOptsCh(ctx context.Context, t *testing.T) *fakeClient { - return &fakeClient{ - t: t, - ctx: ctx, - connCh: make(chan *fakeConn, 1), - optsCh: make(chan *Options, 1), + t: t, + ctx: ctx, + ch: make(chan *fakeConn, 1), } } type fakeClient struct { - t *testing.T - ctx context.Context - connCh chan *fakeConn - optsCh chan *Options // options will be written to this channel if it's not nil + t *testing.T + ctx context.Context + ch chan *fakeConn } var _ Client = (*fakeClient)(nil) -func (f *fakeClient) NewConn(_ context.Context, _ *url.URL, _ string, opts *Options) (Conn, error) { - if f.optsCh != nil { - select { - case <-f.ctx.Done(): - return nil, f.ctx.Err() - case f.optsCh <- opts: - } - } - +func (f *fakeClient) NewConn(context.Context, *url.URL, string, *Options) (Conn, error) { select { case <-f.ctx.Done(): return nil, f.ctx.Err() - case conn := <-f.connCh: + case conn := <-f.ch: return conn, nil } } @@ -155,7 +134,7 @@ func TestTunnel_StartStop(t *testing.T) { t.Parallel() ctx := testutil.Context(t, testutil.WaitShort) - client := newFakeClientWithOptsCh(ctx, t) + client := newFakeClient(ctx, t) conn := newFakeConn(tailnet.WorkspaceUpdate{}, time.Time{}) _, mgr := setupTunnel(t, ctx, client, quartz.NewMock(t)) @@ -163,45 +142,29 @@ func TestTunnel_StartStop(t *testing.T) { errCh := make(chan error, 1) var resp *TunnelMessage // When: we start the tunnel - telemetry := codersdk.CoderDesktopTelemetry{ - DeviceID: "device001", - DeviceOS: "macOS", - CoderDesktopVersion: "0.24.8", - } - telemetryJSON, err := json.Marshal(telemetry) - require.NoError(t, err) go func() { r, err := mgr.unaryRPC(ctx, &ManagerMessage{ Msg: &ManagerMessage_Start{ Start: &StartRequest{ TunnelFileDescriptor: 2, - // Use default value for TunnelUseSoftNetIsolation - CoderUrl: "https://coder.example.com", - ApiToken: "fakeToken", + CoderUrl: "https://coder.example.com", + ApiToken: "fakeToken", Headers: []*StartRequest_Header{ {Name: "X-Test-Header", Value: "test"}, }, - DeviceOs: telemetry.DeviceOS, - DeviceId: telemetry.DeviceID, - CoderDesktopVersion: telemetry.CoderDesktopVersion, + DeviceOs: "macOS", + DeviceId: "device001", + CoderDesktopVersion: "0.24.8", }, }, }) resp = r errCh <- err }() - - // Then: `NewConn` is called - opts := testutil.RequireReceive(ctx, t, client.optsCh) - require.Equal(t, http.Header{ - "X-Test-Header": {"test"}, - codersdk.CoderDesktopTelemetryHeader: {string(telemetryJSON)}, - }, opts.Headers) - require.False(t, opts.UseSoftNetIsolation) // the default is false - testutil.RequireSend(ctx, t, client.connCh, conn) - + // Then: `NewConn` is called, + testutil.RequireSend(ctx, t, client.ch, conn) // And: a response is received - err = testutil.TryReceive(ctx, t, errCh) + err := testutil.TryReceive(ctx, t, errCh) require.NoError(t, err) _, ok := resp.Msg.(*TunnelMessage_Start) require.True(t, ok) @@ -234,7 +197,7 @@ func TestTunnel_PeerUpdate(t *testing.T) { wsID1 := uuid.UUID{1} wsID2 := uuid.UUID{2} - client := newFakeClientWithOptsCh(ctx, t) + client := newFakeClient(ctx, t) conn := newFakeConn(tailnet.WorkspaceUpdate{ UpsertedWorkspaces: []*tailnet.Workspace{ { @@ -248,28 +211,22 @@ func TestTunnel_PeerUpdate(t *testing.T) { tun, mgr := setupTunnel(t, ctx, client, quartz.NewMock(t)) - // When: we start the tunnel errCh := make(chan error, 1) var resp *TunnelMessage go func() { r, err := mgr.unaryRPC(ctx, &ManagerMessage{ Msg: &ManagerMessage_Start{ Start: &StartRequest{ - TunnelFileDescriptor: 2, - TunnelUseSoftNetIsolation: true, - CoderUrl: "https://coder.example.com", - ApiToken: "fakeToken", + TunnelFileDescriptor: 2, + CoderUrl: "https://coder.example.com", + ApiToken: "fakeToken", }, }, }) resp = r errCh <- err }() - - // Then: `NewConn` is called - opts := testutil.RequireReceive(ctx, t, client.optsCh) - require.True(t, opts.UseSoftNetIsolation) - testutil.RequireSend(ctx, t, client.connCh, conn) + testutil.RequireSend(ctx, t, client.ch, conn) err := testutil.TryReceive(ctx, t, errCh) require.NoError(t, err) _, ok := resp.Msg.(*TunnelMessage_Start) @@ -334,7 +291,7 @@ func TestTunnel_NetworkSettings(t *testing.T) { resp = r errCh <- err }() - testutil.RequireSend(ctx, t, client.connCh, conn) + testutil.RequireSend(ctx, t, client.ch, conn) err := testutil.TryReceive(ctx, t, errCh) require.NoError(t, err) _, ok := resp.Msg.(*TunnelMessage_Start) @@ -475,7 +432,7 @@ func TestTunnel_sendAgentUpdate(t *testing.T) { resp = r errCh <- err }() - testutil.RequireSend(ctx, t, client.connCh, conn) + testutil.RequireSend(ctx, t, client.ch, conn) err := testutil.TryReceive(ctx, t, errCh) require.NoError(t, err) _, ok := resp.Msg.(*TunnelMessage_Start) @@ -646,7 +603,7 @@ func TestTunnel_sendAgentUpdateReconnect(t *testing.T) { resp = r errCh <- err }() - testutil.RequireSend(ctx, t, client.connCh, conn) + testutil.RequireSend(ctx, t, client.ch, conn) err := testutil.TryReceive(ctx, t, errCh) require.NoError(t, err) _, ok := resp.Msg.(*TunnelMessage_Start) @@ -746,7 +703,7 @@ func TestTunnel_sendAgentUpdateWorkspaceReconnect(t *testing.T) { resp = r errCh <- err }() - testutil.RequireSend(ctx, t, client.connCh, conn) + testutil.RequireSend(ctx, t, client.ch, conn) err := testutil.TryReceive(ctx, t, errCh) require.NoError(t, err) _, ok := resp.Msg.(*TunnelMessage_Start) @@ -849,7 +806,7 @@ func TestTunnel_slowPing(t *testing.T) { resp = r errCh <- err }() - testutil.RequireSend(ctx, t, client.connCh, conn) + testutil.RequireSend(ctx, t, client.ch, conn) err := testutil.TryReceive(ctx, t, errCh) require.NoError(t, err) _, ok := resp.Msg.(*TunnelMessage_Start) @@ -938,7 +895,7 @@ func TestTunnel_stopMidPing(t *testing.T) { resp = r errCh <- err }() - testutil.RequireSend(ctx, t, client.connCh, conn) + testutil.RequireSend(ctx, t, client.ch, conn) err := testutil.TryReceive(ctx, t, errCh) require.NoError(t, err) _, ok := resp.Msg.(*TunnelMessage_Start) diff --git a/vpn/version.go b/vpn/version.go index b7bf1448a2c2e..2bf815e903e29 100644 --- a/vpn/version.go +++ b/vpn/version.go @@ -23,9 +23,7 @@ var CurrentSupportedVersions = RPCVersionList{ // - preferred_derp: The server that DERP relayed connections are // using, if they're not using P2P. // - preferred_derp_latency: The latency to the preferred DERP - // 1.3 adds: - // - tunnel_use_soft_net_isolation to the StartRequest - {Major: 1, Minor: 3}, + {Major: 1, Minor: 2}, }, } diff --git a/vpn/vpn.pb.go b/vpn/vpn.pb.go index 8e08a453acdc3..fbf5ce303fa35 100644 --- a/vpn/vpn.pb.go +++ b/vpn/vpn.pb.go @@ -1375,11 +1375,10 @@ type StartRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - TunnelFileDescriptor int32 `protobuf:"varint,1,opt,name=tunnel_file_descriptor,json=tunnelFileDescriptor,proto3" json:"tunnel_file_descriptor,omitempty"` - TunnelUseSoftNetIsolation bool `protobuf:"varint,8,opt,name=tunnel_use_soft_net_isolation,json=tunnelUseSoftNetIsolation,proto3" json:"tunnel_use_soft_net_isolation,omitempty"` - CoderUrl string `protobuf:"bytes,2,opt,name=coder_url,json=coderUrl,proto3" json:"coder_url,omitempty"` - ApiToken string `protobuf:"bytes,3,opt,name=api_token,json=apiToken,proto3" json:"api_token,omitempty"` - Headers []*StartRequest_Header `protobuf:"bytes,4,rep,name=headers,proto3" json:"headers,omitempty"` + TunnelFileDescriptor int32 `protobuf:"varint,1,opt,name=tunnel_file_descriptor,json=tunnelFileDescriptor,proto3" json:"tunnel_file_descriptor,omitempty"` + CoderUrl string `protobuf:"bytes,2,opt,name=coder_url,json=coderUrl,proto3" json:"coder_url,omitempty"` + ApiToken string `protobuf:"bytes,3,opt,name=api_token,json=apiToken,proto3" json:"api_token,omitempty"` + Headers []*StartRequest_Header `protobuf:"bytes,4,rep,name=headers,proto3" json:"headers,omitempty"` // Device ID from Coder Desktop DeviceId string `protobuf:"bytes,5,opt,name=device_id,json=deviceId,proto3" json:"device_id,omitempty"` // Device OS from Coder Desktop @@ -1427,13 +1426,6 @@ func (x *StartRequest) GetTunnelFileDescriptor() int32 { return 0 } -func (x *StartRequest) GetTunnelUseSoftNetIsolation() bool { - if x != nil { - return x.TunnelUseSoftNetIsolation - } - return false -} - func (x *StartRequest) GetCoderUrl() string { if x != nil { return x.CoderUrl @@ -2562,86 +2554,82 @@ var file_vpn_vpn_proto_rawDesc = []byte{ 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x22, 0x96, 0x03, 0x0a, 0x0c, 0x53, 0x74, 0x61, 0x72, 0x74, 0x52, 0x65, 0x71, 0x75, + 0x67, 0x65, 0x22, 0xd4, 0x02, 0x0a, 0x0c, 0x53, 0x74, 0x61, 0x72, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x34, 0x0a, 0x16, 0x74, 0x75, 0x6e, 0x6e, 0x65, 0x6c, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x14, 0x74, 0x75, 0x6e, 0x6e, 0x65, 0x6c, 0x46, 0x69, 0x6c, 0x65, 0x44, - 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x12, 0x40, 0x0a, 0x1d, 0x74, 0x75, 0x6e, - 0x6e, 0x65, 0x6c, 0x5f, 0x75, 0x73, 0x65, 0x5f, 0x73, 0x6f, 0x66, 0x74, 0x5f, 0x6e, 0x65, 0x74, - 0x5f, 0x69, 0x73, 0x6f, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, 0x08, - 0x52, 0x19, 0x74, 0x75, 0x6e, 0x6e, 0x65, 0x6c, 0x55, 0x73, 0x65, 0x53, 0x6f, 0x66, 0x74, 0x4e, - 0x65, 0x74, 0x49, 0x73, 0x6f, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1b, 0x0a, 0x09, 0x63, - 0x6f, 0x64, 0x65, 0x72, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, - 0x63, 0x6f, 0x64, 0x65, 0x72, 0x55, 0x72, 0x6c, 0x12, 0x1b, 0x0a, 0x09, 0x61, 0x70, 0x69, 0x5f, - 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x61, 0x70, 0x69, - 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x32, 0x0a, 0x07, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, - 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x76, 0x70, 0x6e, 0x2e, 0x53, 0x74, 0x61, - 0x72, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, - 0x52, 0x07, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x12, 0x1b, 0x0a, 0x09, 0x64, 0x65, 0x76, - 0x69, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x64, 0x65, - 0x76, 0x69, 0x63, 0x65, 0x49, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x64, 0x65, 0x76, 0x69, 0x63, 0x65, - 0x5f, 0x6f, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x64, 0x65, 0x76, 0x69, 0x63, - 0x65, 0x4f, 0x73, 0x12, 0x32, 0x0a, 0x15, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x5f, 0x64, 0x65, 0x73, - 0x6b, 0x74, 0x6f, 0x70, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x13, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x44, 0x65, 0x73, 0x6b, 0x74, 0x6f, 0x70, - 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x1a, 0x32, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x64, 0x65, - 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x4e, 0x0a, 0x0d, 0x53, - 0x74, 0x61, 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x18, 0x0a, 0x07, - 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x73, - 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, - 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, - 0x72, 0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x7a, 0x0a, 0x1d, 0x53, - 0x74, 0x61, 0x72, 0x74, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x44, 0x6f, 0x77, 0x6e, - 0x6c, 0x6f, 0x61, 0x64, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x23, 0x0a, 0x0d, - 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x77, 0x72, 0x69, 0x74, 0x74, 0x65, 0x6e, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x04, 0x52, 0x0c, 0x62, 0x79, 0x74, 0x65, 0x73, 0x57, 0x72, 0x69, 0x74, 0x74, 0x65, - 0x6e, 0x12, 0x24, 0x0a, 0x0b, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x74, 0x6f, 0x74, 0x61, 0x6c, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x48, 0x00, 0x52, 0x0a, 0x62, 0x79, 0x74, 0x65, 0x73, 0x54, - 0x6f, 0x74, 0x61, 0x6c, 0x88, 0x01, 0x01, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x62, 0x79, 0x74, 0x65, - 0x73, 0x5f, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x22, 0xaa, 0x01, 0x0a, 0x0d, 0x53, 0x74, 0x61, 0x72, - 0x74, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x2d, 0x0a, 0x05, 0x73, 0x74, 0x61, - 0x67, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x17, 0x2e, 0x76, 0x70, 0x6e, 0x2e, 0x53, - 0x74, 0x61, 0x72, 0x74, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x53, 0x74, 0x61, 0x67, - 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x12, 0x54, 0x0a, 0x11, 0x64, 0x6f, 0x77, 0x6e, - 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x76, 0x70, 0x6e, 0x2e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x50, - 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x50, - 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x48, 0x00, 0x52, 0x10, 0x64, 0x6f, 0x77, 0x6e, 0x6c, - 0x6f, 0x61, 0x64, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x88, 0x01, 0x01, 0x42, 0x14, - 0x0a, 0x12, 0x5f, 0x64, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x67, - 0x72, 0x65, 0x73, 0x73, 0x22, 0x0d, 0x0a, 0x0b, 0x53, 0x74, 0x6f, 0x70, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x22, 0x4d, 0x0a, 0x0c, 0x53, 0x74, 0x6f, 0x70, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x12, 0x23, 0x0a, - 0x0d, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, - 0x67, 0x65, 0x22, 0x0f, 0x0a, 0x0d, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x22, 0xe4, 0x01, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x33, - 0x0a, 0x09, 0x6c, 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0e, 0x32, 0x15, 0x2e, 0x76, 0x70, 0x6e, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x4c, - 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x52, 0x09, 0x6c, 0x69, 0x66, 0x65, 0x63, 0x79, - 0x63, 0x6c, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x6d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, - 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x30, 0x0a, 0x0b, 0x70, 0x65, 0x65, 0x72, - 0x5f, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, - 0x76, 0x70, 0x6e, 0x2e, 0x50, 0x65, 0x65, 0x72, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x0a, - 0x70, 0x65, 0x65, 0x72, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x22, 0x4e, 0x0a, 0x09, 0x4c, 0x69, - 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, - 0x57, 0x4e, 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x53, 0x54, 0x41, 0x52, 0x54, 0x49, 0x4e, 0x47, - 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x54, 0x41, 0x52, 0x54, 0x45, 0x44, 0x10, 0x02, 0x12, - 0x0c, 0x0a, 0x08, 0x53, 0x54, 0x4f, 0x50, 0x50, 0x49, 0x4e, 0x47, 0x10, 0x03, 0x12, 0x0b, 0x0a, - 0x07, 0x53, 0x54, 0x4f, 0x50, 0x50, 0x45, 0x44, 0x10, 0x04, 0x2a, 0x47, 0x0a, 0x12, 0x53, 0x74, - 0x61, 0x72, 0x74, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x53, 0x74, 0x61, 0x67, 0x65, - 0x12, 0x10, 0x0a, 0x0c, 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x69, 0x6e, 0x67, - 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, - 0x67, 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x69, 0x6e, - 0x67, 0x10, 0x02, 0x42, 0x39, 0x5a, 0x1d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, - 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, - 0x2f, 0x76, 0x70, 0x6e, 0xaa, 0x02, 0x17, 0x43, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x44, 0x65, 0x73, - 0x6b, 0x74, 0x6f, 0x70, 0x2e, 0x56, 0x70, 0x6e, 0x2e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x12, 0x1b, 0x0a, 0x09, 0x63, 0x6f, 0x64, + 0x65, 0x72, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x63, 0x6f, + 0x64, 0x65, 0x72, 0x55, 0x72, 0x6c, 0x12, 0x1b, 0x0a, 0x09, 0x61, 0x70, 0x69, 0x5f, 0x74, 0x6f, + 0x6b, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x61, 0x70, 0x69, 0x54, 0x6f, + 0x6b, 0x65, 0x6e, 0x12, 0x32, 0x0a, 0x07, 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x76, 0x70, 0x6e, 0x2e, 0x53, 0x74, 0x61, 0x72, 0x74, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x52, 0x07, + 0x68, 0x65, 0x61, 0x64, 0x65, 0x72, 0x73, 0x12, 0x1b, 0x0a, 0x09, 0x64, 0x65, 0x76, 0x69, 0x63, + 0x65, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x64, 0x65, 0x76, 0x69, + 0x63, 0x65, 0x49, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x64, 0x65, 0x76, 0x69, 0x63, 0x65, 0x5f, 0x6f, + 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x64, 0x65, 0x76, 0x69, 0x63, 0x65, 0x4f, + 0x73, 0x12, 0x32, 0x0a, 0x15, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x5f, 0x64, 0x65, 0x73, 0x6b, 0x74, + 0x6f, 0x70, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x13, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x44, 0x65, 0x73, 0x6b, 0x74, 0x6f, 0x70, 0x56, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x1a, 0x32, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x64, 0x65, 0x72, 0x12, + 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x4e, 0x0a, 0x0d, 0x53, 0x74, 0x61, + 0x72, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x75, + 0x63, 0x63, 0x65, 0x73, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x73, 0x75, 0x63, + 0x63, 0x65, 0x73, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x6d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x7a, 0x0a, 0x1d, 0x53, 0x74, 0x61, + 0x72, 0x74, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, + 0x61, 0x64, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x62, 0x79, + 0x74, 0x65, 0x73, 0x5f, 0x77, 0x72, 0x69, 0x74, 0x74, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x04, 0x52, 0x0c, 0x62, 0x79, 0x74, 0x65, 0x73, 0x57, 0x72, 0x69, 0x74, 0x74, 0x65, 0x6e, 0x12, + 0x24, 0x0a, 0x0b, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x04, 0x48, 0x00, 0x52, 0x0a, 0x62, 0x79, 0x74, 0x65, 0x73, 0x54, 0x6f, 0x74, + 0x61, 0x6c, 0x88, 0x01, 0x01, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, + 0x74, 0x6f, 0x74, 0x61, 0x6c, 0x22, 0xaa, 0x01, 0x0a, 0x0d, 0x53, 0x74, 0x61, 0x72, 0x74, 0x50, + 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x12, 0x2d, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x17, 0x2e, 0x76, 0x70, 0x6e, 0x2e, 0x53, 0x74, 0x61, + 0x72, 0x74, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x53, 0x74, 0x61, 0x67, 0x65, 0x52, + 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x12, 0x54, 0x0a, 0x11, 0x64, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, + 0x61, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x22, 0x2e, 0x76, 0x70, 0x6e, 0x2e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x50, 0x72, 0x6f, + 0x67, 0x72, 0x65, 0x73, 0x73, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x50, 0x72, 0x6f, + 0x67, 0x72, 0x65, 0x73, 0x73, 0x48, 0x00, 0x52, 0x10, 0x64, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, + 0x64, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x88, 0x01, 0x01, 0x42, 0x14, 0x0a, 0x12, + 0x5f, 0x64, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x67, 0x72, 0x65, + 0x73, 0x73, 0x22, 0x0d, 0x0a, 0x0b, 0x53, 0x74, 0x6f, 0x70, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x22, 0x4d, 0x0a, 0x0c, 0x53, 0x74, 0x6f, 0x70, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x07, 0x73, 0x75, 0x63, 0x63, 0x65, 0x73, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x22, 0x0f, 0x0a, 0x0d, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x22, 0xe4, 0x01, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x33, 0x0a, 0x09, + 0x6c, 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, + 0x15, 0x2e, 0x76, 0x70, 0x6e, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x2e, 0x4c, 0x69, 0x66, + 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x52, 0x09, 0x6c, 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, + 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x4d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x30, 0x0a, 0x0b, 0x70, 0x65, 0x65, 0x72, 0x5f, 0x75, + 0x70, 0x64, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x76, 0x70, + 0x6e, 0x2e, 0x50, 0x65, 0x65, 0x72, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x0a, 0x70, 0x65, + 0x65, 0x72, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x22, 0x4e, 0x0a, 0x09, 0x4c, 0x69, 0x66, 0x65, + 0x63, 0x79, 0x63, 0x6c, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, + 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x53, 0x54, 0x41, 0x52, 0x54, 0x49, 0x4e, 0x47, 0x10, 0x01, + 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x54, 0x41, 0x52, 0x54, 0x45, 0x44, 0x10, 0x02, 0x12, 0x0c, 0x0a, + 0x08, 0x53, 0x54, 0x4f, 0x50, 0x50, 0x49, 0x4e, 0x47, 0x10, 0x03, 0x12, 0x0b, 0x0a, 0x07, 0x53, + 0x54, 0x4f, 0x50, 0x50, 0x45, 0x44, 0x10, 0x04, 0x2a, 0x47, 0x0a, 0x12, 0x53, 0x74, 0x61, 0x72, + 0x74, 0x50, 0x72, 0x6f, 0x67, 0x72, 0x65, 0x73, 0x73, 0x53, 0x74, 0x61, 0x67, 0x65, 0x12, 0x10, + 0x0a, 0x0c, 0x49, 0x6e, 0x69, 0x74, 0x69, 0x61, 0x6c, 0x69, 0x7a, 0x69, 0x6e, 0x67, 0x10, 0x00, + 0x12, 0x0f, 0x0a, 0x0b, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x10, + 0x01, 0x12, 0x0e, 0x0a, 0x0a, 0x46, 0x69, 0x6e, 0x61, 0x6c, 0x69, 0x7a, 0x69, 0x6e, 0x67, 0x10, + 0x02, 0x42, 0x39, 0x5a, 0x1d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x76, + 0x70, 0x6e, 0xaa, 0x02, 0x17, 0x43, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x44, 0x65, 0x73, 0x6b, 0x74, + 0x6f, 0x70, 0x2e, 0x56, 0x70, 0x6e, 0x2e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/vpn/vpn.proto b/vpn/vpn.proto index 61c9978cdcad6..357a2b91b12fb 100644 --- a/vpn/vpn.proto +++ b/vpn/vpn.proto @@ -214,7 +214,6 @@ message NetworkSettingsResponse { // StartResponse. message StartRequest { int32 tunnel_file_descriptor = 1; - bool tunnel_use_soft_net_isolation = 8; string coder_url = 2; string api_token = 3; // Additional HTTP headers added to all requests pFad - Phonifier reborn

        Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

        Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


        Alternative Proxies:

        Alternative Proxy

        pFad Proxy

        pFad v3 Proxy

        pFad v4 Proxy