6 changes: 6 additions & 0 deletions acceptance/cmd/auth/login/custom-config-file/out.databrickscfg
@@ -0,0 +1,6 @@
; The profile defined in the DEFAULT section is to be used as a fallback when no profile is explicitly specified.
[DEFAULT]

[custom-test]
host = [DATABRICKS_URL]
auth_type = databricks-cli
5 changes: 5 additions & 0 deletions acceptance/cmd/auth/login/custom-config-file/out.test.toml

Some generated files are not rendered by default.

21 changes: 21 additions & 0 deletions acceptance/cmd/auth/login/custom-config-file/output.txt
@@ -0,0 +1,21 @@

=== Initial custom config file (with old host)
[custom-test]
host = https://old-host.cloud.databricks.com
auth_type = pat
token = old-token-123

=== Login with new host (should override old host)
>>> [CLI] auth login --host [DATABRICKS_URL] --profile custom-test
Profile custom-test was successfully saved

=== Default config file should NOT exist
OK: Default .databrickscfg does not exist

=== Custom config file after login (old host should be replaced)
; The profile defined in the DEFAULT section is to be used as a fallback when no profile is explicitly specified.
[DEFAULT]

[custom-test]
host = [DATABRICKS_URL]
auth_type = databricks-cli
39 changes: 39 additions & 0 deletions acceptance/cmd/auth/login/custom-config-file/script
@@ -0,0 +1,39 @@
sethome "./home"

# Use a fake browser that performs a GET on the authorization URL
# and follows the redirect back to localhost.
export BROWSER="browser.py"

# Set custom config file location via env var
export DATABRICKS_CONFIG_FILE="./home/custom.databrickscfg"

# Create an existing custom config file with a DIFFERENT host.
# The login command should use the host from --host argument, NOT from this file.
# If the wrong host (from the config file) were used, the OAuth flow would fail
# because the mock server only responds for $DATABRICKS_HOST.
cat > "./home/custom.databrickscfg" <<EOF
[custom-test]
host = https://old-host.cloud.databricks.com
auth_type = pat
token = old-token-123
EOF

title "Initial custom config file (with old host)\n"
cat "./home/custom.databrickscfg"

title "Login with new host (should override old host)"
trace $CLI auth login --host $DATABRICKS_HOST --profile custom-test

title "Default config file should NOT exist\n"
if [ -f "./home/.databrickscfg" ]; then
echo "ERROR: Default .databrickscfg exists but should not"
cat "./home/.databrickscfg"
else
echo "OK: Default .databrickscfg does not exist"
fi

title "Custom config file after login (old host should be replaced)\n"
cat "./home/custom.databrickscfg"

# Track the custom config file to surface changes
mv "./home/custom.databrickscfg" "./out.databrickscfg"
3 changes: 3 additions & 0 deletions acceptance/cmd/auth/login/custom-config-file/test.toml
@@ -0,0 +1,3 @@
Ignore = [
"home"
]
6 changes: 6 additions & 0 deletions acceptance/cmd/auth/login/host-arg-overrides-profile/out.databrickscfg
@@ -0,0 +1,6 @@
; The profile defined in the DEFAULT section is to be used as a fallback when no profile is explicitly specified.
[DEFAULT]

[override-test]
host = [DATABRICKS_URL]
auth_type = databricks-cli

Some generated files are not rendered by default.

16 changes: 16 additions & 0 deletions acceptance/cmd/auth/login/host-arg-overrides-profile/output.txt
@@ -0,0 +1,16 @@

=== Initial profile with old host
[override-test]
host = https://old-host.cloud.databricks.com

=== Login with positional host argument (should override profile)
>>> [CLI] auth login [DATABRICKS_URL] --profile override-test
Profile override-test was successfully saved

=== Profile after login (host should be updated)
; The profile defined in the DEFAULT section is to be used as a fallback when no profile is explicitly specified.
[DEFAULT]

[override-test]
host = [DATABRICKS_URL]
auth_type = databricks-cli
25 changes: 25 additions & 0 deletions acceptance/cmd/auth/login/host-arg-overrides-profile/script
@@ -0,0 +1,25 @@
sethome "./home"

# Create an existing profile with a DIFFERENT host
cat > "./home/.databrickscfg" <<EOF
[override-test]
host = https://old-host.cloud.databricks.com
EOF

title "Initial profile with old host\n"
cat "./home/.databrickscfg"

# Use a fake browser that performs a GET on the authorization URL
# and follows the redirect back to localhost.
export BROWSER="browser.py"

# Login with profile but provide a different host as positional argument
# The positional argument should override the profile's host
title "Login with positional host argument (should override profile)"
trace $CLI auth login $DATABRICKS_HOST --profile override-test

title "Profile after login (host should be updated)\n"
cat "./home/.databrickscfg"

# Track the .databrickscfg file that was created to surface changes.
mv "./home/.databrickscfg" "./out.databrickscfg"
3 changes: 3 additions & 0 deletions acceptance/cmd/auth/login/host-arg-overrides-profile/test.toml
@@ -0,0 +1,3 @@
Ignore = [
"home"
]
6 changes: 6 additions & 0 deletions acceptance/cmd/auth/login/host-from-profile/out.databrickscfg
@@ -0,0 +1,6 @@
; The profile defined in the DEFAULT section is to be used as a fallback when no profile is explicitly specified.
[DEFAULT]

[existing-profile]
host = [DATABRICKS_URL]
auth_type = databricks-cli
5 changes: 5 additions & 0 deletions acceptance/cmd/auth/login/host-from-profile/out.test.toml

Some generated files are not rendered by default.

16 changes: 16 additions & 0 deletions acceptance/cmd/auth/login/host-from-profile/output.txt
@@ -0,0 +1,16 @@

=== Initial profile
[existing-profile]
host = [DATABRICKS_URL]

=== Login with existing profile (no host argument)
>>> [CLI] auth login --profile existing-profile
Profile existing-profile was successfully saved

=== Profile after login
; The profile defined in the DEFAULT section is to be used as a fallback when no profile is explicitly specified.
[DEFAULT]

[existing-profile]
host = [DATABRICKS_URL]
auth_type = databricks-cli
24 changes: 24 additions & 0 deletions acceptance/cmd/auth/login/host-from-profile/script
@@ -0,0 +1,24 @@
sethome "./home"

# Create an existing profile with a host
cat > "./home/.databrickscfg" <<EOF
[existing-profile]
host = $DATABRICKS_HOST
EOF

title "Initial profile\n"
cat "./home/.databrickscfg"

# Use a fake browser that performs a GET on the authorization URL
# and follows the redirect back to localhost.
export BROWSER="browser.py"

# Login with just the profile name - should use the host from the profile
title "Login with existing profile (no host argument)"
trace $CLI auth login --profile existing-profile

title "Profile after login\n"
cat "./home/.databrickscfg"

# Track the .databrickscfg file that was created to surface changes.
mv "./home/.databrickscfg" "./out.databrickscfg"
3 changes: 3 additions & 0 deletions acceptance/cmd/auth/login/host-from-profile/test.toml
@@ -0,0 +1,3 @@
Ignore = [
"home"
]
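
The three acceptance tests above exercise how auth login resolves the target host: an explicit host (the --host flag or a positional argument) overrides whatever host an existing profile already stores, and when no host is given the profile's host is used. The custom-config-file test additionally checks that DATABRICKS_CONFIG_FILE controls where the profile is written. The snippet below is a minimal, hypothetical sketch of that precedence, written only to illustrate the behavior the tests assert; it is not the CLI's actual implementation.

package main

import "fmt"

// resolveHost picks the host to authenticate against: an explicitly
// provided host wins over the host stored in an existing profile.
func resolveHost(argHost, profileHost string) string {
	if argHost != "" {
		return argHost // --host flag or positional argument overrides the profile
	}
	return profileHost // fall back to the host stored in the existing profile
}

func main() {
	// host-arg-overrides-profile: the explicit host replaces the stale profile host.
	fmt.Println(resolveHost("https://new-host.cloud.databricks.com", "https://old-host.cloud.databricks.com"))
	// host-from-profile: no host argument, so the profile's host is used.
	fmt.Println(resolveHost("", "https://existing.cloud.databricks.com"))
}
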
56 changes: 28 additions & 28 deletions cmd/auth/login.go
@@ -19,6 +19,7 @@ import (
"github.com/databricks/cli/libs/exec"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/config"
"github.com/databricks/databricks-sdk-go/config/experimental/auth/authconv"
"github.com/databricks/databricks-sdk-go/credentials/u2m"
browserpkg "github.com/pkg/browser"
"github.com/spf13/cobra"
@@ -44,6 +45,7 @@ func promptForProfile(ctx context.Context, defaultValue string) (string, error)
const (
minimalDbConnectVersion = "13.1"
defaultTimeout = 1 * time.Hour
authTypeDatabricksCLI = "databricks-cli"
)

func newLoginCommand(authArguments *auth.AuthArguments) *cobra.Command {
@@ -161,42 +163,40 @@ depends on the existing profiles you have set in your configuration file
}
defer persistentAuth.Close()

// We need the config without the profile before it's used to initialise new workspace client below.
// Otherwise it will complain about non existing profile because it was not yet saved.
cfg := config.Config{
Host: authArguments.Host,
AccountID: authArguments.AccountID,
WorkspaceId: authArguments.WorkspaceId,
Experimental_IsUnifiedHost: authArguments.IsUnifiedHost,
AuthType: "databricks-cli",
}
databricksCfgFile := os.Getenv("DATABRICKS_CONFIG_FILE")
if databricksCfgFile != "" {
cfg.ConfigFile = databricksCfgFile
}

ctx, cancel := context.WithTimeout(ctx, loginTimeout)
defer cancel()

if err = persistentAuth.Challenge(); err != nil {
return err
}
// At this point, an OAuth token has been successfully minted and stored
// in the CLI cache. The rest of the command focuses on:
// 1. Configuring cluster and serverless;
// 2. Saving the profile.

var clusterID, serverlessComputeID string
Review comment from @renaudhartert-db (Contributor), Jan 21, 2026:

Do we actually need these variables compared to setting the value in cfg as it was done before? I'm not convinced having these here is a readability improvement.

Reply from the PR author (Contributor Author):

Do you mean moving cfg out of the switch block and accumulating values in it as before?

We have to either put them in cfg or in these variables. I wanted to move the cfg struct within the switch case to avoid others also getting confused about how the token command depends on the Go SDK, but I don't think that's a problem now since we've removed the databricks-cli auth type and added the comments you suggested. I don't have a strong opinion here but would lean towards keeping the config struct for configuring clusters separate from collecting fields to save to the profile.
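
For illustration, a stripped-down, hypothetical sketch of the two shapes being weighed in this thread: a shared cfg that is mutated as values are chosen versus locals that are collected into the saved config in one place. The types and fields below are simplified stand-ins, not the CLI's actual code.

package main

import "fmt"

type profileConfig struct {
	Host                string
	ClusterID           string
	ServerlessComputeID string
}

// optionA mutates one shared config as the command progresses; the same
// struct is later saved as the profile.
func optionA(host string, serverless bool) profileConfig {
	cfg := profileConfig{Host: host}
	if serverless {
		cfg.ServerlessComputeID = "auto"
	}
	return cfg
}

// optionB keeps the chosen values in locals and assembles the config to be
// saved in a single place, right before saving (the approach taken in this PR).
func optionB(host string, serverless bool) profileConfig {
	var clusterID, serverlessComputeID string
	if serverless {
		serverlessComputeID = "auto"
	}
	return profileConfig{Host: host, ClusterID: clusterID, ServerlessComputeID: serverlessComputeID}
}

func main() {
	fmt.Println(optionA("https://example.cloud.databricks.com", true))
	fmt.Println(optionB("https://example.cloud.databricks.com", true))
}
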

switch {
case configureCluster:
w, err := databricks.NewWorkspaceClient((*databricks.Config)(&cfg))
// Create a workspace client to list clusters for interactive selection.
// We use a custom CredentialsStrategy that wraps the token we just minted,
// avoiding the need to spawn a child CLI process (which AuthType "databricks-cli" does).
w, err := databricks.NewWorkspaceClient(&databricks.Config{
Host: authArguments.Host,
AccountID: authArguments.AccountID,
WorkspaceId: authArguments.WorkspaceId,
Experimental_IsUnifiedHost: authArguments.IsUnifiedHost,
Credentials: config.NewTokenSourceStrategy("login-token", authconv.AuthTokenSource(persistentAuth)),
})
if err != nil {
return err
}
clusterID, err := cfgpickers.AskForCluster(ctx, w,
clusterID, err = cfgpickers.AskForCluster(ctx, w,
cfgpickers.WithDatabricksConnect(minimalDbConnectVersion))
if err != nil {
return err
}
cfg.ClusterID = clusterID
case configureServerless:
cfg.ClusterID = ""
cfg.ServerlessComputeID = "auto"
serverlessComputeID = "auto"
default:
// Respect the existing profile if it exists, even if it has
// both cluster and serverless configured. Tools relying on
@@ -206,22 +206,22 @@ depends on the existing profiles you have set in your configuration file
// to clean up the profile under the assumption that serverless
// is the preferred option.
if existingProfile != nil {
cfg.ClusterID = existingProfile.ClusterID
cfg.ServerlessComputeID = existingProfile.ServerlessComputeID
clusterID = existingProfile.ClusterID
serverlessComputeID = existingProfile.ServerlessComputeID
}
}

if profileName != "" {
err = databrickscfg.SaveToProfile(ctx, &config.Config{
err := databrickscfg.SaveToProfile(ctx, &config.Config{
Profile: profileName,
Host: cfg.Host,
AuthType: cfg.AuthType,
AccountID: cfg.AccountID,
Host: authArguments.Host,
AuthType: authTypeDatabricksCLI,
AccountID: authArguments.AccountID,
WorkspaceId: authArguments.WorkspaceId,
Experimental_IsUnifiedHost: authArguments.IsUnifiedHost,
ClusterID: cfg.ClusterID,
ConfigFile: cfg.ConfigFile,
ServerlessComputeID: cfg.ServerlessComputeID,
ClusterID: clusterID,
ConfigFile: os.Getenv("DATABRICKS_CONFIG_FILE"),
ServerlessComputeID: serverlessComputeID,
})
if err != nil {
return err