Skip to content

Commit

Permalink
Bump github.com/databricks/databricks-sdk-go from 0.36.0 to 0.37.0 (#1326)
Browse files Browse the repository at this point in the history

[![Dependabot compatibility
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/databricks/databricks-sdk-go&package-manager=go_modules&previous-version=0.36.0&new-version=0.37.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.

[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)

---

<details>
<summary>Dependabot commands and options</summary>
<br />

You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show <dependency name> ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)


</details>

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Andrew Nester <andrew.nester@databricks.com>
  • Loading branch information
dependabot[bot] and andrewnester authored Apr 3, 2024
1 parent c1963ec commit f28a9d7
Show file tree
Hide file tree
Showing 17 changed files with 443 additions and 130 deletions.
2 changes: 1 addition & 1 deletion .codegen/_openapi_sha
Original file line number Diff line number Diff line change
@@ -1 +1 @@
93763b0d7ae908520c229c786fff28b8fd623261
e316cc3d78d087522a74650e26586088da9ac8cb
10 changes: 5 additions & 5 deletions bundle/config/mutator/merge_job_clusters_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -23,21 +23,21 @@ func TestMergeJobClusters(t *testing.T) {
JobClusters: []jobs.JobCluster{
{
JobClusterKey: "foo",
NewCluster: &compute.ClusterSpec{
NewCluster: compute.ClusterSpec{
SparkVersion: "13.3.x-scala2.12",
NodeTypeId: "i3.xlarge",
NumWorkers: 2,
},
},
{
JobClusterKey: "bar",
NewCluster: &compute.ClusterSpec{
NewCluster: compute.ClusterSpec{
SparkVersion: "10.4.x-scala2.12",
},
},
{
JobClusterKey: "foo",
NewCluster: &compute.ClusterSpec{
NewCluster: compute.ClusterSpec{
NodeTypeId: "i3.2xlarge",
NumWorkers: 4,
},
Expand Down Expand Up @@ -79,14 +79,14 @@ func TestMergeJobClustersWithNilKey(t *testing.T) {
JobSettings: &jobs.JobSettings{
JobClusters: []jobs.JobCluster{
{
NewCluster: &compute.ClusterSpec{
NewCluster: compute.ClusterSpec{
SparkVersion: "13.3.x-scala2.12",
NodeTypeId: "i3.xlarge",
NumWorkers: 2,
},
},
{
NewCluster: &compute.ClusterSpec{
NewCluster: compute.ClusterSpec{
NodeTypeId: "i3.2xlarge",
NumWorkers: 4,
},
Expand Down
2 changes: 1 addition & 1 deletion bundle/deploy/metadata/annotate_jobs.go
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ func (m *annotateJobs) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnosti
Kind: jobs.JobDeploymentKindBundle,
MetadataFilePath: path.Join(b.Config.Workspace.StatePath, MetadataFileName),
}
job.JobSettings.EditMode = jobs.JobSettingsEditModeUiLocked
job.JobSettings.EditMode = jobs.JobEditModeUiLocked
job.JobSettings.Format = jobs.FormatMultiTask
}

Expand Down
4 changes: 2 additions & 2 deletions bundle/deploy/metadata/annotate_jobs_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ func TestAnnotateJobsMutator(t *testing.T) {
MetadataFilePath: "/a/b/c/metadata.json",
},
b.Config.Resources.Jobs["my-job-1"].JobSettings.Deployment)
assert.Equal(t, jobs.JobSettingsEditModeUiLocked, b.Config.Resources.Jobs["my-job-1"].EditMode)
assert.Equal(t, jobs.JobEditModeUiLocked, b.Config.Resources.Jobs["my-job-1"].EditMode)
assert.Equal(t, jobs.FormatMultiTask, b.Config.Resources.Jobs["my-job-1"].Format)

assert.Equal(t,
Expand All @@ -53,7 +53,7 @@ func TestAnnotateJobsMutator(t *testing.T) {
MetadataFilePath: "/a/b/c/metadata.json",
},
b.Config.Resources.Jobs["my-job-2"].JobSettings.Deployment)
assert.Equal(t, jobs.JobSettingsEditModeUiLocked, b.Config.Resources.Jobs["my-job-2"].EditMode)
assert.Equal(t, jobs.JobEditModeUiLocked, b.Config.Resources.Jobs["my-job-2"].EditMode)
assert.Equal(t, jobs.FormatMultiTask, b.Config.Resources.Jobs["my-job-2"].Format)
}

Expand Down
2 changes: 1 addition & 1 deletion bundle/deploy/terraform/convert_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ func TestBundleToTerraformJob(t *testing.T) {
JobClusters: []jobs.JobCluster{
{
JobClusterKey: "key",
NewCluster: &compute.ClusterSpec{
NewCluster: compute.ClusterSpec{
SparkVersion: "10.4.x-scala2.12",
},
},
Expand Down
2 changes: 1 addition & 1 deletion bundle/deploy/terraform/tfdyn/convert_job_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ func TestConvertJob(t *testing.T) {
JobClusters: []jobs.JobCluster{
{
JobClusterKey: "key",
NewCluster: &compute.ClusterSpec{
NewCluster: compute.ClusterSpec{
SparkVersion: "10.4.x-scala2.12",
},
},
Expand Down
2 changes: 1 addition & 1 deletion bundle/python/warning.go
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
if task.JobClusterKey != "" {
for _, job := range b.Config.Resources.Jobs {
for _, cluster := range job.JobClusters {
if task.JobClusterKey == cluster.JobClusterKey && cluster.NewCluster != nil {
if task.JobClusterKey == cluster.JobClusterKey && cluster.NewCluster.SparkVersion != "" {
if lowerThanExpectedVersion(ctx, cluster.NewCluster.SparkVersion) {
return true
}
Expand Down
8 changes: 4 additions & 4 deletions bundle/python/warning_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -63,13 +63,13 @@ func TestIncompatibleWheelTasksWithJobClusterKey(t *testing.T) {
JobClusters: []jobs.JobCluster{
{
JobClusterKey: "cluster1",
NewCluster: &compute.ClusterSpec{
NewCluster: compute.ClusterSpec{
SparkVersion: "12.2.x-scala2.12",
},
},
{
JobClusterKey: "cluster2",
NewCluster: &compute.ClusterSpec{
NewCluster: compute.ClusterSpec{
SparkVersion: "13.1.x-scala2.12",
},
},
Expand Down Expand Up @@ -157,13 +157,13 @@ func TestNoIncompatibleWheelTasks(t *testing.T) {
JobClusters: []jobs.JobCluster{
{
JobClusterKey: "cluster1",
NewCluster: &compute.ClusterSpec{
NewCluster: compute.ClusterSpec{
SparkVersion: "12.2.x-scala2.12",
},
},
{
JobClusterKey: "cluster2",
NewCluster: &compute.ClusterSpec{
NewCluster: compute.ClusterSpec{
SparkVersion: "13.1.x-scala2.12",
},
},
Expand Down
Loading

0 comments on commit f28a9d7

Please sign in to comment.