Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

IMPROVEMENTS:
* data source/nomad_jwks: add EdDSA (Ed25519) key support ([#583](https://github.com/hashicorp/terraform-provider-nomad/pull/583))
* data source/nomad_job_parser: add `variables` parameter to pass HCL2 variables to the job parser ([#582](https://github.com/hashicorp/terraform-provider-nomad/pull/582))
* resource/nomad_acl_policy: make `job_id` optional in `job_acl` block to allow policies that apply to all jobs in a namespace ([#580](https://github.com/hashicorp/terraform-provider-nomad/pull/580))
* resource/nomad_namespace: add `vault_config` and `consul_config` blocks to configure Vault and Consul cluster permissions (Nomad Enterprise only) ([#581](https://github.com/hashicorp/terraform-provider-nomad/pull/581))
* **New Data Source**: `nomad_node` to look up a single Nomad node by ID ([#579](https://github.com/hashicorp/terraform-provider-nomad/pull/579))
Expand Down
20 changes: 17 additions & 3 deletions nomad/data_source_job_parser.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import (
"log"
"strings"

"github.com/hashicorp/nomad/api"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

Expand All @@ -27,6 +28,13 @@ func dataSourceJobParser() *schema.Resource {
Optional: true,
Default: false,
},
"variables": {
Description: "HCL2 variables to pass to the job parser. Interpreted as the content of a variables file.",
Type: schema.TypeString,
Optional: true,
Default: "",
},

"json": {
Description: "The parsed job as JSON string.",
Type: schema.TypeString,
Expand All @@ -42,9 +50,17 @@ func dataSourceJobParserRead(d *schema.ResourceData, meta interface{}) error {

hcl := d.Get("hcl").(string)
canonicalize := d.Get("canonicalize").(bool)
variables := d.Get("variables").(string)

log.Printf("[DEBUG] Parsing Job with Canonicalize set to %t", canonicalize)
job, err := client.Jobs().ParseHCL(hcl, canonicalize)

req := &api.JobsParseRequest{
JobHCL: hcl,
Canonicalize: canonicalize,
Variables: variables,
}

job, err := client.Jobs().ParseHCLOpts(req)
if err != nil {
return fmt.Errorf("error parsing job: %#v", err)
}
Expand All @@ -57,8 +73,6 @@ func dataSourceJobParserRead(d *schema.ResourceData, meta interface{}) error {
jobJSONString := string(jobJSON)

d.SetId(*job.ID)
d.Set("hcl", strings.TrimSpace(hcl))
d.Set("canonicalize", canonicalize)
d.Set("json", strings.TrimSpace(jobJSONString))

return nil
Expand Down
165 changes: 157 additions & 8 deletions nomad/data_source_job_parser_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ import (
"fmt"
"reflect"
"regexp"
"strings"
"testing"

"github.com/hashicorp/nomad/api"
Expand All @@ -26,13 +25,7 @@ func TestAccDataSourceNomadJobParser_Basic(t *testing.T) {
Steps: []resource.TestStep{
{
Config: testJobParserConfig(),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr(
resourceName, "hcl", strings.TrimSpace(testDataSourceJobParserHCL)),
resource.TestCheckResourceAttr(
resourceName, "canonicalize", "false"),
checkJobFromString(resourceName, testDataSourceJobParserJSON),
),
Check: checkJobFromString(resourceName, testDataSourceJobParserJSON),
},
},
})
Expand Down Expand Up @@ -288,3 +281,159 @@ data "nomad_job_parser" "test_job" {
const testDataSourceJobParserMissingHCLConfig = `
data "nomad_job_parser" "test_job" {
}`

// TestAccDataSourceNomadJobParser_WithVariables is an acceptance test
// verifying that HCL2 variable values passed through the data source's
// `variables` argument are substituted into the parsed job output.
func TestAccDataSourceNomadJobParser_WithVariables(t *testing.T) {
	const resourceName = "data.nomad_job_parser.test_job"

	resource.Test(t, resource.TestCase{
		PreCheck:  func() { testAccPreCheck(t) },
		Providers: testProviders,
		Steps: []resource.TestStep{
			{
				Config: testJobParserWithVariablesConfig(),
				Check:  checkJobFromString(resourceName, testDataSourceJobParserWithVariablesJSON),
			},
		},
	})
}

// testJobParserWithVariablesConfig renders the Terraform configuration for
// the variables acceptance test, embedding the jobspec and the variables
// file content as heredoc arguments of the data source.
func testJobParserWithVariablesConfig() string {
	const tmpl = `
data "nomad_job_parser" "test_job" {
  hcl = <<EOT
%s
EOT
  variables = <<EOT
%s
EOT
}`
	return fmt.Sprintf(tmpl, testDataSourceJobParserWithVariablesHCL, testDataSourceJobParserVariables)
}

// testDataSourceJobParserWithVariablesHCL is a jobspec that declares two
// HCL2 input variables (`datacenter` and `image`) and references them via
// var.*, so parsing it requires variable values to be supplied.
const testDataSourceJobParserWithVariablesHCL = `
variable "datacenter" {
type = string
}

variable "image" {
type = string
}

job "example" {
datacenters = [var.datacenter]

group "cache" {
task "redis" {
driver = "docker"

config {
image = var.image
}

resources {
cpu = 500
memory = 256
}
}
}
}`

// testDataSourceJobParserVariables is the content of an HCL2 variables
// file assigning concrete values to the jobspec's `datacenter` and
// `image` variables.
const testDataSourceJobParserVariables = `datacenter = "dc1"
image = "redis:7.0"
`

// testDataSourceJobParserWithVariablesJSON is the expected parsed job as
// returned by the Nomad job parser for testDataSourceJobParserWithVariablesHCL
// with testDataSourceJobParserVariables applied: note "Datacenters" and the
// task "Config.image" carry the substituted variable values ("dc1",
// "redis:7.0"). Compared against the data source's `json` attribute by
// checkJobFromString.
const testDataSourceJobParserWithVariablesJSON = `{
"Stop": null,
"Region": null,
"Namespace": null,
"ID": "example",
"ParentID": null,
"Name": "example",
"Type": null,
"Priority": null,
"AllAtOnce": null,
"Datacenters": [
"dc1"
],
"Constraints": null,
"Affinities": null,
"TaskGroups": [
{
"Name": "cache",
"Count": null,
"Constraints": null,
"Affinities": null,
"Tasks": [
{
"Name": "redis",
"Driver": "docker",
"User": "",
"Lifecycle": null,
"Config": {
"image": "redis:7.0"
},
"Constraints": null,
"Affinities": null,
"Env": null,
"ScalingPolicies": null,
"Services": null,
"Resources": {
"CPU": 500,
"MemoryMB": 256,
"DiskMB": null,
"Networks": null,
"Devices": null,
"IOPS": null
},
"RestartPolicy": null,
"Meta": null,
"KillTimeout": null,
"LogConfig": null,
"Artifacts": null,
"Vault": null,
"Templates": null,
"DispatchPayload": null,
"VolumeMounts": null,
"Leader": false,
"ShutdownDelay": 0,
"KillSignal": "",
"Kind": ""
}
],
"Spreads": null,
"Volumes": null,
"RestartPolicy": null,
"ReschedulePolicy": null,
"EphemeralDisk": null,
"Update": null,
"Migrate": null,
"Networks": null,
"Meta": null,
"Services": null,
"ShutdownDelay": null,
"StopAfterClientDisconnect": null,
"Scaling": null
}
],
"Update": null,
"Multiregion": null,
"Spreads": null,
"Periodic": null,
"ParameterizedJob": null,
"Dispatched": false,
"Payload": null,
"Reschedule": null,
"Migrate": null,
"Meta": null,
"ConsulToken": null,
"VaultToken": null,
"VaultNamespace": null,
"NomadTokenID": null,
"Status": null,
"StatusDescription": null,
"Stable": null,
"Version": null,
"SubmitTime": null,
"CreateIndex": null,
"ModifyIndex": null,
"JobModifyIndex": null
}`
25 changes: 22 additions & 3 deletions website/docs/d/job_parser.html.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,29 @@ data "nomad_job_parser" "my_job" {
}
```

### With Variables

```hcl
data "nomad_job_parser" "my_job" {
hcl = file("${path.module}/jobspec.hcl")

variables = <<EOT
datacenter = "dc1"
image = "nginx:latest"
EOT
}
```

## Argument Reference

The following arguments are supported:

- `hcl` `(string: <required>)` - The HCL definition of the job.
- `canonicalize` `(boolean: false)` - Flag to enable setting any unset fields to their default values.
- `variables` `(string: "")` - HCL2 variables to pass to the job parser. Interpreted as the content of a variables file.

## Attribute Reference

The following attributes are exported:

- `hcl` `(string)` - The HCL definition of the job.
- `canonicalize` `(boolean: false)` - Flag to enable setting any unset fields to their default values.
- `json` `(string)` - the parsed job as JSON string.
- `json` `(string)` - The parsed job as JSON string.
Loading