Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .changelog/46933.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
```release-note:enhancement
resource/aws_emrcontainers_job_template: Add `parameter_configuration` attribute to `job_template_data`
```
90 changes: 90 additions & 0 deletions internal/service/emrcontainers/job_template.go
Original file line number Diff line number Diff line change
Expand Up @@ -231,6 +231,31 @@ func resourceJobTemplate() *schema.Resource {
ForceNew: true,
Elem: &schema.Schema{Type: schema.TypeString},
},
"parameter_configuration": {
Type: schema.TypeSet,
Optional: true,
ForceNew: true,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
names.AttrName: {
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
names.AttrType: {
Type: schema.TypeString,
Required: true,
ForceNew: true,
ValidateDiagFunc: enum.Validate[awstypes.TemplateParameterDataType](),
},
names.AttrDefaultValue: {
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
},
},
},
"release_label": {
Type: schema.TypeString,
Required: true,
Expand Down Expand Up @@ -408,6 +433,10 @@ func expandJobTemplateData(tfMap map[string]any) *awstypes.JobTemplateData {
apiObject.JobTags = flex.ExpandStringValueMap(v)
}

if v, ok := tfMap["parameter_configuration"].(*schema.Set); ok && v.Len() > 0 {
apiObject.ParameterConfiguration = expandParameterConfiguration(v)
}

if v, ok := tfMap["release_label"].(string); ok && v != "" {
apiObject.ReleaseLabel = aws.String(v)
}
Expand Down Expand Up @@ -607,13 +636,74 @@ func flattenJobTemplateData(apiObject *awstypes.JobTemplateData) map[string]any
tfMap["job_tags"] = v
}

if v := apiObject.ParameterConfiguration; v != nil {
tfMap["parameter_configuration"] = flattenParameterConfiguration(v)
}

if v := apiObject.ReleaseLabel; v != nil {
tfMap["release_label"] = aws.ToString(v)
}

return tfMap
}

// expandParameterConfiguration converts the Terraform "parameter_configuration"
// set into the API shape: a map keyed by parameter name. Elements without a
// non-empty name are skipped. Returns nil for an empty set so the field is
// omitted from the API request.
func expandParameterConfiguration(tfSet *schema.Set) map[string]awstypes.TemplateParameterConfiguration {
	if tfSet.Len() == 0 {
		return nil
	}

	// Pre-size: at most one entry per set element.
	apiObjects := make(map[string]awstypes.TemplateParameterConfiguration, tfSet.Len())

	for _, tfMapRaw := range tfSet.List() {
		tfMap, ok := tfMapRaw.(map[string]any)
		if !ok {
			continue
		}

		// The element's "name" becomes the map key in the API representation.
		name, ok := tfMap[names.AttrName].(string)
		if !ok || name == "" {
			continue
		}

		apiObject := awstypes.TemplateParameterConfiguration{}

		if v, ok := tfMap[names.AttrType].(string); ok && v != "" {
			apiObject.Type = awstypes.TemplateParameterDataType(v)
		}

		if v, ok := tfMap[names.AttrDefaultValue].(string); ok && v != "" {
			apiObject.DefaultValue = aws.String(v)
		}

		apiObjects[name] = apiObject
	}

	return apiObjects
}

// flattenParameterConfiguration converts the API's map of parameter name to
// TemplateParameterConfiguration back into the Terraform list-of-maps shape.
// "default_value" is only set when present in the API object. Returns nil for
// an empty map. Map iteration order is random, but the target attribute is a
// TypeSet, so element order does not matter.
func flattenParameterConfiguration(apiObjects map[string]awstypes.TemplateParameterConfiguration) []map[string]any {
	if len(apiObjects) == 0 {
		return nil
	}

	// Pre-size: exactly one element per API map entry.
	tfList := make([]map[string]any, 0, len(apiObjects))

	for name, apiObject := range apiObjects {
		tfMap := map[string]any{
			names.AttrName: name,
			names.AttrType: string(apiObject.Type),
		}

		if v := apiObject.DefaultValue; v != nil {
			tfMap[names.AttrDefaultValue] = aws.ToString(v)
		}

		tfList = append(tfList, tfMap)
	}

	return tfList
}

func flattenConfigurationOverrides(apiObject *awstypes.ParametricConfigurationOverrides) map[string]any {
if apiObject == nil {
return nil
Expand Down
63 changes: 63 additions & 0 deletions internal/service/emrcontainers/job_template_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -158,6 +158,37 @@ func TestAccEMRContainersJobTemplate_tags(t *testing.T) {
})
}

// TestAccEMRContainersJobTemplate_parameterConfiguration is an acceptance test
// covering the new parameter_configuration attribute: it creates a job template
// with two parameters (one with a default value, one without), verifies both
// appear in state, then verifies the resource imports cleanly.
func TestAccEMRContainersJobTemplate_parameterConfiguration(t *testing.T) {
	ctx := acctest.Context(t)
	var v awstypes.JobTemplate
	rName := acctest.RandomWithPrefix(t, acctest.ResourcePrefix)
	resourceName := "aws_emrcontainers_job_template.test"

	acctest.ParallelTest(ctx, t, resource.TestCase{
		PreCheck: func() {
			acctest.PreCheck(ctx, t)
		},
		ErrorCheck:               acctest.ErrorCheck(t, names.EMRContainersServiceID),
		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories,
		CheckDestroy:             testAccCheckJobTemplateDestroy(ctx, t),
		Steps: []resource.TestStep{
			{
				Config: testAccJobTemplateConfig_parameterConfiguration(rName),
				Check: resource.ComposeTestCheckFunc(
					testAccCheckJobTemplateExists(ctx, t, resourceName, &v),
					resource.TestCheckResourceAttr(resourceName, "job_template_data.#", "1"),
					// Two parameter_configuration blocks are declared in the config.
					resource.TestCheckResourceAttr(resourceName, "job_template_data.0.parameter_configuration.#", "2"),
				),
			},
			{
				// Round-trip through import to confirm the flattener restores
				// the same state that the expander produced.
				ResourceName:      resourceName,
				ImportState:       true,
				ImportStateVerify: true,
			},
		},
	})
}

func testAccCheckJobTemplateExists(ctx context.Context, t *testing.T, n string, v *awstypes.JobTemplate) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[n]
Expand Down Expand Up @@ -290,6 +321,38 @@ resource "aws_emrcontainers_job_template" "test" {
`, rName))
}

// testAccJobTemplateConfig_parameterConfiguration returns Terraform
// configuration declaring a job template with two parameters: EntryPointUri
// (STRING, with a default) and SparkCores (NUMBER, no default).
func testAccJobTemplateConfig_parameterConfiguration(rName string) string {
	return acctest.ConfigCompose(
		testAccJobTemplateConfig_base(rName),
		fmt.Sprintf(`
resource "aws_emrcontainers_job_template" "test" {
  job_template_data {
    execution_role_arn = aws_iam_role.test.arn
    release_label      = "emr-6.10.0-latest"

    job_driver {
      spark_sql_job_driver {
        # "$${...}" escapes Terraform interpolation so the literal
        # placeholder "${EntryPointUri}" is sent to the EMR API; an
        # unescaped "${EntryPointUri}" would fail HCL parsing as an
        # invalid reference.
        entry_point = "$${EntryPointUri}"
      }
    }

    parameter_configuration {
      name          = "EntryPointUri"
      type          = "STRING"
      default_value = "s3://my-bucket/my-script.sql"
    }

    parameter_configuration {
      name = "SparkCores"
      type = "NUMBER"
    }
  }

  name = %[1]q
}
`, rName))
}

func testAccJobTemplateConfig_tags1(rName, tagKey1, tagValue1 string) string {
return acctest.ConfigCompose(
testAccJobTemplateConfig_base(rName),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ This resource supports the following arguments:
* `execution_role_arn` - (Required) The execution role ARN of the job run.
* `job_driver` - (Required) Specify the driver that the job runs on. Exactly one of the two available job drivers is required, either sparkSqlJobDriver or sparkSubmitJobDriver.
* `job_tags` - (Optional) The tags assigned to jobs started using the job template.
* `parameter_configuration` - (Optional) The configuration of parameters existing in the job template. See [`parameter_configuration`](#parameter_configuration-arguments) below.
* `release_label` - (Required) The release version of Amazon EMR.

#### configuration_overrides Arguments
Expand Down Expand Up @@ -102,6 +103,12 @@ This resource supports the following arguments:
* `entry_point_arguments` - (Optional) The arguments for job application.
* `spark_submit_parameters` - (Optional) The Spark submit parameters that are used for job runs.

#### parameter_configuration Arguments

* `name` - (Required) The name of the parameter. Referenced as `${ParameterName}` in job template fields.
* `type` - (Required) The type of the parameter. Valid values: `STRING`, `NUMBER`.
* `default_value` - (Optional) The default value for the parameter.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,7 @@ This resource supports the following arguments:
* `executionRoleArn` - (Required) The execution role ARN of the job run.
* `jobDriver` - (Required) Specify the driver that the job runs on. Exactly one of the two available job drivers is required, either sparkSqlJobDriver or sparkSubmitJobDriver.
* `jobTags` - (Optional) The tags assigned to jobs started using the job template.
* `parameterConfiguration` - (Optional) The configuration of parameters existing in the job template. See [`parameterConfiguration`](#parameter_configuration-arguments) below.
* `releaseLabel` - (Required) The release version of Amazon EMR.

#### configuration_overrides Arguments
Expand Down Expand Up @@ -105,6 +106,12 @@ This resource supports the following arguments:
* `entryPointArguments` - (Optional) The arguments for job application.
* `sparkSubmitParameters` - (Optional) The Spark submit parameters that are used for job runs.

#### parameter_configuration Arguments

* `name` - (Required) The name of the parameter. Referenced as `${ParameterName}` in job template fields.
* `type` - (Required) The type of the parameter. Valid values: `STRING`, `NUMBER`.
* `defaultValue` - (Optional) The default value for the parameter.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:
Expand Down
7 changes: 7 additions & 0 deletions website/docs/r/emrcontainers_job_template.html.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ This resource supports the following arguments:
* `execution_role_arn` - (Required) The execution role ARN of the job run.
* `job_driver` - (Required) Specify the driver that the job runs on. Exactly one of the two available job drivers is required, either sparkSqlJobDriver or sparkSubmitJobDriver.
* `job_tags` - (Optional) The tags assigned to jobs started using the job template.
* `parameter_configuration` - (Optional) The configuration of parameters existing in the job template. See [`parameter_configuration`](#parameter_configuration-arguments) below.
* `release_label` - (Required) The release version of Amazon EMR.

#### configuration_overrides Arguments
Expand Down Expand Up @@ -91,6 +92,12 @@ This resource supports the following arguments:
* `entry_point_arguments` - (Optional) The arguments for job application.
* `spark_submit_parameters` - (Optional) The Spark submit parameters that are used for job runs.

#### parameter_configuration Arguments

* `name` - (Required) The name of the parameter. Referenced as `${ParameterName}` in job template fields.
* `type` - (Required) The type of the parameter. Valid values: `STRING`, `NUMBER`.
* `default_value` - (Optional) The default value for the parameter.

## Attribute Reference

This resource exports the following attributes in addition to the arguments above:
Expand Down