From e9747b5fdc79d3b45290230106edcd02c1a3c469 Mon Sep 17 00:00:00 2001
From: Daniel Schmidt
Date: Fri, 2 Jun 2023 13:05:29 +0200
Subject: [PATCH] chore: add cdktf documentation update steps into release process

Co-authored-by: Brian Flad
---
 .github/workflows/cdktf-docs.yml              |  69 ++++++++++
 .github/workflows/release.yml                 |  35 ++---
 docs/cdktf/python/data-sources/data_source.md | 108 ++++++++++++++++
 docs/cdktf/python/index.md                    |  27 ++++
 docs/cdktf/python/resources/resource.md       |  80 ++++++++++++
 .../typescript/data-sources/data_source.md    | 120 ++++++++++++++++++
 docs/cdktf/typescript/index.md                |  27 ++++
 docs/cdktf/typescript/resources/resource.md   | 102 +++++++++++++++
 docs/data-sources/data_source.md              |  29 ++++-
 docs/resources/resource.md                    |   6 +-
 10 files changed, 581 insertions(+), 22 deletions(-)
 create mode 100644 .github/workflows/cdktf-docs.yml
 create mode 100644 docs/cdktf/python/data-sources/data_source.md
 create mode 100644 docs/cdktf/python/index.md
 create mode 100644 docs/cdktf/python/resources/resource.md
 create mode 100644 docs/cdktf/typescript/data-sources/data_source.md
 create mode 100644 docs/cdktf/typescript/index.md
 create mode 100644 docs/cdktf/typescript/resources/resource.md

diff --git a/.github/workflows/cdktf-docs.yml b/.github/workflows/cdktf-docs.yml
new file mode 100644
index 00000000..35040691
--- /dev/null
+++ b/.github/workflows/cdktf-docs.yml
@@ -0,0 +1,69 @@
+name: CDKTF docs
+
+on:
+  workflow_dispatch:
+
+jobs:
+  cdktfDocs:
+    runs-on: ubuntu-latest
+    container:
+      image: docker.mirror.hashicorp.services/hashicorp/jsii-terraform
+    env:
+      CHECKPOINT_DISABLE: "1"
+    timeout-minutes: 120
+    steps:
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
+      - name: Get yarn cache directory path
+        id: global-cache-dir-path
+        run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT
+      - uses: actions/cache@88522ab9f39a2ea568f7027eddc7d8d8bc9d59c8 # v3.3.1
+        id: global-cache
+        with:
+          path: ${{ steps.global-cache-dir-path.outputs.dir }}
+          key: ${{ runner.os }}-integration-yarn-${{ hashFiles('**/yarn.lock') }}
+
+      - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
+      - name: Set up Go
+        uses: actions/setup-go@fac708d6674e30b6ba41289acaab6d4b75aa0753 # v4.0.1
+        with:
+          go-version-file: go.mod
+          cache: true
+
+      - run: go mod download
+
+      - name: Build Go binary
+        run: |
+          go build -o terraform-provider-null
+
+      - name: Setup Node.js
+        uses: actions/setup-node@64ed1c7eab4cce3362f8c340dee64e5eaeef8f7c # v3.6.0
+        with:
+          node-version: "18.x"
+
+      - name: Install cdktf-registry-docs
+        run: npm install -g cdktf-registry-docs@1.11.0
+
+      - name: Run conversion
+        run: |
+          chmod +x terraform-provider-null
+
+          cdktf-registry-docs convert --language='typescript,python' --parallel-conversions-per-document=2 --provider-from-binary="$(pwd)/terraform-provider-null" --additional-provider-requirements="hashicorp/aws@~> 5.0.0" .
+        env:
+          TF_PLUGIN_CACHE_DIR: ${{ steps.global-cache-dir-path.outputs.dir }}/terraform-plugins
+
+      - name: Git push cdktf docs
+        run: |
+          git config --global user.name "${{ env.CI_COMMIT_AUTHOR }}"
+          git config --global user.email "${{ env.CI_COMMIT_EMAIL }}"
+          git checkout -b "cdktf-docs-${{ github.sha }}"
+          git add .
+          git commit -a -m "Update cdktf docs"
+          git push "https://${{ env.CI_COMMIT_AUTHOR }}:${{ secrets.TF_DEVEX_COMMIT_GITHUB_TOKEN }}@github.com/${{ github.repository }}.git"
+
+      - name: Create Pull Request
+        uses: peter-evans/create-pull-request@284f54f989303d2699d373481a0cfa13ad5a6666 # v5.0.1
+        with:
+          commit-message: "docs: update cdktf documentation"
+          title: "docs: update cdktf documentation"
+          body: "This PR updates the cdktf related documentation based on the current HCL-based documentation. It is automatically created by the cdktf-documentation GitHub action."
+          token: ${{ secrets.TF_DEVEX_COMMIT_GITHUB_TOKEN }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 7126fe18..6bc16eb2 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -4,7 +4,7 @@ on:
   workflow_dispatch:
     inputs:
       versionNumber:
-        description: 'Release version number (v#.#.#)'
+        description: "Release version number (v#.#.#)"
         type: string
         required: true
 
@@ -57,7 +57,8 @@ jobs:
           git push "https://${{ env.CI_COMMIT_AUTHOR }}:${{ secrets.TF_DEVEX_COMMIT_GITHUB_TOKEN }}@github.com/${{ github.repository }}.git"
 
   release-tag:
-    needs: changelog
+    needs:
+      - changelog
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
@@ -75,12 +76,12 @@ jobs:
         run: |
           git config --global user.name "${{ env.CI_COMMIT_AUTHOR }}"
           git config --global user.email "${{ env.CI_COMMIT_EMAIL }}"
-
+
           git tag "${{ inputs.versionNumber }}"
           git push "https://${{ env.CI_COMMIT_AUTHOR }}:${{ secrets.TF_DEVEX_COMMIT_GITHUB_TOKEN }}@github.com/${{ github.repository }}.git" "${{ inputs.versionNumber }}"
 
   release-notes:
-    needs: [ changelog-version, changelog, release-tag ]
+    needs: [changelog-version, changelog, release-tag]
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # v3.5.2
@@ -100,23 +101,23 @@ jobs:
           retention-days: 1
 
   terraform-provider-release:
-    name: 'Terraform Provider Release'
-    needs: [ release-notes ]
+    name: "Terraform Provider Release"
+    needs: [release-notes]
     permissions:
       contents: write # Needed for goreleaser to create GitHub release
     uses: hashicorp/ghaction-terraform-provider-release/.github/workflows/hashicorp.yml@01981baad5d35ce2342924e60ae91cf69fe31fd0 # v2.3.0
     secrets:
-      hc-releases-key-prod: '${{ secrets.HC_RELEASES_KEY_PROD }}'
-      hc-releases-key-staging: '${{ secrets.HC_RELEASES_KEY_STAGING }}'
-      hc-releases-github-token: '${{ secrets.HASHI_RELEASES_GITHUB_TOKEN }}'
-      hc-releases-terraform-registry-sync-token: '${{ secrets.TF_PROVIDER_RELEASE_TERRAFORM_REGISTRY_SYNC_TOKEN }}'
-      setup-signore-github-token: '${{ secrets.HASHI_SIGNORE_GITHUB_TOKEN }}'
-      signore-client-id: '${{ secrets.SIGNORE_CLIENT_ID }}'
-      signore-client-secret: '${{ secrets.SIGNORE_CLIENT_SECRET }}'
-      hc-releases-host-staging: '${{ secrets.HC_RELEASES_HOST_STAGING }}'
-      hc-releases-host-prod: '${{ secrets.HC_RELEASES_HOST_PROD }}'
+      hc-releases-key-prod: "${{ secrets.HC_RELEASES_KEY_PROD }}"
+      hc-releases-key-staging: "${{ secrets.HC_RELEASES_KEY_STAGING }}"
+      hc-releases-github-token: "${{ secrets.HASHI_RELEASES_GITHUB_TOKEN }}"
+      hc-releases-terraform-registry-sync-token: "${{ secrets.TF_PROVIDER_RELEASE_TERRAFORM_REGISTRY_SYNC_TOKEN }}"
+      setup-signore-github-token: "${{ secrets.HASHI_SIGNORE_GITHUB_TOKEN }}"
+      signore-client-id: "${{ secrets.SIGNORE_CLIENT_ID }}"
+      signore-client-secret: "${{ secrets.SIGNORE_CLIENT_SECRET }}"
+      hc-releases-host-staging: "${{ secrets.HC_RELEASES_HOST_STAGING }}"
+      hc-releases-host-prod: "${{ secrets.HC_RELEASES_HOST_PROD }}"
     with:
       release-notes: true
-      setup-go-version-file: 'go.mod'
+      setup-go-version-file: "go.mod"
       # Product Version (e.g. v1.2.3)
-      product-version: '${{ inputs.versionNumber }}'
+      product-version: "${{ inputs.versionNumber }}"
diff --git a/docs/cdktf/python/data-sources/data_source.md b/docs/cdktf/python/data-sources/data_source.md
new file mode 100644
index 00000000..673b8ac8
--- /dev/null
+++ b/docs/cdktf/python/data-sources/data_source.md
@@ -0,0 +1,108 @@
+---
+
+
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "null_data_source Data Source - terraform-provider-null"
+subcategory: ""
+description: |-
+  The null_data_source data source implements the standard data source lifecycle but does not
+  interact with any external APIs.
+  Historically, the null_data_source was typically used to construct intermediate values to re-use elsewhere in configuration. The
+  same can now be achieved using locals https://developer.hashicorp.com/terraform/language/values/locals or the terraform_data resource type https://developer.hashicorp.com/terraform/language/resources/terraform-data in Terraform 1.4 and later.
+---
+
+# null_data_source
+
+The `null_data_source` data source implements the standard data source lifecycle but does not
+interact with any external APIs.
+
+Historically, the `null_data_source` was typically used to construct intermediate values to re-use elsewhere in configuration. The
+same can now be achieved using [locals](https://developer.hashicorp.com/terraform/language/values/locals) or the [terraform_data resource type](https://developer.hashicorp.com/terraform/language/resources/terraform-data) in Terraform 1.4 and later.
+
+## Example Usage
+
+```python
+import constructs as constructs
+import cdktf as cdktf
+# Provider bindings are generated by running cdktf get.
+# See https://cdk.tf/provider-generation for more details.
+import ...gen.providers.aws as aws
+import ...gen.providers.null as NullProvider
+class MyConvertedCode(cdktf.TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # In most cases loops should be handled in the programming language context and
+        # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+        # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+        # you need to keep this like it is.
+        aws_instance_blue_count = cdktf.TerraformCount.of(
+            cdktf.Token.as_number("3"))
+        aws_instance_blue = aws.instance.Instance(self, "blue",
+            ami="ami-0dcc1e21636832c5d",
+            instance_type="m5.large",
+            count=aws_instance_blue_count
+        )
+        # In most cases loops should be handled in the programming language context and
+        # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+        # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+        # you need to keep this like it is.
+        aws_instance_green_count = cdktf.TerraformCount.of(
+            cdktf.Token.as_number("3"))
+        aws_instance_green = aws.instance.Instance(self, "green",
+            ami="ami-0dcc1e21636832c5d",
+            instance_type="m5.large",
+            count=aws_instance_green_count
+        )
+        data_null_data_source_values =
+        null.data_null_data_source.DataNullDataSource(self, "values",
+            inputs={
+                "all_server_ids": cdktf.Token.as_string(
+                    cdktf.Fn.concat([
+                        cdktf.property_access(aws_instance_green, ["*", "id"]),
+                        cdktf.property_access(aws_instance_blue, ["*", "id"])
+                    ])),
+                "all_server_ips": cdktf.Token.as_string(
+                    cdktf.Fn.concat([
+                        cdktf.property_access(aws_instance_green, ["*", "private_ip"]),
+                        cdktf.property_access(aws_instance_blue, ["*", "private_ip"])
+                    ]))
+            }
+        )
+        cdktf.TerraformOutput(self, "all_server_ids",
+            value=cdktf.property_access(data_null_data_source_values.outputs, ["\"all_server_ids\""
+            ])
+        )
+        cdktf.TerraformOutput(self, "all_server_ips",
+            value=cdktf.property_access(data_null_data_source_values.outputs, ["\"all_server_ips\""
+            ])
+        )
+        aws.elb.Elb(self, "main",
+            instances=cdktf.Token.as_list(
+                cdktf.property_access(data_null_data_source_values.outputs, ["\"all_server_ids\""
+                ])),
+            listener=[ElbListener(
+                instance_port=8000,
+                instance_protocol="http",
+                lb_port=80,
+                lb_protocol="http"
+            )
+            ]
+        )
+```
+
+
+
+## Schema
+
+### Optional
+
+- `has_computed_default` (String) If set, its literal value will be stored and returned. If not, its value defaults to `"default"`. This argument exists primarily for testing and has little practical use.
+- `inputs` (Map of String) A map of arbitrary strings that is copied into the `outputs` attribute, and accessible directly for interpolation.
+
+### Read-Only
+
+- `id` (String, Deprecated) This attribute is only present for some legacy compatibility issues and should not be used. It will be removed in a future version.
+- `outputs` (Map of String) After the data source is "read", a copy of the `inputs` map.
+- `random` (String) A random value. This is primarily for testing and has little practical use; prefer the [hashicorp/random provider](https://registry.terraform.io/providers/hashicorp/random) for more practical random number use-cases.
+
+
\ No newline at end of file
diff --git a/docs/cdktf/python/index.md b/docs/cdktf/python/index.md
new file mode 100644
index 00000000..d7281f75
--- /dev/null
+++ b/docs/cdktf/python/index.md
@@ -0,0 +1,27 @@
+---
+page_title: "Provider: Null"
+description: |-
+  The null provider provides no-op constructs that can be useful helpers in tricky cases.
+---
+
+
+
+# Null Provider
+
+The `null` provider is a rather-unusual provider that has constructs that
+intentionally do nothing. This may sound strange, and indeed these constructs
+do not need to be used in most cases, but they can be useful in various
+situations to help orchestrate tricky behavior or work around limitations.
+
+The documentation of each feature of this provider, accessible via the
+navigation, gives examples of situations where these constructs may prove
+useful.
+
+Usage of the `null` provider can make a Terraform configuration harder to
+understand. While it can be useful in certain cases, it should be applied with
+care and other solutions preferred when available.
+
+
+## Schema
+
+
\ No newline at end of file
diff --git a/docs/cdktf/python/resources/resource.md b/docs/cdktf/python/resources/resource.md
new file mode 100644
index 00000000..5865ea23
--- /dev/null
+++ b/docs/cdktf/python/resources/resource.md
@@ -0,0 +1,80 @@
+---
+
+
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "null_resource Resource - terraform-provider-null"
+subcategory: ""
+description: |-
+  The null_resource resource implements the standard resource lifecycle but takes no further action. On Terraform 1.4 and later, use the terraform_data resource type https://developer.hashicorp.com/terraform/language/resources/terraform-data instead.
+  The triggers argument allows specifying an arbitrary set of values that, when changed, will cause the resource to be replaced.
+---
+
+# null_resource
+
+The `null_resource` resource implements the standard resource lifecycle but takes no further action. On Terraform 1.4 and later, use the [terraform_data resource type](https://developer.hashicorp.com/terraform/language/resources/terraform-data) instead.
+
+The `triggers` argument allows specifying an arbitrary set of values that, when changed, will cause the resource to be replaced.
+
+## Example Usage
+
+```python
+import constructs as constructs
+import cdktf as cdktf
+# Provider bindings are generated by running cdktf get.
+# See https://cdk.tf/provider-generation for more details.
+import ...gen.providers.aws as aws
+import ...gen.providers.null as NullProvider
+class MyConvertedCode(cdktf.TerraformStack):
+    def __init__(self, scope, name):
+        super().__init__(scope, name)
+        # In most cases loops should be handled in the programming language context and
+        # not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+        # you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+        # you need to keep this like it is.
+        aws_instance_cluster_count = cdktf.TerraformCount.of(
+            cdktf.Token.as_number("3"))
+        aws_instance_cluster = aws.instance.Instance(self, "cluster",
+            ami="ami-0dcc1e21636832c5d",
+            instance_type="m5.large",
+            count=aws_instance_cluster_count
+        )
+        null_provider_resource_cluster = NullProvider.resource.Resource(self, "cluster_1",
+            connection=cdktf.SSHProvisionerConnection(
+                host=cdktf.Fn.element(
+                    cdktf.property_access(aws_instance_cluster, ["*", "public_ip"]), 0)
+            ),
+            triggers=[{
+                "cluster_instance_ids": cdktf.Fn.join(",",
+                    cdktf.Token.as_list(
+                        cdktf.property_access(aws_instance_cluster, ["*", "id"])))
+            }
+            ],
+            provisioners=[cdktf.FileProvisioner(
+                type="remote-exec",
+                inline=["bootstrap-cluster.sh " +
+                    cdktf.Token.as_string(
+                        cdktf.Fn.join(" ",
+                            cdktf.Token.as_list(
+                                cdktf.property_access(aws_instance_cluster, ["*", "private_ip"
+                                ]))))
+                ]
+            )
+            ]
+        )
+        # This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.
+        null_provider_resource_cluster.override_logical_id("cluster")
+```
+
+
+
+## Schema
+
+### Optional
+
+- `triggers` (Map of String) A map of arbitrary strings that, when changed, will force the null resource to be replaced, re-running any associated provisioners.
+
+### Read-Only
+
+- `id` (String) This is set to a random value at create time.
+
+
\ No newline at end of file
diff --git a/docs/cdktf/typescript/data-sources/data_source.md b/docs/cdktf/typescript/data-sources/data_source.md
new file mode 100644
index 00000000..8cd851ea
--- /dev/null
+++ b/docs/cdktf/typescript/data-sources/data_source.md
@@ -0,0 +1,120 @@
+---
+
+
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "null_data_source Data Source - terraform-provider-null"
+subcategory: ""
+description: |-
+  The null_data_source data source implements the standard data source lifecycle but does not
+  interact with any external APIs.
+  Historically, the null_data_source was typically used to construct intermediate values to re-use elsewhere in configuration. The
+  same can now be achieved using locals https://developer.hashicorp.com/terraform/language/values/locals or the terraform_data resource type https://developer.hashicorp.com/terraform/language/resources/terraform-data in Terraform 1.4 and later.
+---
+
+# null_data_source
+
+The `nullDataSource` data source implements the standard data source lifecycle but does not
+interact with any external APIs.
+
+Historically, the `nullDataSource` was typically used to construct intermediate values to re-use elsewhere in configuration. The
+same can now be achieved using [locals](https://developer.hashicorp.com/terraform/language/values/locals) or the [terraform_data resource type](https://developer.hashicorp.com/terraform/language/resources/terraform-data) in Terraform 1.4 and later.
+
+## Example Usage
+
+```typescript
+import * as constructs from "constructs";
+import * as cdktf from "cdktf";
+/*Provider bindings are generated by running cdktf get.
+See https://cdk.tf/provider-generation for more details.*/
+import * as aws from "./.gen/providers/aws";
+import * as NullProvider from "./.gen/providers/null";
+class MyConvertedCode extends cdktf.TerraformStack {
+  constructor(scope: constructs.Construct, name: string) {
+    super(scope, name);
+    /*In most cases loops should be handled in the programming language context and
+    not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+    you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+    you need to keep this like it is.*/
+    const awsInstanceBlueCount = cdktf.TerraformCount.of(
+      cdktf.Token.asNumber("3")
+    );
+    const awsInstanceBlue = new aws.instance.Instance(this, "blue", {
+      ami: "ami-0dcc1e21636832c5d",
+      instanceType: "m5.large",
+      count: awsInstanceBlueCount,
+    });
+    /*In most cases loops should be handled in the programming language context and
+    not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+    you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+    you need to keep this like it is.*/
+    const awsInstanceGreenCount = cdktf.TerraformCount.of(
+      cdktf.Token.asNumber("3")
+    );
+    const awsInstanceGreen = new aws.instance.Instance(this, "green", {
+      ami: "ami-0dcc1e21636832c5d",
+      instanceType: "m5.large",
+      count: awsInstanceGreenCount,
+    });
+    const dataNullDataSourceValues =
+      new null.dataNullDataSource.DataNullDataSource(this, "values", {
+        inputs: {
+          all_server_ids: cdktf.Token.asString(
+            cdktf.Fn.concat([
+              cdktf.propertyAccess(awsInstanceGreen, ["*", "id"]),
+              cdktf.propertyAccess(awsInstanceBlue, ["*", "id"]),
+            ])
+          ),
+          all_server_ips: cdktf.Token.asString(
+            cdktf.Fn.concat([
+              cdktf.propertyAccess(awsInstanceGreen, ["*", "private_ip"]),
+              cdktf.propertyAccess(awsInstanceBlue, ["*", "private_ip"]),
+            ])
+          ),
+        },
+      });
+    new cdktf.TerraformOutput(this, "all_server_ids", {
+      value: cdktf.propertyAccess(dataNullDataSourceValues.outputs, [
+        '"all_server_ids"',
+      ]),
+    });
+    new cdktf.TerraformOutput(this, "all_server_ips", {
+      value: cdktf.propertyAccess(dataNullDataSourceValues.outputs, [
+        '"all_server_ips"',
+      ]),
+    });
+    new aws.elb.Elb(this, "main", {
+      instances: cdktf.Token.asList(
+        cdktf.propertyAccess(dataNullDataSourceValues.outputs, [
+          '"all_server_ids"',
+        ])
+      ),
+      listener: [
+        {
+          instancePort: 8000,
+          instanceProtocol: "http",
+          lbPort: 80,
+          lbProtocol: "http",
+        },
+      ],
+    });
+  }
+}
+
+```
+
+
+
+## Schema
+
+### Optional
+
+- `hasComputedDefault` (String) If set, its literal value will be stored and returned. If not, its value defaults to `"default"`. This argument exists primarily for testing and has little practical use.
+- `inputs` (Map of String) A map of arbitrary strings that is copied into the `outputs` attribute, and accessible directly for interpolation.
+
+### Read-Only
+
+- `id` (String, Deprecated) This attribute is only present for some legacy compatibility issues and should not be used. It will be removed in a future version.
+- `outputs` (Map of String) After the data source is "read", a copy of the `inputs` map.
+- `random` (String) A random value. This is primarily for testing and has little practical use; prefer the [hashicorp/random provider](https://registry.terraform.io/providers/hashicorp/random) for more practical random number use-cases.
+
+
\ No newline at end of file
diff --git a/docs/cdktf/typescript/index.md b/docs/cdktf/typescript/index.md
new file mode 100644
index 00000000..d7281f75
--- /dev/null
+++ b/docs/cdktf/typescript/index.md
@@ -0,0 +1,27 @@
+---
+page_title: "Provider: Null"
+description: |-
+  The null provider provides no-op constructs that can be useful helpers in tricky cases.
+---
+
+
+
+# Null Provider
+
+The `null` provider is a rather-unusual provider that has constructs that
+intentionally do nothing. This may sound strange, and indeed these constructs
+do not need to be used in most cases, but they can be useful in various
+situations to help orchestrate tricky behavior or work around limitations.
+
+The documentation of each feature of this provider, accessible via the
+navigation, gives examples of situations where these constructs may prove
+useful.
+
+Usage of the `null` provider can make a Terraform configuration harder to
+understand. While it can be useful in certain cases, it should be applied with
+care and other solutions preferred when available.
+
+
+## Schema
+
+
\ No newline at end of file
diff --git a/docs/cdktf/typescript/resources/resource.md b/docs/cdktf/typescript/resources/resource.md
new file mode 100644
index 00000000..d87c36a6
--- /dev/null
+++ b/docs/cdktf/typescript/resources/resource.md
@@ -0,0 +1,102 @@
+---
+
+
+# generated by https://github.com/hashicorp/terraform-plugin-docs
+page_title: "null_resource Resource - terraform-provider-null"
+subcategory: ""
+description: |-
+  The null_resource resource implements the standard resource lifecycle but takes no further action. On Terraform 1.4 and later, use the terraform_data resource type https://developer.hashicorp.com/terraform/language/resources/terraform-data instead.
+  The triggers argument allows specifying an arbitrary set of values that, when changed, will cause the resource to be replaced.
+---
+
+# null_resource
+
+The `nullResource` resource implements the standard resource lifecycle but takes no further action. On Terraform 1.4 and later, use the [terraform_data resource type](https://developer.hashicorp.com/terraform/language/resources/terraform-data) instead.
+
+The `triggers` argument allows specifying an arbitrary set of values that, when changed, will cause the resource to be replaced.
+
+## Example Usage
+
+```typescript
+import * as constructs from "constructs";
+import * as cdktf from "cdktf";
+/*Provider bindings are generated by running cdktf get.
+See https://cdk.tf/provider-generation for more details.*/
+import * as aws from "./.gen/providers/aws";
+import * as NullProvider from "./.gen/providers/null";
+class MyConvertedCode extends cdktf.TerraformStack {
+  constructor(scope: constructs.Construct, name: string) {
+    super(scope, name);
+    /*In most cases loops should be handled in the programming language context and
+    not inside of the Terraform context. If you are looping over something external, e.g. a variable or a file input
+    you should consider using a for loop. If you are looping over something only known to Terraform, e.g. a result of a data source
+    you need to keep this like it is.*/
+    const awsInstanceClusterCount = cdktf.TerraformCount.of(
+      cdktf.Token.asNumber("3")
+    );
+    const awsInstanceCluster = new aws.instance.Instance(this, "cluster", {
+      ami: "ami-0dcc1e21636832c5d",
+      instanceType: "m5.large",
+      count: awsInstanceClusterCount,
+    });
+    const nullProviderResourceCluster = new NullProvider.resource.Resource(
+      this,
+      "cluster_1",
+      {
+        connection: {
+          host: cdktf.Fn.element(
+            cdktf.propertyAccess(awsInstanceCluster, ["*", "public_ip"]),
+            0
+          ),
+        },
+        triggers: [
+          {
+            cluster_instance_ids: cdktf.Fn.join(
+              ",",
+              cdktf.Token.asList(
+                cdktf.propertyAccess(awsInstanceCluster, ["*", "id"])
+              )
+            ),
+          },
+        ],
+        provisioners: [
+          {
+            type: "remote-exec",
+            inline: [
+              "bootstrap-cluster.sh " +
+                cdktf.Token.asString(
+                  cdktf.Fn.join(
+                    " ",
+                    cdktf.Token.asList(
+                      cdktf.propertyAccess(awsInstanceCluster, [
+                        "*",
+                        "private_ip",
+                      ])
+                    )
+                  )
+                ),
+            ],
+          },
+        ],
+      }
+    );
+    /*This allows the Terraform resource name to match the original name. You can remove the call if you don't need them to match.*/
+    nullProviderResourceCluster.overrideLogicalId("cluster");
+  }
+}
+
+```
+
+
+
+## Schema
+
+### Optional
+
+- `triggers` (Map of String) A map of arbitrary strings that, when changed, will force the null resource to be replaced, re-running any associated provisioners.
+
+### Read-Only
+
+- `id` (String) This is set to a random value at create time.
+
+
\ No newline at end of file
diff --git a/docs/data-sources/data_source.md b/docs/data-sources/data_source.md
index 58648d56..5bf7deae 100644
--- a/docs/data-sources/data_source.md
+++ b/docs/data-sources/data_source.md
@@ -20,6 +20,23 @@ same can now be achieved using [locals](https://developer.hashicorp.com/terrafor
 ## Example Usage
 
 ```terraform
+resource "aws_instance" "green" {
+  count         = 3
+  ami           = "ami-0dcc1e21636832c5d"
+  instance_type = "m5.large"
+
+  # ...
+}
+
+resource "aws_instance" "blue" {
+  count         = 3
+  ami           = "ami-0dcc1e21636832c5d"
+  instance_type = "m5.large"
+
+  # ...
+}
+
+
 data "null_data_source" "values" {
   inputs = {
     all_server_ids = concat(
@@ -34,9 +51,15 @@ data "null_data_source" "values" {
 }
 
 resource "aws_elb" "main" {
-  # ...
-
   instances = data.null_data_source.values.outputs["all_server_ids"]
+
+  # ...
+  listener {
+    instance_port     = 8000
+    instance_protocol = "http"
+    lb_port           = 80
+    lb_protocol       = "http"
+  }
 }
 
 output "all_server_ids" {
@@ -49,6 +72,7 @@ output "all_server_ips" {
 ```
 
+
 ## Schema
 
 ### Optional
 
@@ -61,4 +85,3 @@ output "all_server_ips" {
 - `id` (String, Deprecated) This attribute is only present for some legacy compatibility issues and should not be used. It will be removed in a future version.
 - `outputs` (Map of String) After the data source is "read", a copy of the `inputs` map.
 - `random` (String) A random value. This is primarily for testing and has little practical use; prefer the [hashicorp/random provider](https://registry.terraform.io/providers/hashicorp/random) for more practical random number use-cases.
-
diff --git a/docs/resources/resource.md b/docs/resources/resource.md
index 3ba61be7..b3106f2b 100644
--- a/docs/resources/resource.md
+++ b/docs/resources/resource.md
@@ -17,7 +17,9 @@ The `triggers` argument allows specifying an arbitrary set of values that, when
 
 ```terraform
 resource "aws_instance" "cluster" {
-  count = 3
+  count         = 3
+  ami           = "ami-0dcc1e21636832c5d"
+  instance_type = "m5.large"
 
   # ...
 }
@@ -53,6 +55,7 @@ resource "null_resource" "cluster" {
 ```
 
+
 ## Schema
 
 ### Optional
 
@@ -62,4 +65,3 @@ resource "null_resource" "cluster" {
 ### Read-Only
 
 - `id` (String) This is set to a random value at create time.
-