From b866fc7f026c76322f4572c8daa88c944a946a35 Mon Sep 17 00:00:00 2001 From: Paul Gottschling Date: Mon, 15 Jul 2024 10:07:26 -0400 Subject: [PATCH] Add a two-part Terraform starter guide Closes #41055 Add a learning series for users who want to get started with managing Teleport resources using Terraform. The guide takes the user through some fundamental cluster setup tasks: deploying Agents, enrolling resources, adding labels, configuring roles, and setting up SSO. The series includes two guides: - Part One: Enrolling resources - Part Two: Configuring RBAC Part One of the series is based on the "Deploy Agents with Terraform" guide. This change adds instructions to the guide to label infrastructure resources so the user can access those resources with the roles configured in Part Two. Other changes: - Rename `agent-pool-terraform` to `terraform-starter`. - Rename the "Dynamic Resources" guide to be more explicitly about Infrastructure as Code. Organize this section into subsections to make room for the new starter guide. - Edit the Further Reading discussion in Part One. Move the static resource discussion to Further Reading because, otherwise, there is too much explanatory text between instructions. Also mention auto-discovery. 
--- docs/config.json | 114 ++- .../agents/deploy-agents-terraform.mdx | 599 -------------- ...sources.mdx => infrastructure-as-code.mdx} | 4 +- .../managing-resources}/access-list.mdx | 0 .../agentless-ssh-servers.mdx | 0 .../managing-resources/managing-resources.mdx | 9 + .../managing-resources}/user-and-role.mdx | 0 .../teleport-operator.mdx | 0 .../teleport-operator-helm.mdx | 0 .../teleport-operator-standalone.mdx | 0 .../terraform-provider.mdx | 0 .../terraform-starter.mdx | 38 + .../terraform-starter/enroll-resources.mdx | 749 ++++++++++++++++++ .../terraform-starter/rbac.mdx | 537 +++++++++++++ docs/pages/management/introduction.mdx | 6 +- .../agent-installation}/inputs.tf | 6 +- .../agent-installation}/outputs.tf | 0 .../agent-installation}/required_providers.tf | 0 .../agent-installation}/token.tf | 0 .../agent-installation}/userdata | 0 .../aws/ec2-instance.tf | 0 .../aws/inputs.tf | 0 .../aws/required_providers.tf | 0 .../azure/azure-instance.tf | 0 .../azure/inputs.tf | 0 .../azure/required_providers.tf | 0 .../terraform-starter/env_role/env_role.tf | 59 ++ examples/terraform-starter/env_role/inputs.tf | 13 + .../env_role/required_providers.tf | 7 + .../terraform-starter/env_role/reviewer.tf | 24 + .../gcp/gcp-instance.tf | 6 + .../gcp/inputs.tf | 0 .../gcp/required_providers.tf | 0 .../main.tf | 0 examples/terraform-starter/oidc/auth_pref.tf | 16 + examples/terraform-starter/oidc/inputs.tf | 34 + .../terraform-starter/oidc/oidc_connector.tf | 13 + .../oidc/required_providers.tf | 7 + examples/terraform-starter/saml/auth_pref.tf | 16 + examples/terraform-starter/saml/inputs.tf | 28 + .../saml/required_providers.tf | 7 + examples/terraform-starter/saml/saml.tf | 13 + 42 files changed, 1672 insertions(+), 633 deletions(-) delete mode 100644 docs/pages/enroll-resources/agents/deploy-agents-terraform.mdx rename docs/pages/management/{dynamic-resources.mdx => infrastructure-as-code.mdx} (98%) rename docs/pages/management/{dynamic-resources => 
infrastructure-as-code/managing-resources}/access-list.mdx (100%) rename docs/pages/management/{dynamic-resources => infrastructure-as-code/managing-resources}/agentless-ssh-servers.mdx (100%) create mode 100644 docs/pages/management/infrastructure-as-code/managing-resources/managing-resources.mdx rename docs/pages/management/{dynamic-resources => infrastructure-as-code/managing-resources}/user-and-role.mdx (100%) rename docs/pages/management/{dynamic-resources => infrastructure-as-code}/teleport-operator.mdx (100%) rename docs/pages/management/{dynamic-resources => infrastructure-as-code/teleport-operator}/teleport-operator-helm.mdx (100%) rename docs/pages/management/{dynamic-resources => infrastructure-as-code/teleport-operator}/teleport-operator-standalone.mdx (100%) rename docs/pages/management/{dynamic-resources => infrastructure-as-code}/terraform-provider.mdx (100%) create mode 100644 docs/pages/management/infrastructure-as-code/terraform-starter.mdx create mode 100644 docs/pages/management/infrastructure-as-code/terraform-starter/enroll-resources.mdx create mode 100644 docs/pages/management/infrastructure-as-code/terraform-starter/rbac.mdx rename examples/{agent-pool-terraform/teleport => terraform-starter/agent-installation}/inputs.tf (87%) rename examples/{agent-pool-terraform/teleport => terraform-starter/agent-installation}/outputs.tf (100%) rename examples/{agent-pool-terraform/teleport => terraform-starter/agent-installation}/required_providers.tf (100%) rename examples/{agent-pool-terraform/teleport => terraform-starter/agent-installation}/token.tf (100%) rename examples/{agent-pool-terraform/teleport => terraform-starter/agent-installation}/userdata (100%) rename examples/{agent-pool-terraform => terraform-starter}/aws/ec2-instance.tf (100%) rename examples/{agent-pool-terraform => terraform-starter}/aws/inputs.tf (100%) rename examples/{agent-pool-terraform => terraform-starter}/aws/required_providers.tf (100%) rename 
examples/{agent-pool-terraform => terraform-starter}/azure/azure-instance.tf (100%) rename examples/{agent-pool-terraform => terraform-starter}/azure/inputs.tf (100%) rename examples/{agent-pool-terraform => terraform-starter}/azure/required_providers.tf (100%) create mode 100644 examples/terraform-starter/env_role/env_role.tf create mode 100644 examples/terraform-starter/env_role/inputs.tf create mode 100644 examples/terraform-starter/env_role/required_providers.tf create mode 100644 examples/terraform-starter/env_role/reviewer.tf rename examples/{agent-pool-terraform => terraform-starter}/gcp/gcp-instance.tf (84%) rename examples/{agent-pool-terraform => terraform-starter}/gcp/inputs.tf (100%) rename examples/{agent-pool-terraform => terraform-starter}/gcp/required_providers.tf (100%) rename examples/{agent-pool-terraform => terraform-starter}/main.tf (100%) create mode 100644 examples/terraform-starter/oidc/auth_pref.tf create mode 100644 examples/terraform-starter/oidc/inputs.tf create mode 100644 examples/terraform-starter/oidc/oidc_connector.tf create mode 100644 examples/terraform-starter/oidc/required_providers.tf create mode 100644 examples/terraform-starter/saml/auth_pref.tf create mode 100644 examples/terraform-starter/saml/inputs.tf create mode 100644 examples/terraform-starter/saml/required_providers.tf create mode 100644 examples/terraform-starter/saml/saml.tf diff --git a/docs/config.json b/docs/config.json index 616e5c93d019a..1c2d404ba888d 100644 --- a/docs/config.json +++ b/docs/config.json @@ -655,37 +655,59 @@ "slug": "/management/introduction/" }, { - "title": "Using Dynamic Resources", - "slug": "/management/dynamic-resources/", + "title": "Using Infrastructure as Code", + "slug": "/management/infrastructure-as-code/", "entries": [ - { - "title": "Managing Users and Roles", - "slug": "/management/dynamic-resources/user-and-role/" - }, - { - "title": "Registering Agentless OpenSSH", - "slug": 
"/management/dynamic-resources/agentless-ssh-servers/" - }, - { - "title": "Managing Access Lists", - "slug": "/management/dynamic-resources/access-list/" - }, + { + "title": "Terraform Starter Setup", + "slug": "/management/infrastructure-as-code/terraform-starter/", + "entries": [ + { + "title": "Part 1: Enroll Resources", + "slug": "/management/infrastructure-as-code/terraform-starter/enroll-resources/" + }, + { + "title": "Part 2: Configure RBAC", + "slug": "/management/infrastructure-as-code/terraform-starter/rbac/" + } + ] + }, + { + "title": "Managing Resources", + "slug": "/management/infrastructure-as-code/managing-resources/managing-resources/", + "entries": [ + { + "title": "Managing Users and Roles", + "slug": "/management/infrastructure-as-code/managing-resources/user-and-role/" + }, + { + "title": "Registering Agentless OpenSSH", + "slug": "/management/infrastructure-as-code/managing-resources/agentless-ssh-servers/" + }, + { + "title": "Managing Access Lists", + "slug": "/management/infrastructure-as-code/managing-resources/access-list/" + } + ] + }, { "title": "Kubernetes Operator", - "slug": "/management/dynamic-resources/teleport-operator/" - }, - { - "title": "Kubernetes Operator in teleport-cluster Helm chart", - "slug": "/management/dynamic-resources/teleport-operator-helm/", - "forScopes": ["oss","enterprise"] - }, - { - "title": "Standalone Kubernetes Operator", - "slug": "/management/dynamic-resources/teleport-operator-standalone/" + "slug": "/management/infrastructure-as-code/teleport-operator/", + "entries": [ + { + "title": "Kubernetes Operator in teleport-cluster Helm chart", + "slug": "/management/infrastructure-as-code/teleport-operator/teleport-operator-helm/", + "forScopes": ["oss","enterprise"] + }, + { + "title": "Standalone Kubernetes Operator", + "slug": "/management/infrastructure-as-code/teleport-operator/teleport-operator-standalone/" + } + ] }, { "title": "Set up the Terraform Provider", - "slug": 
"/management/dynamic-resources/terraform-provider/" + "slug": "/management/infrastructure-as-code/terraform-provider/" } ] }, @@ -1486,7 +1508,7 @@ }, { "source": "/agents/deploy-agents-terraform/", - "destination": "/enroll-resources/agents/deploy-agents-terraform/", + "destination": "/management/infrastructure-as-code/terraform-starter/enroll-resources/", "permanent": true }, { @@ -2673,6 +2695,46 @@ "source": "/user-manual/", "destination": "/", "permanent": true + }, + { + "source": "/management/dynamic-resources/", + "destination": "/management/infrastructure-as-code/", + "permanent": true + }, + { + "source": "/management/dynamic-resources/access-list/", + "destination": "/management/infrastructure-as-code/managing-resources/access-list/", + "permanent": true + }, + { + "source": "/management/dynamic-resources/agentless-ssh-servers/", + "destination": "/management/infrastructure-as-code/managing-resources/agentless-ssh-servers/", + "permanent": true + }, + { + "source": "/management/dynamic-resources/teleport-operator-helm/", + "destination": "/management/infrastructure-as-code/teleport-operator/teleport-operator-helm/", + "permanent": true + }, + { + "source": "/management/dynamic-resources/teleport-operator-standalone/", + "destination": "/management/infrastructure-as-code/teleport-operator/teleport-operator-standalone/", + "permanent": true + }, + { + "source": "/management/dynamic-resources/teleport-operator/", + "destination": "/management/infrastructure-as-code/teleport-operator/", + "permanent": true + }, + { + "source": "/management/dynamic-resources/terraform-provider/", + "destination": "/management/infrastructure-as-code/terraform-provider/", + "permanent": true + }, + { + "source": "/management/dynamic-resources/user-and-role/", + "destination": "/management/infrastructure-as-code/managing-resources/user-and-role/", + "permanent": true } ] } diff --git a/docs/pages/enroll-resources/agents/deploy-agents-terraform.mdx 
b/docs/pages/enroll-resources/agents/deploy-agents-terraform.mdx deleted file mode 100644 index e4b912e7f2dfa..0000000000000 --- a/docs/pages/enroll-resources/agents/deploy-agents-terraform.mdx +++ /dev/null @@ -1,599 +0,0 @@ ---- -title: "Deploy Agents with Terraform" -description: "In this guide, we will show you how to deploy a pool of Agents so you can apply dynamic resources to enroll your infrastructure with Teleport." ---- - -## How it works - -An agent is a Teleport instance configured to run one or more Teleport services -in order to proxy infrastructure resources. For a brief architectural overview -of how agents run in a Teleport cluster, read the [Introduction to Teleport -Agents](introduction.mdx). - -This guide shows you how to deploy a pool of Agents running on virtual -machines by declaring it as code using Terraform. - -There are several methods you can use to join a Agents to your cluster, -which we discuss in the [Joining Services to your -Cluster](join-services-to-your-cluster.mdx) guide. In this guide, we will use -the **join token** method, where the operator stores a secure token on the Auth -Service, and an agent presents the token in order to join a cluster. - -No matter which join method you use, it will involve the following Terraform -resources: - -- Compute instances to run Teleport services -- A join token for each compute instance in the agent pool - -![An Agent pool](../../../img/tf-agent-diagram.png) - -## Prerequisites - -(!docs/pages/includes/edition-prereqs-tabs.mdx!) - - - -We recommend following this guide on a fresh Teleport demo cluster so you can -see how an agent pool works. After you are familiar with the setup, apply the -lessons from this guide to protect your infrastructure. 
You can get started with -a demo cluster using: -- A demo deployment on a [Linux server](../../deploy-a-cluster/linux-demo.mdx) -- A [Teleport Enterprise (managed) trial](https://goteleport.com/signup) - - - -- An AWS, Google Cloud, or Azure account with permissions to create virtual - machine instances. - -- Cloud infrastructure that enables virtual machine instances to connect to the - Teleport Proxy Service. For example: - - An AWS subnet with a public NAT gateway or NAT instance. - - Google Cloud NAT - - Azure NAT Gateway - - In minimum-security demo clusters, you can also configure the VM instances you - deploy to have public IP addresses. - -- Terraform v(=terraform.version=) or higher. - -- An identity file for the Teleport Terraform provider. Make sure you are - familiar with [how to set up the Teleport Terraform - provider](../../management/dynamic-resources/terraform-provider.mdx) before - following this guide. - -- (!docs/pages/includes/tctl.mdx!) - -## Step 1/3. Import the Terraform module - -1. Navigate to the directory where you plan to organize files for your root - Terraform module. - -1. Move the identity file for the Teleport Terraform provider into your project - directory so the Terraform provider can access it. Name the file - `terraform-identity`. - - - - If you don't have an identity file available, make sure you have followed the - [prerequisites for this guide](#prerequisites). - - - -1. Fetch the Teleport code repository and copy the example Terraform - configuration for this project into your current working directory. 
Copy - the appropriate child module for your cloud provider into a subdirectory - called `cloud` and HCL configurations for Teleport resources into a - subdirectory called `teleport`: - - - - - ```code - $ git clone --depth=1 https://github.com/gravitational/teleport teleport-clone - $ cp -R teleport-clone/examples/agent-pool-terraform/teleport teleport - $ cp -R teleport-clone/examples/agent-pool-terraform/aws cloud - $ rm -rf teleport-clone - ``` - - - - - ```code - $ git clone --depth=1 https://github.com/gravitational/teleport teleport-clone - $ cp -R teleport-clone/examples/agent-pool-terraform/teleport teleport - $ cp -R teleport-clone/examples/agent-pool-terraform/gcp cloud - $ rm -rf teleport-clone - ``` - - - - - ```code - $ git clone --depth=1 https://github.com/gravitational/teleport teleport-clone - $ cp -R teleport-clone/examples/agent-pool-terraform/teleport teleport - $ cp -R teleport-clone/examples/agent-pool-terraform/azure cloud - $ rm -rf teleport-clone - ``` - - - - -1. 
Create a file called `main.tf` with the following content, which imports the - `agent-pool-terraform` module for your cloud provider: - - - - -```hcl -module "teleport" { - source = "./teleport" - agent_count = 2 - agent_roles = ["Node"] - proxy_service_address = "teleport.example.com:443" - teleport_edition = "oss" - teleport_version = "(=teleport.version=)" -} - -module "agents" { - region = "" - source = "./cloud" - subnet_id = "" - userdata_scripts = module.teleport.userdata_scripts -} -``` - - - - -```hcl -module "teleport" { - source = "./teleport" - agent_count = 2 - agent_roles = ["Node"] - proxy_service_address = "teleport.example.com:443" - teleport_edition = "oss" - teleport_version = "(=teleport.version=)" -} - -module "agents" { - gcp_zone = "us-east1-b" - google_project = "" - source = "./cloud" - subnet_id = "" - userdata_scripts = module.teleport.userdata_scripts -} -``` - - - - -```hcl -module "teleport" { - source = "./teleport" - agent_count = 2 - agent_roles = ["Node"] - proxy_service_address = "teleport.example.com:443" - teleport_edition = "oss" - teleport_version = "(=teleport.version=)" -} - -module "agents" { - azure_resource_group = "" - public_key_path = "" - region = "East US" - source = "./cloud" - subnet_id = "" - userdata_scripts = module.teleport.userdata_scripts -} -``` - - - - -Edit the `module "teleport"` block in `main.tf` as follows: - -1. Assign `agent_count` to `2` for high availability. As you scale your Teleport - usage, you can increase this count to ease the load on each agent. - -1. Modify the elements of the `agent_roles` list. - - The Teleport Auth Service associates a join token with one or more roles, - identifying the Teleport services that are allowed to use the token. 
Modify - `agent_roles` to include the token roles that correspond to the Teleport - services you plan to run on your agent nodes: - - | Role value | Service it enables | - |-------------|------------------------------| - | `App` | Teleport Application Service | - | `Discovery` | Teleport Discovery Service | - | `Db` | Teleport Database Service | - | `Kube` | Teleport Kubernetes Service | - | `Node` | Teleport SSH Service | - - The elements of the `agent_roles` list also determine which Teleport services - the agents deployed by the `agent-pool-terraform` module will run. For - example, if the value of `agent_roles` is `["Node", "Db"]`, the configuration - file enables the Teleport SSH Service and Teleport Database Service. - - If the value of the `agent_roles` input variable includes the `Node` role, - the configuration also adds the `role: agent-pool` label to the Teleport SSH - Service on each instance. This makes it easier to access hosts in the agent - pool later. The script makes Teleport the only option for accessing agent - instances by disabling OpenSSH on startup and deleting any authorized public - keys. - -1. Assign `proxy_service_address` to the host and HTTPS port of your Teleport - Proxy Service, e.g., `mytenant.teleport.sh:443`. - - - - Make sure to include the port. - - - -1. Make sure `teleport_edition` matches your Teleport edition. Assign this to - `oss`, `cloud`, or `enterprise`. The default is `oss`. - -1. If needed, change the value of `teleport_version` to the version of Teleport - you want to run on your agents. It must be either the same major version as - your Teleport cluster or one major version behind. - -Edit the `module "agents"` block in `main.tf` as follows: - -1. 
If you are deploying your instance in a minimum-security demo environment and - do not have a NAT gateway, NAT instance, or other method for connecting your - instances to the Teleport Proxy Service, modify the `module` block to - associate a public IP address with each agent instance: - - ```hcl - insecure_direct_access=true - ``` - -1. Assign the remaining input variables depending on your cloud provider: - - - - -1. Assign `region` to the AWS region where you plan to deploy Teleport agents, - such as `us-east-1`. -1. For `subnet_id`, include the ID of the subnet where you will deploy Teleport - agents. - - - - -1. Assign `google_project` to the name of your Google Cloud project and - `gcp_zone` to the zone where you will deploy agents, such as `us-east1-b`. -1. For `subnet_id`, include the name or URI of the Google Cloud subnet where you - will deploy the Teleport agents. - - - - -1. Assign `azure_resource_group` to the name of the Azure resource group where - you are deploying Teleport agents. - -1. The module uses `public_key_path` to pass validation, as Azure VMs must - include an RSA public key with at least 2048 bits. Once the module deploys - the VMs, a cloud-init script removes the public key and disables OpenSSH. Set - this input to the path to a valid public SSH key. - -1. Assign `region` to the Azure region where you plan to deploy Teleport agents, - such as `East US`. - -1. For `subnet_id`, include the ID of the subnet where you will deploy Teleport - agents. Use the following format: - - ```text - /subscriptions/SUBSCRIPTION/resourceGroups/RESOURCE_GROUP/providers/Microsoft.Network/virtualNetworks/NETWORK_NAME/subnets/SUBNET_NAME - ``` - - - - -## Step 2/3. Add provider configurations - -In this step, you will configure the `agent-pool-terraform` module for your -Teleport cluster and cloud provider. 
- -In your project directory, create a file called `providers.tf` with the -following content: - - - - -```hcl -terraform { - required_providers { - aws = { - source = "hashicorp/aws" - version = "~> 4.0" - } - - teleport = { - source = "terraform.releases.teleport.dev/gravitational/teleport" - version = "(=teleport.plugin.version=)" - } - } -} - -provider "aws" { - region = AWS_REGION -} - -provider "teleport" { - # Update addr to point to your Teleport Enterprise (managed) tenant URL's host:port - addr = PROXY_SERVICE_ADDRESS - identity_file_path = "terraform-identity" -} -``` - -Replace the following placeholders: - -| Placeholder | Value | -|-----------------------|------------------------------------------------------------------------------| -| AWS_REGION | The AWS region where you will deploy agents, e.g., `us-east-2` | -| PROXY_SERVICE_ADDRESS | The host and port of the Teleport Proxy Service, e.g., `example.teleport.sh:443` | - - - -```hcl -terraform { - required_providers { - google = { - source = "hashicorp/google" - version = "~> 5.5.0" - } - - teleport = { - source = "terraform.releases.teleport.dev/gravitational/teleport" - version = "(=teleport.plugin.version=)" - } - } -} - -provider "google" { - project = GOOGLE_CLOUD_PROJECT - region = GOOGLE_CLOUD_REGION -} - -provider "teleport" { - # Update addr to point to your Teleport Enterprise (managed) tenant URL's host:port - addr = PROXY_SERVICE_ADDRESS - identity_file_path = "terraform-identity" -} -``` - -Replace the following placeholders: - -| Placeholder | Value | -|-----------------------|------------------------------------------------------------------------------| -| GOOGLE_CLOUD_PROJECT| Google Cloud project where you will deploy agents. | -| GOOGLE_CLOUD_REGION | Google Cloud region where you will deploy agents. 
| -| PROXY_SERVICE_ADDRESS | The host and port of the Teleport Proxy Service, e.g., `example.teleport.sh:443` | - - - - -```hcl -terraform { - required_providers { - azurerm = { - source = "hashicorp/azurerm" - version = "~> 3.0.0" - } - - teleport = { - source = "terraform.releases.teleport.dev/gravitational/teleport" - version = "(=teleport.plugin.version=)" - } - } -} - -provider "teleport" { - identity_file_path = "terraform-identity" - # Update addr to point to your Teleport Enterprise (managed) tenant URL's host:port - addr = PROXY_SERVICE_ADDRESS -} - -provider "azurerm" { - features {} -} -``` - -Replace the following placeholders: - -|Placeholder|Value| -|---|---| -| PROXY_SERVICE_ADDRESS | The host and port of the Teleport Proxy Service, e.g., `example.teleport.sh:443` | - - - - -## Step 3/3. Verify the deployment - -Make sure your cloud provider credentials are available to Terraform using the -standard approach for your organization. - -Apply the Terraform configuration: - -```code -$ terraform init -$ terraform apply -``` - -Once the `apply` command completes, run the following command to verify that -your agents have deployed successfully. This command, which assumes that the -agents have the `Node` role, lists all Teleport SSH Service instances with the -`role=agent-pool` label: - -```code -$ tsh ls role=agent-pool -Node Name Address Labels --------------------------- ---------- --------------- -ip-10-1-1-187.ec2.internal ⟵ Tunnel role=agent-pool -ip-10-1-1-24.ec2.internal ⟵ Tunnel role=agent-pool -``` - -## Next step: Enroll infrastructure resources - -In this section, we describe the two ways to configure your agent pool to -protect infrastructure resources with Teleport. - -### Define dynamic resources in Terraform - -You can declare Terraform resources that enroll your infrastructure with -Teleport. 
The Teleport Terraform provider currently supports the following: - -|Infrastructure Resource|Terraform Resource| -|---|---| -|Application|`teleport_app`| -|Database|`teleport_database`| - -To declare a dynamic resource with Terraform, add a configuration block similar -to the ones below to a `*.tf` file in your `agent-pool-terraform` project -directory. - -The Teleport Terraform provider creates these on the Auth Service backend, and -the relevant Teleport services query them in order to proxy user traffic. For a -full list of supported resources and fields, see the [Terraform provider -reference](../../reference/terraform-provider.mdx). - - - - -```hcl -resource "teleport_app" "example" { - metadata = { - name = "example" - description = "Test app" - labels = { - // Teleport adds this label by default, so add it here to - // ensure a consistent state. - "teleport.dev/origin" = "dynamic" - } - } - - spec = { - uri = "localhost:3000" - } -} -``` - - - - -```hcl -resource "teleport_database" "example" { - metadata = { - name = "example" - description = "Test database" - labels = { - // Teleport adds this label by default, so add it here to - // ensure a consistent state. - "teleport.dev/origin" = "dynamic" - } - } - - spec = { - protocol = "postgres" - uri = "localhost" - } -} -``` - - - - -### Configure Teleport services in the agent pool - -Each Teleport service reads its local configuration file (`/etc/teleport.yaml` -by default) to determine which infrastructure resources to proxy. You can edit -this configuration file to enroll resources with Teleport. - -In the setup we explored in this guide, you can edit the user data script for -each instance to add configuration settings to, for example, the -`database_service` or `kubernetes_service` sections. 
- -To see how to configure each service, read its section of the documentation: - -- [SSH Service](../server-access/introduction.mdx) -- [Database Service](../database-access/database-access.mdx) -- [Kubernetes Service](../kubernetes-access/introduction.mdx) -- [Windows Desktop Service](../desktop-access/introduction.mdx) -- [Application Service](../application-access/introduction.mdx) - -## Further reading: How the module works - -In this section, we explain the resources configured in the -`agent-pool-terraform` module. - -### Join token - -The `agent-pool-terraform` module deploys one virtual machine instance for each -Agent. Each agent joins the cluster using a token. We create each token -using the `teleport_provision_token` Terraform resource, specifying the token's -value with a `random_string` resource: - -```hcl -(!examples/agent-pool-terraform/teleport/token.tf!) -``` - -When we apply the `teleport_provision_token` resources, the Teleport Terraform -provider creates them on the Teleport Auth Service backend. - -### User data script - -Each Agent deployed by the `agent-pool-terraform` module loads a user -data script that creates a Teleport configuration file for the agent. The -services enabled by the configuration file depend on the value of the -`agent_roles` input variable: - -```text -(!examples/agent-pool-terraform/teleport/userdata!) -``` - -### Virtual machine instances - -Each cloud-specific child module of `agent-pool-terraform` declares resources to -deploy a virtual machine instance on your cloud provider: - - - - -`ec2-instance.tf` declares a data source for an Amazon Linux 2023 machine image -and uses it to launch EC2 instances that run Teleport agents with the -`teleport_provision_token` resource: - -```hcl -(!examples/agent-pool-terraform/aws/ec2-instance.tf!) 
-``` - - - - -`gcp-instance.tf` declares Google Compute Engine instances that use the -`teleport_provision_token` to run Teleport agents: - -```hcl -(!examples/agent-pool-terraform/gcp/gcp-instance.tf!) -``` - - - - -`azure-instance.tf` declares an Azure virtual machine resource to run Teleport -agents using the `teleport_provision_token` resource, plus the required network -interface for each instance. - -Note that while Azure VM instances require a user account, this module declares -a temporary one to pass validation, but uses Teleport to enable access to the -instances: - -```hcl -(!examples/agent-pool-terraform/azure/azure-instance.tf!) -``` - - - - diff --git a/docs/pages/management/dynamic-resources.mdx b/docs/pages/management/infrastructure-as-code.mdx similarity index 98% rename from docs/pages/management/dynamic-resources.mdx rename to docs/pages/management/infrastructure-as-code.mdx index 741ba9d5d8240..d93295b467fa6 100644 --- a/docs/pages/management/dynamic-resources.mdx +++ b/docs/pages/management/infrastructure-as-code.mdx @@ -1,12 +1,12 @@ --- -title: Using Dynamic Resources +title: Manage Teleport with Infrastructure as Code description: An introduction to Teleport's dynamic resources, which make it possible to apply settings to remote clusters using infrastructure as code. tocDepth: 3 --- This section explains how to manage Teleport's **dynamic resources**, which make it possible to adjust the behavior of your Teleport cluster as your -infrastructure changes. +infrastructure changes using infrastructure as code tools. ## What is a dynamic resource? 
diff --git a/docs/pages/management/dynamic-resources/access-list.mdx b/docs/pages/management/infrastructure-as-code/managing-resources/access-list.mdx similarity index 100% rename from docs/pages/management/dynamic-resources/access-list.mdx rename to docs/pages/management/infrastructure-as-code/managing-resources/access-list.mdx diff --git a/docs/pages/management/dynamic-resources/agentless-ssh-servers.mdx b/docs/pages/management/infrastructure-as-code/managing-resources/agentless-ssh-servers.mdx similarity index 100% rename from docs/pages/management/dynamic-resources/agentless-ssh-servers.mdx rename to docs/pages/management/infrastructure-as-code/managing-resources/agentless-ssh-servers.mdx diff --git a/docs/pages/management/infrastructure-as-code/managing-resources/managing-resources.mdx b/docs/pages/management/infrastructure-as-code/managing-resources/managing-resources.mdx new file mode 100644 index 0000000000000..423f97a515756 --- /dev/null +++ b/docs/pages/management/infrastructure-as-code/managing-resources/managing-resources.mdx @@ -0,0 +1,9 @@ +--- +title: "Managing Resources with Infrastructure as Code" +description: Provides instructions on managing specific dynamic resources with tctl and the Teleport Terraform provider and Kubernetes operator. +--- + +Read the guides in this section for instructions on managing specific dynamic +resources with tctl and the Teleport Terraform provider and Kubernetes operator. + +(!toc!) 
diff --git a/docs/pages/management/dynamic-resources/user-and-role.mdx b/docs/pages/management/infrastructure-as-code/managing-resources/user-and-role.mdx similarity index 100% rename from docs/pages/management/dynamic-resources/user-and-role.mdx rename to docs/pages/management/infrastructure-as-code/managing-resources/user-and-role.mdx diff --git a/docs/pages/management/dynamic-resources/teleport-operator.mdx b/docs/pages/management/infrastructure-as-code/teleport-operator.mdx similarity index 100% rename from docs/pages/management/dynamic-resources/teleport-operator.mdx rename to docs/pages/management/infrastructure-as-code/teleport-operator.mdx diff --git a/docs/pages/management/dynamic-resources/teleport-operator-helm.mdx b/docs/pages/management/infrastructure-as-code/teleport-operator/teleport-operator-helm.mdx similarity index 100% rename from docs/pages/management/dynamic-resources/teleport-operator-helm.mdx rename to docs/pages/management/infrastructure-as-code/teleport-operator/teleport-operator-helm.mdx diff --git a/docs/pages/management/dynamic-resources/teleport-operator-standalone.mdx b/docs/pages/management/infrastructure-as-code/teleport-operator/teleport-operator-standalone.mdx similarity index 100% rename from docs/pages/management/dynamic-resources/teleport-operator-standalone.mdx rename to docs/pages/management/infrastructure-as-code/teleport-operator/teleport-operator-standalone.mdx diff --git a/docs/pages/management/dynamic-resources/terraform-provider.mdx b/docs/pages/management/infrastructure-as-code/terraform-provider.mdx similarity index 100% rename from docs/pages/management/dynamic-resources/terraform-provider.mdx rename to docs/pages/management/infrastructure-as-code/terraform-provider.mdx diff --git a/docs/pages/management/infrastructure-as-code/terraform-starter.mdx b/docs/pages/management/infrastructure-as-code/terraform-starter.mdx new file mode 100644 index 0000000000000..b1463ed26879a --- /dev/null +++ 
b/docs/pages/management/infrastructure-as-code/terraform-starter.mdx @@ -0,0 +1,38 @@ +--- +title: "Terraform Starter Setup" +description: Provides an example to help you get started managing dynamic resources in a Teleport cluster using Terraform. +--- + +The Terraform starter guide provides an example of a Terraform module that +manages Teleport resources in production. The guide helps you to understand the +Teleport resources to manage with Terraform in order to accomplish common +Teleport setup tasks. You can use the example module as a starting point for +managing a complete set of Teleport cluster resources. + +The guides in the Terraform starter module assume that you have followed [Machine +ID with the Teleport Terraform Provider]( +../../../enroll-resources/machine-id/access-guides//terraform.mdx) on your +workstation. + +## Part One: Enroll resources + +In Part One of the Terraform starter module, we show you how to enroll resources +such as Linux servers, databases, and Kubernetes clusters by deploying a pool of +Teleport Agents on virtual machine instances. You can then declare dynamic +infrastructure resources with Terraform or change the configuration file +provided to each Agent. + +[Read Part One](./terraform-starter/enroll-resources.mdx). + +## Part Two: Configure RBAC + +Part Two of the Terraform starter module shows you how to configure Teleport +role-based access controls to provide different levels of access to the +resources you enrolled in Part One. It also configures Access Requests, +available in Teleport Identity, so that users authenticate with less privileged +roles by default but can request access to more privileged roles. An +authentication connector lets users authenticate to Teleport using a Single +Sign-On provider. + +[Read Part Two](./terraform-starter/rbac.mdx). 
+ diff --git a/docs/pages/management/infrastructure-as-code/terraform-starter/enroll-resources.mdx b/docs/pages/management/infrastructure-as-code/terraform-starter/enroll-resources.mdx new file mode 100644 index 0000000000000..cc225930d4d10 --- /dev/null +++ b/docs/pages/management/infrastructure-as-code/terraform-starter/enroll-resources.mdx @@ -0,0 +1,749 @@ +--- +title: "Part 1: Enroll Infrastructure with Terraform" +h1: "Terraform Starter: Enroll Infrastructure" +description: Explains you how to deploy a pool of Teleport Agents so you can apply dynamic resources to enroll your infrastructure with Teleport. +--- + +*This guide is Part One of the Teleport Terraform starter guide. Read the +[overview](../terraform-starter.mdx) for the scope and purpose of the Terraform +starter guide.* + +This guide shows you how to use Terraform to enroll infrastructure resources +with Teleport. You will: + +- Deploy a pool of Teleport Agents running on virtual machines. +- Label resources enrolled by the Agents with `env:dev` and `env:prod` so that, + in [Part Two](rbac.mdx), you can configure Teleport roles to enable access to + these resources. + +## How it works + +An Agent is a Teleport instance configured to run one or more Teleport services +in order to proxy infrastructure resources. For a brief architectural overview +of how Agents run in a Teleport cluster, read the [Introduction to Teleport +Agents](introduction.mdx). + +There are several methods you can use to join a Teleport Agent to your cluster, +which we discuss in the [Joining Services to your +Cluster](join-services-to-your-cluster.mdx) guide. In this guide, we will use +the **join token** method, where the operator stores a secure token on the Auth +Service, and an Agent presents the token in order to join a cluster. 
+ +No matter which join method you use, it will involve the following Terraform +resources: + +- Compute instances to run Teleport services +- A join token for each compute instance in the Agent pool + +![A Teleport Agent pool](../../../../img/tf-agent-diagram.png) + +## Prerequisites + +(!docs/pages/includes/edition-prereqs-tabs.mdx!) + + + +We recommend following this guide on a fresh Teleport demo cluster so you can +see how an Agent pool works. After you are familiar with the setup, apply the +lessons from this guide to protect your infrastructure. You can get started with +a demo cluster using: +- A demo deployment on a [Linux server](../../../deploy-a-cluster/linux-demo.mdx) +- A [Teleport Enterprise (Cloud) trial](https://goteleport.com/signup) + + + +- An AWS, Google Cloud, or Azure account with permissions to create virtual + machine instances. + +- Cloud infrastructure that enables virtual machine instances to connect to the + Teleport Proxy Service. For example: + - An AWS subnet with a public NAT gateway or NAT instance + - Google Cloud NAT + - Azure NAT Gateway + + In minimum-security demo clusters, you can also configure the VM instances you + deploy to have public IP addresses. + +- Terraform v(=terraform.version=) or higher. + +- An identity file for the Teleport Terraform provider. Make sure you have + completed [Set up the Teleport Terraform Provider + ](../../infrastructure-as-code/terraform-provider.mdx) before following this + guide. + + If you followed the guide, you should have the following files in your + Terraform project directory: + + |Path|Description| + |---|---| + |`terraform-identity/identity`|Identity file used by the Terraform provider (renewed by a Machine ID Bot).| + |`create_and_run_terraform_bot.sh`|Script to start a Machine ID bot.| + |`provider.tf`|Configuration for the Teleport Terraform provider.| + |`remove_terraform_bot.sh`|Script to clean up the Machine ID Bot.| + +- (!docs/pages/includes/tctl.mdx!) + +## Step 1/4. 
Import the Terraform module + +1. Navigate to your Terraform project directory. + + + + If you don't have an identity file available in + `terraform-identity/identity`, make sure you have followed the [prerequisites + for this guide](#prerequisites). + + + +1. Fetch the Teleport code repository and copy the example Terraform + configuration for this project into your current working directory. The + following commands copy the appropriate child module for your cloud provider + into a subdirectory called `cloud` and HCL configurations for Teleport + resources into a subdirectory called `teleport`: + + + + + ```code + $ git clone --depth=1 https://github.com/gravitational/teleport teleport-clone + $ cp -R teleport-clone/examples/terraform-starter/agent-installation teleport + $ cp -R teleport-clone/examples/terraform-starter/aws cloud + $ rm -rf teleport-clone + ``` + + + + + ```code + $ git clone --depth=1 https://github.com/gravitational/teleport teleport-clone + $ cp -R teleport-clone/examples/terraform-starter/agent-installation teleport + $ cp -R teleport-clone/examples/terraform-starter/gcp cloud + $ rm -rf teleport-clone + ``` + + + + + ```code + $ git clone --depth=1 https://github.com/gravitational/teleport teleport-clone + $ cp -R teleport-clone/examples/terraform-starter/agent-installation teleport + $ cp -R teleport-clone/examples/terraform-starter/azure cloud + $ rm -rf teleport-clone + ``` + + + + +1. 
Create a file called `main.tf` with the following content, which imports the + `terraform-starter` module for your cloud provider: + + + + + ```hcl + module "agent_installation" { + source = "./teleport" + agent_count = 2 + agent_roles = ["Node"] + labels = {} + proxy_service_address = "teleport.example.com:443" + teleport_edition = "cloud" + teleport_version = "(=teleport.version=)" + } + + module "agent_deployment" { + region = "" + source = "./cloud" + subnet_id = "" + userdata_scripts = module.agent_installation.userdata_scripts + } + ``` + + + + + ```hcl + module "agent_installation" { + source = "./teleport" + agent_count = 2 + agent_roles = ["Node"] + labels = {} + proxy_service_address = "teleport.example.com:443" + teleport_edition = "cloud" + teleport_version = "(=teleport.version=)" + } + + module "agent_deployment" { + gcp_zone = "us-east1-b" + google_project = "" + source = "./cloud" + subnet_id = "" + userdata_scripts = module.agent_installation.userdata_scripts + } + ``` + + + + + ```hcl + module "agent_installation" { + source = "./teleport" + agent_count = 2 + agent_roles = ["Node"] + labels = {} + proxy_service_address = "teleport.example.com:443" + teleport_edition = "cloud" + teleport_version = "(=teleport.version=)" + } + + module "agent_deployment" { + azure_resource_group = "" + public_key_path = "" + region = "East US" + source = "./cloud" + subnet_id = "" + userdata_scripts = module.agent_installation.userdata_scripts + } + ``` + + + + +Edit the ` module "agent_installation"` block in `main.tf` as follows: + +1. Assign `agent_count` to `2` for high availability. As you scale your Teleport + usage, you can increase this count to ease the load on each Agent. + +1. Modify the elements of the `agent_roles` list. + + The Teleport Auth Service associates a join token with one or more roles, + identifying the Teleport services that are allowed to use the token. 
Modify + `agent_roles` to include the token roles that correspond to the Teleport + services you plan to run on your Agent nodes: + + | Role value | Service it enables | + |-------------|------------------------------| + | `App` | Teleport Application Service | + | `Discovery` | Teleport Discovery Service | + | `Db` | Teleport Database Service | + | `Kube` | Teleport Kubernetes Service | + | `Node` | Teleport SSH Service | + + The elements of the `agent_roles` list also determine which Teleport services + the Agents deployed by the `terraform-starter` module will run. For + example, if the value of `agent_roles` is `["Node", "Db"]`, the configuration + file enables the Teleport SSH Service and Teleport Database Service. + + If the value of the `agent_roles` input variable includes the `Node` role, + the configuration also adds the `role: agent-pool` label to the Teleport SSH + Service on each instance. This makes it easier to access hosts in the Agent + pool later. The script makes Teleport the only option for accessing Agent + instances by disabling OpenSSH on startup and deleting any authorized public + keys. + +1. Assign `proxy_service_address` to the host and HTTPS port of your Teleport + Proxy Service, e.g., `mytenant.teleport.sh:443`. + + + + Make sure to include the port. + + + +1. Make sure `teleport_edition` matches your Teleport edition. Assign this to + `oss`, `cloud`, or `enterprise`. The default is `oss`. + +1. If needed, change the value of `teleport_version` to the version of Teleport + you want to run on your Agents. It must be either the same major version as + your Teleport cluster or one major version behind. + +Edit the `module "agent_deployment"` block in `main.tf` as follows: + +1. 
If you are deploying your instance in a minimum-security demo environment and + do not have a NAT gateway, NAT instance, or other method for connecting your + instances to the Teleport Proxy Service, modify the `module` block to + associate a public IP address with each Agent instance: + + ```hcl + insecure_direct_access=true + ``` + +1. Assign the remaining input variables depending on your cloud provider: + + + + +1. Assign `region` to the AWS region where you plan to deploy Teleport Agents, + such as `us-east-1`. +1. For `subnet_id`, include the ID of the subnet where you will deploy Teleport + Agents. + + + + +1. Assign `google_project` to the name of your Google Cloud project and + `gcp_zone` to the zone where you will deploy Agents, such as `us-east1-b`. +1. For `subnet_id`, include the name or URI of the Google Cloud subnet where you + will deploy the Teleport Agents. + + The subnet URI has the format: + + ``` + projects/PROJECT_NAME/regions/REGION/subnetworks/SUBNET_NAME + ``` + + + + +1. Assign `azure_resource_group` to the name of the Azure resource group where + you are deploying Teleport Agents. + +1. The module uses `public_key_path` to pass validation, as Azure VMs must + include an RSA public key with at least 2048 bits. Once the module deploys + the VMs, a cloud-init script removes the public key and disables OpenSSH. Set + this input to the path to a valid public SSH key. + +1. Assign `region` to the Azure region where you plan to deploy Teleport Agents, + such as `East US`. + +1. For `subnet_id`, include the ID of the subnet where you will deploy Teleport + Agents. Use the following format: + + ```text + /subscriptions/SUBSCRIPTION/resourceGroups/RESOURCE_GROUP/providers/Microsoft.Network/virtualNetworks/NETWORK_NAME/subnets/SUBNET_NAME + ``` + + + + +## Step 2/4. Add provider configurations + +In this step, you will configure the `terraform-starter` module for your +Teleport cluster and cloud provider. 
+ +In your Terraform project directory, ensure that the file called `provider.tf` +includes the following content: + + + + +```hcl +terraform { + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 4.0" + } + + teleport = { + source = "terraform.releases.teleport.dev/gravitational/teleport" + version = "~> (=teleport.major_version=).0" + } + } +} + +provider "aws" { + region = AWS_REGION +} + +provider "teleport" { + # Update addr to point to your Teleport Enterprise (managed) tenant URL's host:port + addr = PROXY_SERVICE_ADDRESS + identity_file_path = "terraform-identity/identity" +} +``` + +Replace the following placeholders: + +| Placeholder | Value | +|-----------------------|------------------------------------------------------------------------------| +| AWS_REGION | The AWS region where you will deploy Agents, e.g., `us-east-2` | +| PROXY_SERVICE_ADDRESS | The host and port of the Teleport Proxy Service, e.g., `example.teleport.sh:443` | + + + +```hcl +terraform { + required_providers { + google = { + source = "hashicorp/google" + version = "~> 5.5.0" + } + + teleport = { + source = "terraform.releases.teleport.dev/gravitational/teleport" + version = "~> (=teleport.major_version=).0" + } + } +} + +provider "google" { + project = GOOGLE_CLOUD_PROJECT + region = GOOGLE_CLOUD_REGION +} + +provider "teleport" { + # Update addr to point to your Teleport Enterprise (managed) tenant URL's host:port + addr = PROXY_SERVICE_ADDRESS + identity_file_path = "terraform-identity/identity" +} +``` + +Replace the following placeholders: + +| Placeholder | Value | +|-----------------------|------------------------------------------------------------------------------| +| GOOGLE_CLOUD_PROJECT| Google Cloud project where you will deploy Agents. | +| GOOGLE_CLOUD_REGION | Google Cloud region where you will deploy Agents. 
| +| PROXY_SERVICE_ADDRESS | The host and port of the Teleport Proxy Service, e.g., `example.teleport.sh:443` | + + + + +```hcl +terraform { + required_providers { + azurerm = { + source = "hashicorp/azurerm" + version = "~> 3.0.0" + } + + teleport = { + source = "terraform.releases.teleport.dev/gravitational/teleport" + version = "~> (=teleport.major_version=).0" + } + } +} + +provider "teleport" { + identity_file_path = "terraform-identity/identity" + # Update addr to point to your Teleport Cloud tenant URL's host:port + addr = PROXY_SERVICE_ADDRESS +} + +provider "azurerm" { + features {} +} +``` + +Replace the following placeholders: + +|Placeholder|Value| +|---|---| +| PROXY_SERVICE_ADDRESS | The host and port of the Teleport Proxy Service, e.g., `example.teleport.sh:443` | + + + + +## Step 3/4. Enroll dynamic infrastructure resources + +In this section, you will configure your Agent pool to enroll infrastructure +resources in your Teleport cluster. To do so, you will declare **dynamic +resources**. Agents read these from the Auth Service and proxy traffic to them. +You apply these by defining them as Terraform resources. + +The Teleport Terraform provider currently supports the following dynamic +resources: + +|Infrastructure Resource|Terraform Resource| +|---|---| +|Application|`teleport_app`| +|Database|`teleport_database`| +|OpenSSH Servers|`teleport_server`| + +For Windows desktops and Kubernetes clusters, you must edit the Agent +configuration file to enroll these resources (we will explain this [later in the +guide](#enroll-static-resources). + +The module we demonstrate in this guide already configures the Agents you deploy +to be Teleport SSH Service instances. In this setup, you can enroll OpenSSH +servers with your Teleport cluster agentlessly as dynamic resources. + +To declare a dynamic resource with Terraform: + +1. Add a configuration block similar to the ones below to a `*.tf` file in your + `terraform-starter` project directory. 
+ + + + + ```hcl + resource "teleport_app" "example" { + metadata = { + name = "example" + description = "Test app" + labels = { + // Teleport adds this label by default, so add it here to + // ensure a consistent state. + "teleport.dev/origin" = "dynamic" + // Change "dev" to "prod" as appropriate. + "env" = "dev" + } + } + + spec = { + uri = "example.com:3000" + } + } + ``` + + 1. Update the `spec.uri` field to match your application's domain name and + port. + + 1. Read the [Terraform Provider + Reference](../../reference/terraform-provider/resources/app.mdx) for the + `teleport_app` resource and adjust any additional configuration fields. + + + + + ```hcl + resource "teleport_database" "example" { + metadata = { + name = "example" + description = "Test database" + labels = { + // Teleport adds this label by default, so add it here to + // ensure a consistent state. + "teleport.dev/origin" = "dynamic" + // Change "dev" to "prod" as appropriate. + "env" = "dev" + } + } + + spec = { + protocol = "postgres" + uri = "localhost" + } + } + ``` + + 1. Update the `protocol` field to match the protocol of your database. Run the + following command on your workstation to see all supported protocols: + + ```code + $ teleport help db configure create + ``` + + 1. Update the `uri` field to match the domain name port of your database. If + you manage your database with Terraform, you can set it to an attribute of + the corresponding Terraform resource to establish a dependency + relationship. + 1. Read the [Terraform Provider + Reference](../../reference/terraform-provider/resources/database.mdx) for + the `teleport_database` resource and adjust any additional configuration + fields. + + + + + ```hcl + resource "teleport_server" "openssh_agentless" { + version = "v2" + sub_kind = "openssh" + metadata = { + name = "example" + description = "Test server" + labels = { + // Teleport adds this label by default, so add it here to + // ensure a consistent state. 
+ "teleport.dev/origin" = "dynamic" + // Change "dev" to "prod" as appropriate. + "env" = "dev" + } + } + spec = { + addr = "198.51.100.1:22" + hostname = "example" + } + } + ``` + + 1. Update `spec.addr` with the domain name or IP address of the server as well + as the port. If you manage your database with Terraform, you can set it to + an attribute of the corresponding Terraform resource to establish a + dependency relationship. + 1. Update `hostname` with the hostname of the server as shown by Teleport + client tools (Teleport Connect, `tsh`, and the Web UI). + 1. Read the [Terraform Provider + Reference](../../reference/terraform-provider/resources/server.mdx) for + the `teleport_database` resource and adjust any additional configuration + fields. + + + + +1. Make sure that, in each dynamic infrastructure resource, the `labels` field + includes the following line to ensure that your Terraform state remains + consistent with the your cluster: + + ```hcl + "teleport.dev/origin" = "dynamic" + ``` + +1. Edit the `labels` field of each dynamic infrastructure resource to add + one of the following key-value pairs: + - `"env" = "dev"` + - `"env" = "prod"` + + In the next guide in this series, you will define a Teleport role that + restricts access to resources with this label. + +## Step 4/4. Verify the deployment + +Make sure your cloud provider credentials are available to Terraform using the +standard approach for your organization. + +Apply the Terraform configuration: + +```code +$ terraform init +$ terraform apply +``` + +Once the `apply` command completes, run the following command to verify that +your Agents have deployed successfully. 
This command, which assumes that the +Agents have the `Node` role, lists all Teleport SSH Service instances with the +`role=agent-pool` label: + +```code +$ tsh ls role=agent-pool +Node Name Address Labels +-------------------------- ---------- --------------- +ip-10-1-1-187.ec2.internal ⟵ Tunnel role=agent-pool +ip-10-1-1-24.ec2.internal ⟵ Tunnel role=agent-pool +``` + +## Further reading: How the module works + +In this section, we explain the resources configured in the +`terraform-starter` module. + +### Join token + +The `terraform-starter` module deploys one virtual machine instance for each +Teleport Agent. Each Agent joins the cluster using a token. We create each token +using the `teleport_provision_token` Terraform resource, specifying the token's +value with a `random_string` resource: + +```hcl +(!examples/terraform-starter/agent-installation/token.tf!) +``` + +When we apply the `teleport_provision_token` resources, the Teleport Terraform +provider creates them on the Teleport Auth Service backend. + +### User data script + +Each Teleport Agent deployed by the `terraform-starter` module loads a user +data script that creates a Teleport configuration file for the Agent. The +services enabled by the configuration file depend on the value of the +`agent_roles` input variable: + +```text +(!examples/terraform-starter/agent-installation/userdata!) +``` + +### Virtual machine instances + +Each cloud-specific child module of `terraform-starter` declares resources to +deploy a virtual machine instance on your cloud provider: + + + + +`ec2-instance.tf` declares a data source for an Amazon Linux 2023 machine image +and uses it to launch EC2 instances that run Teleport Agents with the +`teleport_provision_token` resource: + +```hcl +(!examples/terraform-starter/aws/ec2-instance.tf!) 
+``` + + + + +`gcp-instance.tf` declares Google Compute Engine instances that use the +`teleport_provision_token` to run Teleport Agents: + +```hcl +(!examples/terraform-starter/gcp/gcp-instance.tf!) +``` + + + + +`azure-instance.tf` declares an Azure virtual machine resource to run Teleport +Agents using the `teleport_provision_token` resource, plus the required network +interface for each instance. + +Note that while Azure VM instances require a user account, this module declares +a temporary one to pass validation, but uses Teleport to enable access to the +instances: + +```hcl +(!examples/terraform-starter/azure/azure-instance.tf!) +``` + + + + +## Next steps: More options for enrolling resources + +In Part One of the Terraform starter guide, we showed you how to use Terraform +to deploy a pool of Teleport Agents in order to enroll infrastructure resources +with Teleport. While the guide showed you how to enroll resources dynamically, +by declaring Terraform resources for each infrastructure resource you want to +enroll, you can protect more of your infrastructure with Teleport by: + +- Configuring Auto-Discovery +- Configuring static resource enrollment + +### Configure Auto-Discovery + +For a more scalable approach to enrolling resources than the one shown in this +guide, configure the Teleport Discovery Service to automatically detect +resources in your infrastructure and enroll them with the Teleport Auth Service. + +To configure the Teleport Discovery Service, edit the userdata script run by the +Agent instances managed in the Terraform starter module. Follow the +[Auto-Discovery +guides](../../../enroll-resources/auto-discovery/auto-discovery.mdx) guides to +configure the Discovery Service and enable your Agents to proxy the resources +that the service enrolls. + +### Enroll static resources + +You can enroll resources statically by editing the configuration file that each +Teleport Agent reads when it starts up, which is `/etc/teleport.yaml` by +default. 
For information about enrolling static resources, read the +documentation section for each kind of resource you would like to enroll: + +- [Databases](../../../enroll-resources/database-access/database-access.mdx) +- [Windows + desktops](../../../enroll-resources/desktop-access/desktop-access.mdx) +- [Kubernetes + clusters](../../../enroll-resources/kubernetes-access/kubernetes-access.mdx) +- [Linux servers](../../../enroll-resources/server-access/server-access.mdx) +- [Web applications and cloud provider + APIs](../../../enroll-resources/application-access/application-access.mdx) + +To incorporate configuration changes into your Terraform module: + +1. Edit the userdata script for the compute instances that run Teleport Agents. + +1. In the section of the configuration file for the service that you edited, + assign the `labels` field to add key-value pairs. In the next guide in this + series, you will define a Teleport role that restricts access to resources + with this label. + + For example, you could indicate that a resource belongs to a development + environment with the following label: + + ```yaml + kubernetes_service: + enabled: true + labels: + env: "dev" + ``` + diff --git a/docs/pages/management/infrastructure-as-code/terraform-starter/rbac.mdx b/docs/pages/management/infrastructure-as-code/terraform-starter/rbac.mdx new file mode 100644 index 0000000000000..8781d12bea61a --- /dev/null +++ b/docs/pages/management/infrastructure-as-code/terraform-starter/rbac.mdx @@ -0,0 +1,537 @@ +--- +title: "Part 2: Configure Teleport RBAC with Terraform" +h1: "Terraform Starter: Configure RBAC" +description: Explains how to manage Teleport roles and authentication connectors with Terraform so you can implement the principle of least privilege in your infrastructure. +--- + +*This guide is Part Two of the Teleport Terraform starter guide. 
Read the +[overview](../terraform-starter.mdx) for the scope and purpose of the Terraform +starter guide.* + +In [Part One](./deploy-agents-terraform.mdx) of this series, we showed you +how to use Terraform to deploy Teleport Agents in order to enroll infrastructure +resources with your Teleport cluster. While configuring Agents, you labeled them +based on their environment, with some falling under `dev` and others under +`prod`. + +In this guide, you will configure your Teleport cluster to manage access to +resources with the `dev` and `prod` labels in order to implement the principle +of least privilege. + +## How it works + +This guide shows you how to create: + +- A role that can access `prod` resources. +- A role that can access `dev` resources and request access to `prod` resources. +- An authentication connector that allows users to sign into your organization's + identity provider and automatically gain access to `dev` resources. + +In this setup, the only way to access `prod` resources is with an Access +Request, meaning that there are no standing credentiasl for accessing `prod` +resources that an attacker can compromise. + +## Prerequisites + +This guide assumes that you have completed [Part 1: Enroll Infrastructure with +Terraform](./enroll-resources.mdx). + +- Resources enrolled with Teleport that include the `dev` and `prod` labels. We + show you how to enroll these resources using Terraform in Part One. +- An identity provider that supports OIDC or SAML. You should have either: + - The ability to modify SAML attributes or OIDC claims in your organization. + - Pre-existing groups of users that you want to map to two levels of access: + the ability to connect to `dev` resources; and the ability to review Access + Requests for `prod` access. + + + +We recommend following this guide on the same Teleport demo cluster you used for +Part One. After you are familiar with the setup, you can apply the lessons from +this guide to manage RBAC with Terraform. 
+ + + +- Terraform v(=terraform.version=) or higher. +- An identity file for the Teleport Terraform provider. Make sure you are + familiar with [how to set up the Teleport Terraform + provider](../../infrastructure-as-code/terraform-provider.mdx) before + following this guide. +- (!docs/pages/includes/tctl.mdx!) +- To help with troubleshooting, we recommend completing the setup steps in this + guide with a local user that has the preset `editor` and `auditor` roles. In + production, you can apply the lessons in this guide using a less privileged + user. + +## Step 1/4. Import Terraform modules + +1. Navigate to the directory where you organized files for your root Terraform + module. + +1. Fetch the Teleport code repository and copy the example Terraform + configuration for this project into your current working directory. + + Since you will enable users to authenticate to Teleport through your + organization's identity provider (IdP), the modules depend on whether your + organization uses OIDC or SAML to communicate with services: + + + + + ```code + $ git clone --depth=1 https://github.com/gravitational/teleport teleport-clone + $ cp -R teleport-clone/examples/terraform-starter/env_role env_role + $ cp -R teleport-clone/examples/terraform-starter/oidc oidc + $ rm -rf teleport-clone + ``` + + + + ```code + $ git clone --depth=1 https://github.com/gravitational/teleport teleport-clone + $ cp -R teleport-clone/examples/terraform-starter/env_role env_role + $ cp -R teleport-clone/examples/terraform-starter/saml saml + $ rm -rf teleport-clone + ``` + + + + + Your project directory will include two new modules: + + + + + |Name|Description| + |---|---| + |`env_role`|A module for a Teleport role that grants access to resources with a specific `env` label.| + |`oidc`|Teleport resources to configure an OIDC authentication connector and require that users authenticate with it.| + + + + + |Name|Description| + |---|---| + |`env_role`|A module for a Teleport role that grants 
access to resources with a specific `env` label.| + |`saml`|Teleport resources to configure a SAML authentication connector and require that users authenticate with it.| + + + + +1. Edit the `main.tf` you created in Part One to include the following `module` + blocks: + + + + + ```hcl + module "oidc" { + source = "./oidc" + oidc_claims_to_roles = [] + oidc_client_id = "" + oidc_connector_name = "Log in with OIDC" + oidc_redirect_url = "" + oidc_secret = "" + teleport_domain = "" + } + + module "prod_role" { + source = "./env_role" + env_label = "prod" + principals = {} + request_roles = [] + } + + module "dev_role" { + source = "./env_role" + env_label = "dev" + principals = {} + request_roles = [module.prod_role.role_name] + } + ``` + + + + ```hcl + module "saml" { + source = "./saml" + saml_connector_name = "Log in with SAML" + saml_attributes_to_roles = [] + saml_acs = "" + saml_entity_descriptor = "" + teleport_domain = "" + } + + module "prod_role" { + source = "./env_role" + env_label = "prod" + principals = {} + request_roles = [] + } + + module "dev_role" { + source = "./env_role" + env_label = "dev" + principals = {} + request_roles = [module.prod_role.role_name] + } + ``` + + + +Next, we will show you how to configure the two child modules, and walk you +through the Terraform resources that they apply. + +## Step 2/4. 
Configure role principals + +Together, the `prod_role` and `dev_role` modules you declared in Step 1 create +three Teleport roles: + +|Role|Description| +|---|---| +|`prod_access`|Allows access to infrastructure resources with the `env:prod` label.| +|`dev_access`|Allows access to infrastructure resources with the `env:dev` label, and Access Requests for the `prod_access` role.| +|`prod_reviewer`|Allows reviews of Access Requests for the `prod_access` role.| + +When Teleport users connect to resources in your infrastructure, they assume a +principal, such as an operating system login or Kubernetes user, in order to +interact with those resources. In this step, you will configure the `prod_role` +and `dev_role` modules to grant access to principals in your infrastructre. + +In `main.tf`, edit the `prod_role` and `dev_role` blocks so that the +`principals` field contains a mapping, similar to the example below. Use the +list of keys below the example to configure the principals. + +```hcl +module "prod_role" { + principals = { + KEY = "value" + } + // ... +} + +// ... 
+``` + +|Key|Description| +|---|---| +|`aws_role_arns`|AWS role ARNs the user can access when authenticating to an AWS API.| +|`azure_identities`|Azure identities the user can access when authenticating to an Azure API.| +|`db_names`|Names of databases the user can access within a database server.| +|`db_roles`|Roles the user can access on a database when they authenticate to a database server.| +|`db_users`|Users the user can access on a database when they authenticate to a database server.| +|`gcp_service_accounts`|Google Cloud service accounts the user can access when authenticating to a Google Cloud API.| +|`kubernetes_groups`|Kubernetes groups the Teleport Kubernetes Service can impersonate when proxying requests from the user.| +|`kubernetes_users`|Kubernetes users the Teleport Kubernetes Service can impersonate when proxying requests from the user.| +|`logins`|Operating system logins the user can access when authenticating to a Linux server.| +|`windows_desktop_logins`|Operating system logins the user can access when authenticating to a Windows desktop.| + +For example, the following configuration allows users with the `dev_access` role +to assume the `dev` login on Linux hosts and the `developers` group on +Kubernetes. `prod` users have more privileges and can assume the `root` login +and `system:masters` Kubernetes group: + +```hcl +module "dev_role" { +  principals = { +    logins = ["dev"] +    kubernetes_groups = ["developers"] +  } +  // ... +} + +module "prod_role" { +  principals = { +    logins = ["root"] +    kubernetes_groups = ["system:masters"] +  } +  // ... +} +``` + +## Step 3/4. Configure the single sign-on connector + +In this step, you will configure your Terraform module to enable authentication +through your organization's IdP. Configure the `saml` or `oidc` module you +declared in Step 1 by following the instructions. + +1. Register your Teleport cluster with your IdP as a relying party. The + instructions depend on your IdP. 
+ + The following guides show you how to set up your IdP to support the SAML or + OIDC authentication connector. **Read only the linked section**, since these + guides assume you are using `tctl` instead of Terraform to manage + authentication connectors: + + - [AD FS](../../../access-controls/sso/adfs.mdx#step-13-configure-adfs) + - [GitLab](../../../access-controls/sso/gitlab.mdx#step-13-configure-gitlab) + - [Google + Workspace](../../../access-controls/sso/google-workspace.mdx#step-14-configure-google-workspace) + - [OneLogin](../../../access-controls/sso/one-login.mdx#step-13-create-teleport-application-in-onelogin) + - [Azure + AD](../../../access-controls/sso/azuread.mdx#step-13-configure-azure-ad) + - [Okta](../../../access-controls/sso/okta.mdx#step-24-configure-okta) + +1. Configure the redirect URL (for OIDC) or assertion consumer service (for SAML): + + + + + Set `oidc_redirect_url` to + `https://example.teleport.sh:443/v1/webapi/oidc/callback`, replacing + `example.teleport.sh` with the domain name of your Teleport cluster. + + Ensure that `oidc_redirect_url` matches the URL you configured with + your IdP when registering your Teleport cluster as a relying party. + + + + + Set `saml_acs` to `https://example.teleport.sh:443/v1/webapi/saml/acs`, + replacing `example.teleport.sh` with the domain name of your Teleport + cluster. + + Ensure that `saml_acs` matches the URL you configured with your IdP when + registering your Teleport cluster as a relying party. + + + + +1. After you register Teleport as a relying party, your identity provider will + print information that you will use to configure the authentication + connector. Fill in the information depending on your provider type: + + + + + Fill in the `oidc_client_id` and `oidc_secret` with the client ID and secret + returned by the IdP. + + + + + Assign `saml_entity_descriptor` to the contents of the XML document that + contains the SAML entity descriptor for the IdP. + + + + +1. 
Assign `teleport_domain` to the domain name of your Teleport Proxy Service, + with no scheme or path, e.g., `example.teleport.sh`. The child module uses + this to configure WebAuthn for local users. This way, you can authenticate as + a local user as a fallback if you need to troubleshoot your single sign-on + authentication connector. + +1. Configure role mapping for your authentication connector. When a user + authenticates to Teleport through your organization's IdP, Teleport assigns + roles to the user based on your connector's role mapping configuration: + + + + + In this example, users with a `group` claim with the `developers` value + receive the `dev_access` role, while users with a `group` claim with the + value `admins` receive the `prod_reviewer` role: + + ```hcl + oidc_claims_to_roles = [ + { + claim = "group" + value = "developers" + roles = [ + module.dev_role.role_name + ] + }, + { + claim = "group" + value = "admins" + roles = module.dev_role.reviewer_role_names + } + ] + ``` + + Edit the `claim` value for each item in `oidc_claims_to_roles` to match the + name of an OIDC claim you have configured on your IdP. + + + + + In this example, users with a `group` attribute with the `developers` value + receive the `dev_access` role, while users with a `group` attribute with the + value `admins` receive the `prod_reviewer` role: + + ```hcl + saml_attributes_to_roles = [ + { + name = "group" + value = "developers" + roles = [ + module.dev_role.role_name + ] + }, + { + name = "group" + value = "admins" + roles = module.dev_role.reviewer_role_names + } + ] + ``` + + + + +## Step 4/4. Apply and verify changes + +1. Ensure there is an identity file available to the Teleport Terraform + provider. 
+ + If you followed [Set up the Teleport Terraform + Provider](../terraform-provider.mdx), run the script that you copied to start + a Machine ID Bot and generate identity files: + + ```code + $ create_and_run_terraform_bot.sh + ``` + + Otherwise, use the method you used when following Part One to obtain an + identity file. + + +1. Apply your changes: + + ```code + $ terraform init + $ terraform apply + ``` + +1. Open the Teleport Web UI in a browser and sign in to Teleport as a user on + your IdP with the `groups` trait assigned to the value that you mapped to the + role in your authentication connector. Your user should have the `dev_access` + role. + + + + If you receive errors logging in using your authentication connector, log in + as a local user with permissions to view the Teleport audit log. These are + available in the preset `auditor` role. Check for error messages in events + related to the "SSO Login" type. + + + +1. Request access to the `prod_access` role through the Web UI. Visit the + "Access Requests" tab and click "New Request". + +1. Sign out of the Web UI and, as a user in a group that you mapped to the + `prod_access` role, sign in. In the "Access Requests" tab, you should be able + to see and resolve the Access Request you created. + +## Further reading: How the module works + +This section describes the resources managed by the `env_role`, `saml`, and +`oidc` child modules. We encourage you to copy and customize these +configurations in order to refine your settings and choose the best reusable +interface for your environment. + +### The `env_access` role + +The `env_role` child module creates Teleport roles with the ability to access +Teleport-protected resources with the `env` label: + +```hcl +(!examples/terraform-starter/env_role/env_role.tf!) +``` + +The role hardcodes an `allow` rule with the ability to access applications, +databases, Linux servers, Kubernetes clusters, and Windows desktops with the +user-configured `env` label. 
+ +Since we cannot predict which principals are available in your infrastructure, +this role leaves the `aws_role_arns`, `logins`, and other principal-related role +attributes for the user to configure. + +The role also configures an `allow` rule that enables users to request access +for the roles configured in the `request_roles` input variable. + +An `output` prints the name of the role to allow us to create a dependency +relationship between this role and an authentication connector. + +### The `env_access_reviewer` role + +If `var.request_roles` in the `env_access` role is nonempty, the `env_role` +module creates a role that can review those roles. This is a separate role to +make permissions more composable: + +```hcl +(!examples/terraform-starter/env_role/reviewer.tf!) +``` + +As with the `env_access` role, there is an output to print the name of the +`env_access_reviewer` role to create a dependency relationship with the +authentication connector. + +### Configuring an authentication connector + +The authentication connector resources are minimal. Beyond providing the +attributes necessary to send and receive Teleport OIDC and SAML messages, the +connectors configure role mappings based on user-provided values: + + + + +```hcl +(!examples/terraform-starter/oidc/oidc_connector.tf!) +``` + + + +```hcl +(!examples/terraform-starter/saml/saml.tf!) +``` + + + + +Since the role mappings inputs are composite data types, we add a complex type +definition when declaring the input variables for the `oidc` and `saml` child +modules: + + + + +```hcl +(!examples/terraform-starter/oidc/inputs.tf!) +``` + + + +```hcl +(!examples/terraform-starter/saml/inputs.tf!) +``` + + + +For each authentication connector, we declare a cluster authentication +preference that enables the connector. The cluster authentication preference +enables local user login with WebAuthn as a secure fallback in case you need to +troubleshoot the single sign-on provider. 
+ + + + +```hcl +(!examples/terraform-starter/oidc/auth_pref.tf!) +``` + + + +```hcl +(!examples/terraform-starter/saml/auth_pref.tf!) +``` + + + +## Next steps + +Now that you have configured RBAC in your Terraform demo cluster, fine-tune your +setup by reading the comprehensive [Terraform provider +reference](../../../reference/terraform-provider.mdx). diff --git a/docs/pages/management/introduction.mdx b/docs/pages/management/introduction.mdx index 8c53e3ae3244e..28a29da17d46c 100644 --- a/docs/pages/management/introduction.mdx +++ b/docs/pages/management/introduction.mdx @@ -7,13 +7,13 @@ layout: tocless-doc In this section, you can find guides on managing a Teleport cluster after deploying it. -## Use dynamic resources +## Use infrastructure as code In Teleport, **dynamic resources** let you adjust the behavior of your Teleport cluster as your infrastructure changes. Teleport provides three methods for applying dynamic resources: the `tctl` client tool, Teleport Terraform provider, -and Kubernetes Operator. Read [Using Dynamic Resources](./dynamic-resources.mdx) -for more information. +and Kubernetes Operator. Read [Manage Teleport with Infrastructure as +Code](./infrastructure-as-code.mdx) for more information. ## Configure your cluster diff --git a/examples/agent-pool-terraform/teleport/inputs.tf b/examples/terraform-starter/agent-installation/inputs.tf similarity index 87% rename from examples/agent-pool-terraform/teleport/inputs.tf rename to examples/terraform-starter/agent-installation/inputs.tf index 31b23113e5467..75465c1f17e38 100644 --- a/examples/agent-pool-terraform/teleport/inputs.tf +++ b/examples/terraform-starter/agent-installation/inputs.tf @@ -33,10 +33,10 @@ variable "proxy_service_address" { variable "teleport_edition" { type = string default = "oss" - description = "Edition of your Teleport cluster. Can be: oss, enterprise, team, or cloud." + description = "Edition of your Teleport cluster. Can be: oss, enterprise, or cloud." 
validation { - condition = contains(["oss", "enterprise", "team", "cloud"], var.teleport_edition) - error_message = "teleport_edition must be one of: oss, enterprise, team, cloud." + condition = contains(["oss", "enterprise", "cloud"], var.teleport_edition) + error_message = "teleport_edition must be one of: oss, enterprise, cloud." } } diff --git a/examples/agent-pool-terraform/teleport/outputs.tf b/examples/terraform-starter/agent-installation/outputs.tf similarity index 100% rename from examples/agent-pool-terraform/teleport/outputs.tf rename to examples/terraform-starter/agent-installation/outputs.tf diff --git a/examples/agent-pool-terraform/teleport/required_providers.tf b/examples/terraform-starter/agent-installation/required_providers.tf similarity index 100% rename from examples/agent-pool-terraform/teleport/required_providers.tf rename to examples/terraform-starter/agent-installation/required_providers.tf diff --git a/examples/agent-pool-terraform/teleport/token.tf b/examples/terraform-starter/agent-installation/token.tf similarity index 100% rename from examples/agent-pool-terraform/teleport/token.tf rename to examples/terraform-starter/agent-installation/token.tf diff --git a/examples/agent-pool-terraform/teleport/userdata b/examples/terraform-starter/agent-installation/userdata similarity index 100% rename from examples/agent-pool-terraform/teleport/userdata rename to examples/terraform-starter/agent-installation/userdata diff --git a/examples/agent-pool-terraform/aws/ec2-instance.tf b/examples/terraform-starter/aws/ec2-instance.tf similarity index 100% rename from examples/agent-pool-terraform/aws/ec2-instance.tf rename to examples/terraform-starter/aws/ec2-instance.tf diff --git a/examples/agent-pool-terraform/aws/inputs.tf b/examples/terraform-starter/aws/inputs.tf similarity index 100% rename from examples/agent-pool-terraform/aws/inputs.tf rename to examples/terraform-starter/aws/inputs.tf diff --git 
a/examples/agent-pool-terraform/aws/required_providers.tf b/examples/terraform-starter/aws/required_providers.tf similarity index 100% rename from examples/agent-pool-terraform/aws/required_providers.tf rename to examples/terraform-starter/aws/required_providers.tf diff --git a/examples/agent-pool-terraform/azure/azure-instance.tf b/examples/terraform-starter/azure/azure-instance.tf similarity index 100% rename from examples/agent-pool-terraform/azure/azure-instance.tf rename to examples/terraform-starter/azure/azure-instance.tf diff --git a/examples/agent-pool-terraform/azure/inputs.tf b/examples/terraform-starter/azure/inputs.tf similarity index 100% rename from examples/agent-pool-terraform/azure/inputs.tf rename to examples/terraform-starter/azure/inputs.tf diff --git a/examples/agent-pool-terraform/azure/required_providers.tf b/examples/terraform-starter/azure/required_providers.tf similarity index 100% rename from examples/agent-pool-terraform/azure/required_providers.tf rename to examples/terraform-starter/azure/required_providers.tf diff --git a/examples/terraform-starter/env_role/env_role.tf b/examples/terraform-starter/env_role/env_role.tf new file mode 100644 index 0000000000000..143f4d756e957 --- /dev/null +++ b/examples/terraform-starter/env_role/env_role.tf @@ -0,0 +1,59 @@ +resource "teleport_role" "env_access" { + version = "v7" + metadata = { + name = "${var.env_label}_access" + description = "Can access infrastructure with label ${var.env_label}" + labels = { + env = var.env_label + } + } + + spec = { + allow = { + aws_role_arns = lookup(var.principals, "aws_role_arns", []) + azure_identities = lookup(var.principals, "azure_identities", []) + db_names = lookup(var.principals, "db_names", []) + db_users = lookup(var.principals, "db_users", []) + gcp_service_accounts = lookup(var.principals, "gcp_service_accounts", []) + kubernetes_groups = lookup(var.principals, "kubernetes_groups", []) + kubernetes_users = lookup(var.principals, 
"kubernetes_users", []) + logins = lookup(var.principals, "logins", []) + windows_desktop_logins = lookup(var.principals, "windows_desktop_logins", []) + + request = { + roles = var.request_roles + search_as_roles = var.request_roles + thresholds = [{ + approve = 1 + deny = 1 + filter = "!equals(request.reason, \"\")" + }] + } + + app_labels = { + env = [var.env_label] + } + + db_labels = { + env = [var.env_label] + } + + node_labels = { + env = [var.env_label] + } + + kubernetes_labels = { + env = [var.env_label] + } + + windows_desktop_labels = { + env = [var.env_label] + } + } + } +} + +output "role_name" { + value = teleport_role.env_access.metadata.name +} + diff --git a/examples/terraform-starter/env_role/inputs.tf b/examples/terraform-starter/env_role/inputs.tf new file mode 100644 index 0000000000000..77541c61d86d7 --- /dev/null +++ b/examples/terraform-starter/env_role/inputs.tf @@ -0,0 +1,13 @@ +variable "env_label" { + description = "\"env\" label value of resources that a user with this role can access" + type = string +} +variable "principals" { + description = "Map of strings to lists of strings corresponding to the principals on infrastructure resources that the user can access. Examples: logins, kubernetes_groups, db_names." 
+ type = map(list(string)) +} + +variable "request_roles" { + description = "List of strings indicating the names of roles that a user with this role can request" + type = list(string) +} diff --git a/examples/terraform-starter/env_role/required_providers.tf b/examples/terraform-starter/env_role/required_providers.tf new file mode 100644 index 0000000000000..b99ddb043a488 --- /dev/null +++ b/examples/terraform-starter/env_role/required_providers.tf @@ -0,0 +1,7 @@ +terraform { + required_providers { + teleport = { + source = "terraform.releases.teleport.dev/gravitational/teleport" + } + } +} diff --git a/examples/terraform-starter/env_role/reviewer.tf b/examples/terraform-starter/env_role/reviewer.tf new file mode 100644 index 0000000000000..4882b8b421b87 --- /dev/null +++ b/examples/terraform-starter/env_role/reviewer.tf @@ -0,0 +1,24 @@ +locals { + can_review_roles = join(", ", var.request_roles) +} + +resource "teleport_role" "env_access_reviewer" { + version = "v7" + count = length(var.request_roles) > 0 ? 
1 : 0 + metadata = { + name = "${local.can_review_roles}_reviewer" + description = "Can review Access Requests for: ${local.can_review_roles}" + } + + spec = { + allow = { + review_requests = { + roles = var.request_roles + } + } + } +} + +output "reviewer_role_names" { + value = teleport_role.env_access_reviewer[*].metadata.name +} diff --git a/examples/agent-pool-terraform/gcp/gcp-instance.tf b/examples/terraform-starter/gcp/gcp-instance.tf similarity index 84% rename from examples/agent-pool-terraform/gcp/gcp-instance.tf rename to examples/terraform-starter/gcp/gcp-instance.tf index 153f39c34ce7b..b722f0a320f3c 100644 --- a/examples/agent-pool-terraform/gcp/gcp-instance.tf +++ b/examples/terraform-starter/gcp/gcp-instance.tf @@ -10,6 +10,12 @@ resource "google_compute_instance" "teleport_agent" { name = "teleport-agent-${count.index}" zone = var.gcp_zone + // Initialize the instance tags to an empty map to prevent errors when the + // Teleport SSH Service fetches them. + params { + resource_manager_tags = {} + } + boot_disk { initialize_params { image = "family/ubuntu-2204-lts" diff --git a/examples/agent-pool-terraform/gcp/inputs.tf b/examples/terraform-starter/gcp/inputs.tf similarity index 100% rename from examples/agent-pool-terraform/gcp/inputs.tf rename to examples/terraform-starter/gcp/inputs.tf diff --git a/examples/agent-pool-terraform/gcp/required_providers.tf b/examples/terraform-starter/gcp/required_providers.tf similarity index 100% rename from examples/agent-pool-terraform/gcp/required_providers.tf rename to examples/terraform-starter/gcp/required_providers.tf diff --git a/examples/agent-pool-terraform/main.tf b/examples/terraform-starter/main.tf similarity index 100% rename from examples/agent-pool-terraform/main.tf rename to examples/terraform-starter/main.tf diff --git a/examples/terraform-starter/oidc/auth_pref.tf b/examples/terraform-starter/oidc/auth_pref.tf new file mode 100644 index 0000000000000..ac4f74ff028db --- /dev/null +++ 
b/examples/terraform-starter/oidc/auth_pref.tf @@ -0,0 +1,16 @@ +resource "teleport_auth_preference" "main" { + version = "v2" + metadata = { + description = "Require authentication via the ${var.oidc_connector_name} connector" + } + + spec = { + connector_name = teleport_oidc_connector.main.metadata.name + type = "oidc" + allow_local_auth = true + second_factor = "webauthn" + webauthn = { + rp_id = var.teleport_domain + } + } +} diff --git a/examples/terraform-starter/oidc/inputs.tf b/examples/terraform-starter/oidc/inputs.tf new file mode 100644 index 0000000000000..3b6c914ec35c6 --- /dev/null +++ b/examples/terraform-starter/oidc/inputs.tf @@ -0,0 +1,34 @@ +variable "teleport_domain" { + type = string + description = "Domain name of your Teleport cluster (to configure WebAuthn)" +} + +variable "oidc_claims_to_roles" { + type = list(object({ + claim = string + roles = list(string) + value = string + })) + description = "Mappings of OIDC claims to lists of Teleport role names" +} + +variable "oidc_client_id" { + type = string + description = "The OIDC identity provider's client ID" +} + +variable "oidc_connector_name" { + type = string + description = "Name of the Teleport OIDC connector resource" +} + +variable "oidc_redirect_url" { + type = string + description = "Redirect URL for the OIDC provider." +} + +variable "oidc_secret" { + type = string + description = "Secret for configuring the Teleport OIDC connector. Available from your identity provider." 
+} + diff --git a/examples/terraform-starter/oidc/oidc_connector.tf b/examples/terraform-starter/oidc/oidc_connector.tf new file mode 100644 index 0000000000000..dec4b24f33e55 --- /dev/null +++ b/examples/terraform-starter/oidc/oidc_connector.tf @@ -0,0 +1,13 @@ +resource "teleport_oidc_connector" "main" { + version = "v3" + metadata = { + name = var.oidc_connector_name + } + + spec = { + client_id = var.oidc_client_id + client_secret = var.oidc_secret + claims_to_roles = var.oidc_claims_to_roles + redirect_url = [var.oidc_redirect_url] + } +} diff --git a/examples/terraform-starter/oidc/required_providers.tf b/examples/terraform-starter/oidc/required_providers.tf new file mode 100644 index 0000000000000..b99ddb043a488 --- /dev/null +++ b/examples/terraform-starter/oidc/required_providers.tf @@ -0,0 +1,7 @@ +terraform { + required_providers { + teleport = { + source = "terraform.releases.teleport.dev/gravitational/teleport" + } + } +} diff --git a/examples/terraform-starter/saml/auth_pref.tf b/examples/terraform-starter/saml/auth_pref.tf new file mode 100644 index 0000000000000..1dec628d578e6 --- /dev/null +++ b/examples/terraform-starter/saml/auth_pref.tf @@ -0,0 +1,16 @@ +resource "teleport_auth_preference" "main" { + version = "v2" + metadata = { + description = "Require authentication via the ${var.saml_connector_name} connector" + } + + spec = { + connector_name = teleport_saml_connector.main.metadata.name + type = "saml" + allow_local_auth = true + second_factor = "webauthn" + webauthn = { + rp_id = var.teleport_domain + } + } +} diff --git a/examples/terraform-starter/saml/inputs.tf b/examples/terraform-starter/saml/inputs.tf new file mode 100644 index 0000000000000..168f0f3f19a4c --- /dev/null +++ b/examples/terraform-starter/saml/inputs.tf @@ -0,0 +1,28 @@ +variable "teleport_domain" { + type = string + description = "Domain name of your Teleport cluster (to configure WebAuthn)" +} + +variable "saml_connector_name" { + type = string + description = "Name 
for the SAML authentication connector created by this module" +} + +variable "saml_attributes_to_roles" { + type = list(object({ + name = string + roles = list(string) + value = string + })) + description = "Mappings of SAML attributes to lists of Teleport role names" +} + +variable "saml_acs" { + type = string + description = "URL (scheme, domain, port, and path) for the SAML assertion consumer service" +} + +variable "saml_entity_descriptor" { + type = string + description = "SAML entity descriptor" +} diff --git a/examples/terraform-starter/saml/required_providers.tf b/examples/terraform-starter/saml/required_providers.tf new file mode 100644 index 0000000000000..b99ddb043a488 --- /dev/null +++ b/examples/terraform-starter/saml/required_providers.tf @@ -0,0 +1,7 @@ +terraform { + required_providers { + teleport = { + source = "terraform.releases.teleport.dev/gravitational/teleport" + } + } +} diff --git a/examples/terraform-starter/saml/saml.tf b/examples/terraform-starter/saml/saml.tf new file mode 100644 index 0000000000000..ea1ea5e4e1d96 --- /dev/null +++ b/examples/terraform-starter/saml/saml.tf @@ -0,0 +1,13 @@ +resource "teleport_saml_connector" "main" { + version = "v2" + metadata = { + name = var.saml_connector_name + } + + spec = { + attributes_to_roles = var.saml_attributes_to_roles + + acs = var.saml_acs + entity_descriptor = var.saml_entity_descriptor + } +}