From 95031a4ff2f4907589e461a770d94170c707c092 Mon Sep 17 00:00:00 2001 From: Marcus Weiner Date: Sun, 30 Jun 2024 21:43:39 +0200 Subject: [PATCH] Provide scaffold command (#29) This implements the CLI command `tf-preview-gh scaffold`. It will create these files: - `backend.tf` or edit any file that has a `terraform { backend { ... } }` block in it - `.github/workflows/tf-run.yaml` - workflow for running plans on PRs and applies on main - `.github/workflows/tf-preview.yaml` - workflow for running speculative plans from local machines --- .dockerignore | 1 + README.md | 17 + cmd/tf-preview-gh/main.go | 297 +----------------- go.mod | 10 +- go.sum | 8 + pkg/fs/fs.go | 68 ++++ pkg/git/git.go | 43 +++ pkg/scaffold/files/tf-preview.yaml | 30 ++ pkg/scaffold/files/tf-run.yaml | 183 +++++++++++ pkg/scaffold/githubactions.go | 91 ++++++ pkg/scaffold/scaffold.go | 39 +++ pkg/scaffold/scaffold_test.go | 117 +++++++ .../update/.github/workflows/tf-run.yaml | 184 +++++++++++ pkg/scaffold/testdata/update/backend.tf | 19 ++ .../expected/.github/workflows/tf-run.yaml | 187 +++++++++++ .../testdata/update/expected/backend.tf | 18 ++ pkg/scaffold/tf.go | 129 ++++++++ pkg/scaffold/utils.go | 43 +++ pkg/speculative/logs.go | 72 +++++ pkg/speculative/run.go | 156 +++++++++ pkg/speculative/serve.go | 72 +++++ pkg/tfcontext/parse.go | 77 ++--- pkg/tfcontext/parse_test.go | 4 +- 23 files changed, 1538 insertions(+), 327 deletions(-) create mode 100644 pkg/fs/fs.go create mode 100644 pkg/git/git.go create mode 100644 pkg/scaffold/files/tf-preview.yaml create mode 100644 pkg/scaffold/files/tf-run.yaml create mode 100644 pkg/scaffold/githubactions.go create mode 100644 pkg/scaffold/scaffold.go create mode 100644 pkg/scaffold/scaffold_test.go create mode 100644 pkg/scaffold/testdata/update/.github/workflows/tf-run.yaml create mode 100644 pkg/scaffold/testdata/update/backend.tf create mode 100644 pkg/scaffold/testdata/update/expected/.github/workflows/tf-run.yaml create mode 100644 pkg/scaffold/testdata/update/expected/backend.tf create mode 100644 pkg/scaffold/tf.go create mode 100644 pkg/scaffold/utils.go create mode 100644 pkg/speculative/logs.go create mode 100644 pkg/speculative/run.go create mode 100644 pkg/speculative/serve.go diff --git a/.dockerignore b/.dockerignore index 8df5515..bbeccae 100644 --- a/.dockerignore +++ b/.dockerignore @@ -4,3 +4,4 @@ docs/**/* docker-compose.yml LICENSE README.md +testdata/ diff --git a/README.md b/README.md index eff0672..fb3d524 100644 --- a/README.md +++ b/README.md @@ -108,6 +108,23 @@ sudo mv tf-preview-gh /usr/local/bin/tf-preview-gh sudo chmod +x /usr/local/bin/tf-preview-gh ``` +### Configure + +To use it with your repository, you need to have some workflows in place. + +The `tf-preview-gh scaffold` command sets up everything that's necessary. This includes the workflows to run plans for pull requests and applies for merges to main. + +Running it should look like this: + +``` +% tf-preview-gh scaffold +Wrote backend config to: backend.tf +Wrote workflow to: .github/workflows/tf-preview.yaml +Wrote workflow to: .github/workflows/tf-run.yaml +``` + +Next, commit the new files and get them on main before continuing. + ### Usage Run the CLI in the directory for which you want to run a remote plan.
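For reference, the backend config written by `scaffold` is an `http` backend pointing at the shared backend service. Assuming the default `--backend-url` and a repository named `terraform-backend` (the name is derived from the `origin` remote, so yours will differ), the generated `backend.tf` comes out roughly like this:

```
terraform {
  backend "http" {
    address        = "https://ffddorf-terraform-backend.fly.dev/state/terraform-backend/default"
    lock_address   = "https://ffddorf-terraform-backend.fly.dev/state/terraform-backend/default"
    unlock_address = "https://ffddorf-terraform-backend.fly.dev/state/terraform-backend/default"
    username       = "github_pat"
  }
}
```

The state path follows `/state/<repo>/default`, and no password is written to the file: the workflows and local runs supply the GitHub token via `TF_HTTP_PASSWORD` instead.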
diff --git a/cmd/tf-preview-gh/main.go b/cmd/tf-preview-gh/main.go index 3cfb7be..c5f40c7 100644 --- a/cmd/tf-preview-gh/main.go +++ b/cmd/tf-preview-gh/main.go @@ -1,309 +1,30 @@ package main import ( - "bufio" "context" - "errors" - "flag" "fmt" - "io" - "net/http" - "net/url" "os" - "os/exec" "os/signal" - "path/filepath" - "slices" - "strings" "syscall" - "time" - "github.com/cenkalti/backoff" - "github.com/go-git/go-git/v5" - "github.com/google/go-github/v62/github" - "github.com/google/uuid" - "github.com/hashicorp/go-slug" - "github.com/nimbolus/terraform-backend/pkg/tfcontext" - giturls "github.com/whilp/git-urls" + "github.com/nimbolus/terraform-backend/pkg/fs" + "github.com/nimbolus/terraform-backend/pkg/scaffold" + "github.com/nimbolus/terraform-backend/pkg/speculative" ) -func serveWorkspace(ctx context.Context) (string, error) { +func main() { cwd, err := os.Getwd() if err != nil { - return "", err - } - - backend, err := tfcontext.FindBackend(cwd) - if err != nil { - return "", err - } - backendURL, err := url.Parse(backend.Address) - if err != nil { - return "", fmt.Errorf("failed to parse backend url: %s, %w", backend.Address, err) - } - if backend.Password == "" { - backendPassword, ok := os.LookupEnv("TF_HTTP_PASSWORD") - if !ok || backendPassword == "" { - return "", errors.New("missing backend password") - } - backend.Password = backendPassword - } - - id := uuid.New() - backendURL.Path = filepath.Join(backendURL.Path, "/share/", id.String()) - - pr, pw := io.Pipe() - req, err := http.NewRequestWithContext(ctx, http.MethodPost, backendURL.String(), pr) - if err != nil { - return "", err + panic(fmt.Errorf("failed to get working directory: %w", err)) } - req.Header.Set("Content-Type", "application/octet-stream") - req.SetBasicAuth(backend.Username, backend.Password) - - go func() { - _, err := slug.Pack(cwd, pw, true) - if err != nil { - fmt.Printf("failed to pack workspace: %v\n", err) - pw.CloseWithError(err) - } else { - pw.Close() - } - }() - - go func() { - resp, err := http.DefaultClient.Do(req) - if err != nil { - fmt.Printf("failed to stream workspace: %v\n", err) - } else if resp.StatusCode/100 != 2 { - fmt.Printf("invalid status code after streaming workspace: %d\n", resp.StatusCode) - } - fmt.Println("done streaming workspace") - }() - - return backendURL.String(), nil -} -type countingReader struct { - io.Reader - readBytes int -} - -func (c *countingReader) Read(dst []byte) (int, error) { - n, err := c.Reader.Read(dst) - c.readBytes += n - return n, err -} - -var ignoredGroupNames = []string{ - "Operating System", - "Runner Image", - "Runner Image Provisioner", - "GITHUB_TOKEN Permissions", -} + rootCmd := speculative.NewCommand() + rootCmd.AddCommand(scaffold.NewCommand(fs.ForOS(cwd), os.Stdin)) -func streamLogs(logsURL *url.URL, skip int64) (int64, error) { - logs, err := http.Get(logsURL.String()) - if err != nil { - return 0, err - } - if logs.StatusCode != http.StatusOK { - return 0, fmt.Errorf("invalid status for logs: %d", logs.StatusCode) - } - defer logs.Body.Close() - - if _, err := io.Copy(io.Discard, io.LimitReader(logs.Body, skip)); err != nil { - return 0, err - } - - r := &countingReader{Reader: logs.Body} - scanner := bufio.NewScanner(r) - groupDepth := 0 - for scanner.Scan() { - line := scanner.Text() - ts, rest, ok := strings.Cut(line, " ") - if !ok { - rest = ts - } - if groupName, ok := strings.CutPrefix(rest, "##[group]"); ok { - groupDepth++ - if !slices.Contains(ignoredGroupNames, groupName) { - fmt.Printf("\n# %s\n", groupName) - } - 
} - if groupDepth == 0 { - fmt.Println(rest) - } - if strings.HasPrefix(rest, "##[endgroup]") { - groupDepth-- - } - } - if err := scanner.Err(); err != nil { - return int64(r.readBytes), err - } - - return int64(r.readBytes), err -} - -var ( - owner string - repo string - workflowFilename string -) - -func gitRepoOrigin() (*url.URL, error) { - cwd, err := os.Getwd() - if err != nil { - return nil, err - } - - repo, err := git.PlainOpen(cwd) - if err != nil { - return nil, err - } - - orig, err := repo.Remote("origin") - if err != nil { - return nil, err - } - if orig == nil { - return nil, errors.New("origin remote not present") - } - - for _, u := range orig.Config().URLs { - remoteURL, err := giturls.Parse(u) - if err != nil { - continue - } - if remoteURL.Hostname() == "github.com" { - return remoteURL, nil - } - } - return nil, errors.New("no suitable url found") -} - -func main() { ctx, cancel := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM) defer cancel() - flag.StringVar(&owner, "github-owner", "", "Repository owner") - flag.StringVar(&repo, "github-repo", "", "Repository name") - flag.StringVar(&workflowFilename, "workflow-file", "preview.yaml", "Name of the workflow file to run for previews") - flag.Parse() - - if owner == "" || repo == "" { - if ghURL, err := gitRepoOrigin(); err == nil { - parts := strings.Split(ghURL.Path, "/") - if len(parts) >= 2 { - owner = parts[0] - repo = strings.TrimSuffix(parts[1], ".git") - fmt.Printf("Using local repo info: %s/%s\n", owner, repo) - } - } - } - if owner == "" { - panic("Missing flag: -github-owner") - } - if repo == "" { - panic("Missing flag: -github-repo") - } - - serverURL, err := serveWorkspace(ctx) - if err != nil { - panic(err) - } - - // steal token from GH CLI - cmd := exec.CommandContext(ctx, "gh", "auth", "token") - out, err := cmd.Output() - if err != nil { - panic(err) - } - - token := strings.TrimSpace(string(out)) - gh := github.NewClient(nil).WithAuthToken(token) - - startedAt := time.Now().UTC() - - // start workflow - _, err = gh.Actions.CreateWorkflowDispatchEventByFileName(ctx, - owner, repo, workflowFilename, - github.CreateWorkflowDispatchEventRequest{ - Ref: "main", - Inputs: map[string]interface{}{ - "workspace_transfer_url": serverURL, - }, - }, - ) - if err != nil { - panic(err) - } - - fmt.Println("Waiting for run to start...") - - // find workflow run - var run *github.WorkflowRun - err = backoff.Retry(func() error { - workflows, _, err := gh.Actions.ListWorkflowRunsByFileName( - ctx, owner, repo, workflowFilename, - &github.ListWorkflowRunsOptions{ - Created: fmt.Sprintf(">=%s", startedAt.Format("2006-01-02T15:04")), - }, - ) - if err != nil { - return backoff.Permanent(err) - } - if len(workflows.WorkflowRuns) == 0 { - return fmt.Errorf("no workflow runs found") - } - - run = workflows.WorkflowRuns[0] - return nil - }, backoff.NewExponentialBackOff()) - if err != nil { - panic(err) - } - - var jobID int64 - err = backoff.Retry(func() error { - jobs, _, err := gh.Actions.ListWorkflowJobs(ctx, - owner, repo, *run.ID, - &github.ListWorkflowJobsOptions{}, - ) - if err != nil { - return backoff.Permanent(err) - } - if len(jobs.Jobs) == 0 { - return fmt.Errorf("no jobs found") - } - - jobID = *jobs.Jobs[0].ID - return nil - }, backoff.NewExponentialBackOff()) - if err != nil { - panic(err) - } - - logsURL, _, err := gh.Actions.GetWorkflowJobLogs(ctx, owner, repo, jobID, 2) - if err != nil { - panic(err) - } - - var readBytes int64 - for { - n, err := streamLogs(logsURL, readBytes) - 
if err != nil { - panic(err) - } - readBytes += n - - // check if job is done - job, _, err := gh.Actions.GetWorkflowJobByID(ctx, owner, repo, jobID) - if err != nil { - panic(err) - } - if job.CompletedAt != nil { - fmt.Println("Job complete.") - break - } + if err := rootCmd.ExecuteContext(ctx); err != nil { + os.Exit(1) } } diff --git a/go.mod b/go.mod index b332dd7..e69d31b 100644 --- a/go.mod +++ b/go.mod @@ -6,6 +6,7 @@ go 1.22 toolchain go1.22.4 require ( + dario.cat/mergo v1.0.0 github.com/cenkalti/backoff v2.2.1+incompatible github.com/coreos/go-oidc/v3 v3.10.0 github.com/go-git/go-git/v5 v5.12.0 @@ -18,15 +19,19 @@ require ( github.com/hashicorp/go-slug v0.15.2 github.com/hashicorp/hcl/v2 v2.21.0 github.com/hashicorp/vault/api v1.14.0 + github.com/liamg/memoryfs v1.6.0 github.com/lib/pq v1.10.9 github.com/mattn/go-sqlite3 v1.14.22 github.com/minio/minio-go/v7 v7.0.72 github.com/prometheus/client_golang v1.19.1 github.com/sirupsen/logrus v1.9.3 + github.com/spf13/cobra v1.8.0 github.com/spf13/viper v1.19.0 github.com/stretchr/testify v1.9.0 github.com/whilp/git-urls v1.0.0 + github.com/zclconf/go-cty v1.13.1 go.uber.org/multierr v1.11.0 + gopkg.in/yaml.v3 v3.0.1 ) require ( @@ -35,7 +40,6 @@ require ( cloud.google.com/go/compute/metadata v0.2.3 // indirect cloud.google.com/go/iam v1.1.6 // indirect cloud.google.com/go/storage v1.38.0 // indirect - dario.cat/mergo v1.0.0 // indirect github.com/Microsoft/go-winio v0.6.1 // indirect github.com/ProtonMail/go-crypto v1.0.0 // indirect github.com/agext/levenshtein v1.2.3 // indirect @@ -62,6 +66,7 @@ require ( github.com/goccy/go-json v0.10.2 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.4 // indirect + github.com/google/go-cmp v0.6.0 // indirect github.com/google/go-querystring v1.1.0 // indirect github.com/google/s2a-go v0.1.7 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect @@ -79,6 +84,7 @@ require ( github.com/hashicorp/go-version v1.6.0 // indirect github.com/hashicorp/hcl v1.0.1-vault-5 // indirect github.com/hashicorp/terraform-json v0.16.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect github.com/jinzhu/copier v0.3.5 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect @@ -112,7 +118,6 @@ require ( github.com/tmccombs/hcl2json v0.5.0 // indirect github.com/ulikunitz/xz v0.5.11 // indirect github.com/xanzy/ssh-agent v0.3.3 // indirect - github.com/zclconf/go-cty v1.13.1 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect @@ -137,5 +142,4 @@ require ( google.golang.org/protobuf v1.34.0 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index bd979d6..5279fa2 100644 --- a/go.sum +++ b/go.sum @@ -242,6 +242,7 @@ github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/coreos/go-oidc/v3 v3.10.0 h1:tDnXHnLyiTVyT/2zLDGj09pFPkhND8Gl8lnTRhoEaJU= github.com/coreos/go-oidc/v3 v3.10.0/go.mod h1:5j11xcw0D3+SGxn6Z/WFADsgcWVMyNAlSQupk0KK3ac= +github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod 
h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg= github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -457,6 +458,8 @@ github.com/hashicorp/vault/api v1.14.0 h1:Ah3CFLixD5jmjusOgm8grfN9M0d+Y8fVR2SW0K github.com/hashicorp/vault/api v1.14.0/go.mod h1:pV9YLxBGSz+cItFDd8Ii4G17waWOQ32zVjMWHe/cOqk= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/jinzhu/copier v0.3.5 h1:GlvfUwHk62RokgqVNvYsku0TATCF7bAHVwEXoBh3iJg= @@ -483,6 +486,8 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/liamg/memoryfs v1.6.0 h1:jAFec2HI1PgMTem5gR7UT8zi9u4BfG5jorCRlLH06W8= +github.com/liamg/memoryfs v1.6.0/go.mod h1:z7mfqXFQS8eSeBBsFjYLlxYRMRyiPktytvYCYTb3BSk= github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= @@ -540,6 +545,7 @@ github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDN github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc= github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk= github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ= @@ -560,6 +566,8 @@ github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= +github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= +github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI= diff --git a/pkg/fs/fs.go b/pkg/fs/fs.go new file mode 100644 
index 0000000..3923e2a --- /dev/null +++ b/pkg/fs/fs.go @@ -0,0 +1,68 @@ +package fs + +import ( + "bytes" + "io" + "io/fs" + iofs "io/fs" + "os" + "path/filepath" +) + +type FullStdFS interface { + iofs.ReadDirFS + iofs.StatFS + iofs.ReadFileFS +} + +type FS interface { + FullStdFS + + WriteFile(string, []byte, fs.FileMode) error + MkdirAll(string, fs.FileMode) error +} + +type advancedFS struct { + FullStdFS + + path string +} + +func ForOS(path string) FS { + osFS := os.DirFS(path) + return &advancedFS{ + FullStdFS: osFS.(FullStdFS), + path: path, + } +} + +func (afs *advancedFS) WriteFile(path string, data []byte, perm fs.FileMode) error { + fullpath := filepath.Join(afs.path, path) + return os.WriteFile(fullpath, data, perm) +} + +func (afs *advancedFS) MkdirAll(path string, perm fs.FileMode) error { + fullpath := filepath.Join(afs.path, path) + return os.MkdirAll(fullpath, perm) +} + +type writableFile struct { + fs FS + path string + buf bytes.Buffer +} + +func Create(dir FS, path string) (io.WriteCloser, error) { + return &writableFile{ + fs: dir, + path: path, + }, nil +} + +func (wf *writableFile) Write(data []byte) (int, error) { + return wf.buf.Write(data) +} + +func (wf *writableFile) Close() error { + return wf.fs.WriteFile(wf.path, wf.buf.Bytes(), 0644) +} diff --git a/pkg/git/git.go b/pkg/git/git.go new file mode 100644 index 0000000..d5165cc --- /dev/null +++ b/pkg/git/git.go @@ -0,0 +1,43 @@ +package git + +import ( + "errors" + "net/url" + "os" + + "github.com/go-git/go-git/v5" + giturls "github.com/whilp/git-urls" +) + +func RepoOrigin() (*url.URL, error) { + cwd, err := os.Getwd() + if err != nil { + return nil, err + } + + repo, err := git.PlainOpenWithOptions(cwd, &git.PlainOpenOptions{ + DetectDotGit: true, + }) + if err != nil { + return nil, err + } + + orig, err := repo.Remote("origin") + if err != nil { + return nil, err + } + if orig == nil { + return nil, errors.New("origin remote not present") + } + + for _, u := range orig.Config().URLs { + remoteURL, err := giturls.Parse(u) + if err != nil { + continue + } + if remoteURL.Hostname() == "github.com" { + return remoteURL, nil + } + } + return nil, errors.New("no suitable url found") +} diff --git a/pkg/scaffold/files/tf-preview.yaml b/pkg/scaffold/files/tf-preview.yaml new file mode 100644 index 0000000..7204c51 --- /dev/null +++ b/pkg/scaffold/files/tf-preview.yaml @@ -0,0 +1,30 @@ +name: Terraform +on: + workflow_dispatch: + inputs: + workspace_transfer_url: + description: "URL from which to download the workspace" + required: true + type: string +jobs: + plan: + name: Speculative Plan + runs-on: ubuntu-22.04 + env: + # renovate: datasource=github-releases depName=hashicorp/terraform + TERRAFORM_VERSION: "1.6.6" + TF_HTTP_PASSWORD: ${{ github.token }} + TF_IN_AUTOMATION: "true" + TF_CLI_ARGS: -input=false + # environment variables for providers + # example: + # NETBOX_API_TOKEN: ${{ secrets.NETBOX_API_TOKEN }} + steps: + - name: Download Workspace + run: | + curl ${{ inputs.workspace_transfer_url }} --user github_pat:${TF_HTTP_PASSWORD} --fail --silent | tar -xzf - + - uses: hashicorp/setup-terraform@v3 + with: + terraform_version: ${TERRAFORM_VERSION} + - run: terraform init + - run: terraform plan diff --git a/pkg/scaffold/files/tf-run.yaml b/pkg/scaffold/files/tf-run.yaml new file mode 100644 index 0000000..f8d19e9 --- /dev/null +++ b/pkg/scaffold/files/tf-run.yaml @@ -0,0 +1,183 @@ +name: Terraform +on: + push: + branches: [main] + pull_request: +jobs: + run: + name: Run + runs-on: ubuntu-22.04 + 
concurrency: + group: terraform + cancel-in-progress: false + permissions: + contents: read + pull-requests: write + checks: write + env: + # renovate: datasource=github-releases depName=hashicorp/terraform + TERRAFORM_VERSION: "1.6.6" + TF_HTTP_PASSWORD: ${{ github.token }} + TF_IN_AUTOMATION: "true" + TF_CLI_ARGS: "-input=false" + # environment variables for providers + # example: + # NETBOX_API_TOKEN: ${{ secrets.NETBOX_API_TOKEN }} + steps: + - name: Checkout + uses: actions/checkout@v4 + - uses: hashicorp/setup-terraform@v3 + with: + terraform_version: ${TERRAFORM_VERSION} + - run: terraform init + - run: terraform plan -out=tfplan + - name: terraform apply + if: ${{ github.event_name == 'push' && github.ref_name == 'main' }} + run: | + set -o pipefail + terraform apply tfplan | tee apply.log + - name: Backup state + if: ${{ github.event_name == 'push' && github.ref_name == 'main' }} + env: + AWS_ACCESS_KEY_ID: ${{ secrets.B2_TFBACKUP_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.B2_TFBACKUP_SECRET_KEY }} + AWS_DEFAULT_REGION: us-east-1 + AWS_ENDPOINT_URL: https://s3.us-east-005.backblazeb2.com + S3_BUCKET: terraform-state-backup + TF_CLI_ARGS: "" + run: | + set -e -o pipefail + + terraform show -json > state.json + aws s3 cp state.json s3://${S3_BUCKET}/${{ github.repository }}/$(date +%s).json + + DELETE_FILES=$(\ + aws s3api list-objects-v2 --bucket "${S3_BUCKET}" --prefix "${{ github.repository }}" | \ + jq -r '.Contents | map(.Key) | sort | reverse | .[5:] | .[]' \ + ) + for file in ${DELETE_FILES}; do aws s3 rm s3://${S3_BUCKET}/$file; done + - run: terraform show -json tfplan > tfplan.json + env: + TF_CLI_ARGS: "" + - run: terraform show -no-color tfplan > summary.txt + env: + TF_CLI_ARGS: "" + - name: Create status check with details + uses: actions/github-script@v7 + with: + github-token: ${{ github.token }} + script: | + const fs = require('fs').promises + const plan = JSON.parse(await fs.readFile('tfplan.json', 'utf-8')) + const humanSummary = await fs.readFile('summary.txt', 'utf-8') + let applyLog; + try { + applyLog = await fs.readFile('apply.log', 'utf-8') + } catch {} + + function countActions(plan, type) { + return plan.resource_changes.filter(ch => ch.change.actions.includes(type)).length + } + const createCount = countActions(plan, 'create') + const updateCount = countActions(plan, 'update') + const deleteCount = countActions(plan, 'delete') + + const noChanges = createCount == 0 && updateCount == 0 && deleteCount == 0 + const title = noChanges + ? "No changes" + : (context.eventName === 'push' + ? `${createCount} added, ${updateCount} changed, ${deleteCount} destroyed` + : `${createCount} to add, ${updateCount} to change, ${deleteCount} to destroy` + ) + + const codefence = "```" + const summary = ` + # Terraform Plan + ${codefence} + ${humanSummary.trim("\n")} + ${codefence} + ${!!applyLog ? ` + # Terraform Apply + ${codefence} + ${applyLog.replace(/\u001b\[[^m]+m/g, '').trim()} + ${codefence} + ` : ""} + ` + + const sha = context.eventName === 'pull_request' + ? context.payload.pull_request.head.sha + : context.sha + await github.rest.checks.create({ + owner: context.repo.owner, + repo: context.repo.repo, + head_sha: sha, + status: 'completed', + conclusion: noChanges ? 'neutral' : 'success', + name: context.eventName === 'push' ? 
"Apply" : "Plan", + output: { + title, + summary, + }, + }); + - name: Show plan on PR + uses: actions/github-script@v7 + if: ${{ github.event_name == 'pull_request' }} + with: + github-token: ${{ github.token }} + script: | + const { repository: { pullRequest: { comments } } } = await github.graphql(` + query($owner:String!, $name:String!, $pr:Int!) { + repository(owner:$owner, name:$name) { + pullRequest(number:$pr) { + comments(last: 10) { + nodes { + id, + minimizedReason + author { + ...on Bot { + login + } + } + } + } + } + } + } + `, { + owner: context.repo.owner, + name: context.repo.repo, + pr: context.issue.number, + }) + + const commentsToHide = comments.nodes.filter((comment) => { + return !comment.minimizedReason && comment.author.login == "github-actions" + }) + if (commentsToHide.length > 0) { + await github.graphql(` + mutation { + ${commentsToHide.map((c,i) => + `c${i}: minimizeComment(input: { subjectId: "${c.id}", classifier: OUTDATED }) { + clientMutationId + } + ` + ).join("")} + } + `) + } + + const fs = require('fs').promises + const plan = await fs.readFile('summary.txt', 'utf-8') + + const codefence = "```" + const body = ` + #### :building_construction: Terraform Plan + ${codefence} + ${plan.trim("\n")} + ${codefence}` + + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body, + }) diff --git a/pkg/scaffold/githubactions.go b/pkg/scaffold/githubactions.go new file mode 100644 index 0000000..80a7a35 --- /dev/null +++ b/pkg/scaffold/githubactions.go @@ -0,0 +1,91 @@ +package scaffold + +import ( + "context" + "embed" + "fmt" + "io" + "path/filepath" + + "dario.cat/mergo" + "github.com/nimbolus/terraform-backend/pkg/fs" + "gopkg.in/yaml.v3" +) + +var ( + //go:embed files + assets embed.FS + filesToWrite = []string{ + "tf-preview.yaml", + "tf-run.yaml", + } +) + +func writeGithubActionsWorkflows(ctx context.Context, dir fs.FS, stdin io.Reader) error { + if err := dir.MkdirAll(".github/workflows", 0755); err != nil { + return err + } + + for _, filename := range filesToWrite { + outFilename := filepath.Join(".github", "workflows", filename) + + _, err := dir.Stat(outFilename) + fileExists := err == nil + if fileExists { + ok, err := promptYesNo(ctx, stdin, fmt.Sprintf("Workflow at %s already exist. Do you want to replace it? 
(This is experimental and might not deal well with your edits.)", outFilename)) + if err != nil { + return err + } + if !ok { + fmt.Printf("Skipping update of %s\n", outFilename) + continue + } + } + + srcFile, err := assets.Open(filepath.Join("files", filename)) + if err != nil { + return err + } + defer srcFile.Close() + + var config yaml.Node + if err := yaml.NewDecoder(srcFile).Decode(&config); err != nil { + return err + } + + if fileExists { + oldFile, err := dir.Open(outFilename) + if err != nil { + return err + } + defer oldFile.Close() + + var oldConfig yaml.Node + if err := yaml.NewDecoder(oldFile).Decode(&oldConfig); err != nil { + return err + } + + if err := mergo.Merge(&oldConfig, &config, mergo.WithSliceDeepCopy); err != nil { + return err + } + config = oldConfig + } + + f, err := fs.Create(dir, outFilename) + if err != nil { + return err + } + defer f.Close() + + enc := yaml.NewEncoder(f) + enc.SetIndent(2) + if err := enc.Encode(&config); err != nil { + return err + } + if err := enc.Close(); err != nil { + return err + } + fmt.Printf("Wrote workflow to: %s\n", outFilename) + } + return nil +} diff --git a/pkg/scaffold/scaffold.go b/pkg/scaffold/scaffold.go new file mode 100644 index 0000000..717600c --- /dev/null +++ b/pkg/scaffold/scaffold.go @@ -0,0 +1,39 @@ +package scaffold + +import ( + "context" + "io" + + "github.com/nimbolus/terraform-backend/pkg/fs" + "github.com/spf13/cobra" +) + +var ( + backendAddress string +) + +func NewCommand(dir fs.FS, stdin io.Reader) *cobra.Command { + cmd := &cobra.Command{ + Use: "scaffold", + Short: "scaffold the necessary config to use the GitHub Actions Terraform workflow", + RunE: func(cmd *cobra.Command, args []string) error { + return run(cmd.Context(), dir, stdin) + }, + } + + cmd.Flags().StringVar(&backendAddress, "backend-url", "https://ffddorf-terraform-backend.fly.dev/", "URL to use as the backend address") + + return cmd +} + +func run(ctx context.Context, dir fs.FS, stdin io.Reader) error { + if err := writeBackendConfig(ctx, dir, stdin); err != nil { + return err + } + + if err := writeGithubActionsWorkflows(ctx, dir, stdin); err != nil { + return err + } + + return nil +} diff --git a/pkg/scaffold/scaffold_test.go b/pkg/scaffold/scaffold_test.go new file mode 100644 index 0000000..30e9126 --- /dev/null +++ b/pkg/scaffold/scaffold_test.go @@ -0,0 +1,117 @@ +package scaffold_test + +import ( + "io" + "io/fs" + "os" + "path/filepath" + "testing" + + "github.com/liamg/memoryfs" + "github.com/nimbolus/terraform-backend/pkg/scaffold" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +const expectedBackendConfig = `terraform { + backend "http" { + address = "https://ffddorf-terraform-backend.fly.dev/state/terraform-backend/default" + lock_address = "https://ffddorf-terraform-backend.fly.dev/state/terraform-backend/default" + unlock_address = "https://ffddorf-terraform-backend.fly.dev/state/terraform-backend/default" + username = "github_pat" + } +} +` + +func compareFiles(t *testing.T, fs1, fs2 fs.FS, name string) { + file1, err := fs.ReadFile(fs1, name) + require.NoError(t, err) + + file2, err := fs.ReadFile(fs2, name) + require.NoError(t, err) + + assert.Equal(t, string(file1), string(file2), "in file: %s", name) +} + +type confirmer struct{} + +var confirmation = []byte{'y', '\n'} + +func (c *confirmer) Read(dst []byte) (int, error) { + return copy(dst, confirmation), nil +} + +func TestScaffolding(t *testing.T) { + nativeFS := os.DirFS("files") + + tests := map[string]struct { + stdin 
io.Reader + assert func(*testing.T, *memoryfs.FS) + }{ + "empty": { + assert: func(t *testing.T, memfs *memoryfs.FS) { + backendOut, err := memfs.ReadFile("backend.tf") + require.NoError(t, err) + assert.Equal(t, expectedBackendConfig, string(backendOut)) + + subFS, err := memfs.Sub(".github/workflows") + require.NoError(t, err) + compareFiles(t, nativeFS, subFS, "tf-preview.yaml") + compareFiles(t, nativeFS, subFS, "tf-run.yaml") + }, + }, + "update": { + stdin: &confirmer{}, + assert: func(t *testing.T, memfs *memoryfs.FS) { + expectedFS, err := memfs.Sub("expected") + require.NoError(t, err) + compareFiles(t, expectedFS, memfs, "backend.tf") + compareFiles(t, expectedFS, memfs, ".github/workflows/tf-run.yaml") + + subFS, err := memfs.Sub(".github/workflows") + require.NoError(t, err) + compareFiles(t, nativeFS, subFS, "tf-preview.yaml") + }, + }, + } + + for name, params := range tests { + t.Run(name, func(t *testing.T) { + memfs := memoryfs.New() + + s, err := os.Stat("testdata/" + name) + if err == nil { + require.True(t, s.IsDir(), "testdata for test name needs to be a directory") + testFS := os.DirFS("testdata/" + name) + err := fs.WalkDir(testFS, ".", func(path string, d fs.DirEntry, err error) error { + if err != nil { + return err + } + if !d.Type().IsRegular() { + return nil + } + + contents, err := fs.ReadFile(testFS, path) + if err != nil { + return err + } + if err := memfs.MkdirAll(filepath.Dir(path), 0755); err != nil { + return err + } + return memfs.WriteFile(path, contents, d.Type().Perm()) + }) + require.NoError(t, err) + } + + var stdin io.Reader = os.Stdin + if params.stdin != nil { + stdin = params.stdin + } + + cmd := scaffold.NewCommand(memfs, stdin) + require.NoError(t, cmd.Execute()) + + params.assert(t, memfs) + }) + } +} diff --git a/pkg/scaffold/testdata/update/.github/workflows/tf-run.yaml b/pkg/scaffold/testdata/update/.github/workflows/tf-run.yaml new file mode 100644 index 0000000..774baf7 --- /dev/null +++ b/pkg/scaffold/testdata/update/.github/workflows/tf-run.yaml @@ -0,0 +1,184 @@ +name: Terraform +on: + push: + branches: [main] + pull_request: +jobs: + run: + name: Run + runs-on: ubuntu-22.04 + concurrency: + group: terraform + cancel-in-progress: false + permissions: + contents: read + pull-requests: write + checks: write + env: + # renovate: datasource=github-releases depName=hashicorp/terraform + TERRAFORM_VERSION: "1.6.0" + TF_HTTP_PASSWORD: ${{ github.token }} + TF_IN_AUTOMATION: "true" + TF_CLI_ARGS: "-input=false" + + # Secrets for providers + NETBOX_API_TOKEN: ${{ secrets.NETBOX_API_TOKEN }} + PROXMOX_TOKEN: ${{ secrets.PROXMOX_TOKEN }} + steps: + - name: Checkout + uses: actions/checkout@v4 + - uses: hashicorp/setup-terraform@v3 + with: + terraform_version: ${TERRAFORM_VERSION} + - run: terraform init + - run: terraform plan -out=tfplan + - name: terraform apply + if: ${{ github.event_name == 'push' && github.ref_name == 'main' }} + run: | + set -o pipefail + terraform apply tfplan | tee apply.log + - name: Backup state + if: ${{ github.event_name == 'push' && github.ref_name == 'main' }} + env: + AWS_ACCESS_KEY_ID: ${{ secrets.B2_TFBACKUP_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.B2_TFBACKUP_SECRET_KEY }} + AWS_DEFAULT_REGION: us-east-1 + AWS_ENDPOINT_URL: https://s3.us-east-005.backblazeb2.com + S3_BUCKET: terraform-state-backup + TF_CLI_ARGS: "" + run: | + set -e -o pipefail + + terraform show -json > state.json + aws s3 cp state.json s3://${S3_BUCKET}/${{ github.repository }}/$(date +%s).json + + DELETE_FILES=$(\ + aws s3api 
list-objects-v2 --bucket "${S3_BUCKET}" --prefix "${{ github.repository }}" | \ + jq -r '.Contents | map(.Key) | sort | reverse | .[5:] | .[]' \ + ) + for file in ${DELETE_FILES}; do aws s3 rm s3://${S3_BUCKET}/$file; done + - run: terraform show -json tfplan > tfplan.json + env: + TF_CLI_ARGS: "" + - run: terraform show -no-color tfplan > summary.txt + env: + TF_CLI_ARGS: "" + - name: Create status check with details + uses: actions/github-script@v7 + with: + github-token: ${{ github.token }} + script: | + const fs = require('fs').promises + const plan = JSON.parse(await fs.readFile('tfplan.json', 'utf-8')) + const humanSummary = await fs.readFile('summary.txt', 'utf-8') + let applyLog; + try { + applyLog = await fs.readFile('apply.log', 'utf-8') + } catch {} + + function countActions(plan, type) { + return plan.resource_changes.filter(ch => ch.change.actions.includes(type)).length + } + const createCount = countActions(plan, 'create') + const updateCount = countActions(plan, 'update') + const deleteCount = countActions(plan, 'delete') + + const noChanges = createCount == 0 && updateCount == 0 && deleteCount == 0 + const title = noChanges + ? "No changes" + : (context.eventName === 'push' + ? `${createCount} added, ${updateCount} changed, ${deleteCount} destroyed` + : `${createCount} to add, ${updateCount} to change, ${deleteCount} to destroy` + ) + + const codefence = "```" + const summary = ` + # Terraform Plan + ${codefence} + ${humanSummary.trim("\n")} + ${codefence} + ${!!applyLog ? ` + # Terraform Apply + ${codefence} + ${applyLog.replace(/\u001b\[[^m]+m/g, '').trim()} + ${codefence} + ` : ""} + ` + + const sha = context.eventName === 'pull_request' + ? context.payload.pull_request.head.sha + : context.sha + await github.rest.checks.create({ + owner: context.repo.owner, + repo: context.repo.repo, + head_sha: sha, + status: 'completed', + conclusion: noChanges ? 'neutral' : 'success', + name: context.eventName === 'push' ? "Apply" : "Plan", + output: { + title, + summary, + }, + }); + - name: Show plan on PR + uses: actions/github-script@v7 + if: ${{ github.event_name == 'pull_request' }} + with: + github-token: ${{ github.token }} + script: | + const { repository: { pullRequest: { comments } } } = await github.graphql(` + query($owner:String!, $name:String!, $pr:Int!) 
{ + repository(owner:$owner, name:$name) { + pullRequest(number:$pr) { + comments(last: 10) { + nodes { + id, + minimizedReason + author { + ...on Bot { + login + } + } + } + } + } + } + } + `, { + owner: context.repo.owner, + name: context.repo.repo, + pr: context.issue.number, + }) + + const commentsToHide = comments.nodes.filter((comment) => { + return !comment.minimizedReason && comment.author.login == "github-actions" + }) + if (commentsToHide.length > 0) { + await github.graphql(` + mutation { + ${commentsToHide.map((c,i) => + `c${i}: minimizeComment(input: { subjectId: "${c.id}", classifier: OUTDATED }) { + clientMutationId + } + ` + ).join("")} + } + `) + } + + const fs = require('fs').promises + const plan = await fs.readFile('summary.txt', 'utf-8') + + const codefence = "```" + const body = ` + #### :building_construction: Terraform Plan + ${codefence} + ${plan.trim("\n")} + ${codefence}` + + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body, + }) diff --git a/pkg/scaffold/testdata/update/backend.tf b/pkg/scaffold/testdata/update/backend.tf new file mode 100644 index 0000000..e611a32 --- /dev/null +++ b/pkg/scaffold/testdata/update/backend.tf @@ -0,0 +1,19 @@ +terraform { + backend "remote" { + organization = "ffddorf" + + workspaces { + name = "terraform-backend" + } + } + required_providers { + cloudflare = { + source = "cloudflare/cloudflare" + version = "~>2.17" + } + tls = { + source = "hashicorp/tls" + version = "~>3.1" + } + } +} diff --git a/pkg/scaffold/testdata/update/expected/.github/workflows/tf-run.yaml b/pkg/scaffold/testdata/update/expected/.github/workflows/tf-run.yaml new file mode 100644 index 0000000..e450f38 --- /dev/null +++ b/pkg/scaffold/testdata/update/expected/.github/workflows/tf-run.yaml @@ -0,0 +1,187 @@ +name: Terraform +on: + push: + branches: [main] + pull_request: +jobs: + run: + name: Run + runs-on: ubuntu-22.04 + concurrency: + group: terraform + cancel-in-progress: false + permissions: + contents: read + pull-requests: write + checks: write + env: + # renovate: datasource=github-releases depName=hashicorp/terraform + TERRAFORM_VERSION: "1.6.6" + TF_HTTP_PASSWORD: ${{ github.token }} + TF_IN_AUTOMATION: "true" + TF_CLI_ARGS: "-input=false" + # environment variables for providers + # example: + # NETBOX_API_TOKEN: ${{ secrets.NETBOX_API_TOKEN }} + + # Secrets for providers + NETBOX_API_TOKEN: ${{ secrets.NETBOX_API_TOKEN }} + PROXMOX_TOKEN: ${{ secrets.PROXMOX_TOKEN }} + steps: + - name: Checkout + uses: actions/checkout@v4 + - uses: hashicorp/setup-terraform@v3 + with: + terraform_version: ${TERRAFORM_VERSION} + - run: terraform init + - run: terraform plan -out=tfplan + - name: terraform apply + if: ${{ github.event_name == 'push' && github.ref_name == 'main' }} + run: | + set -o pipefail + terraform apply tfplan | tee apply.log + - name: Backup state + if: ${{ github.event_name == 'push' && github.ref_name == 'main' }} + env: + AWS_ACCESS_KEY_ID: ${{ secrets.B2_TFBACKUP_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.B2_TFBACKUP_SECRET_KEY }} + AWS_DEFAULT_REGION: us-east-1 + AWS_ENDPOINT_URL: https://s3.us-east-005.backblazeb2.com + S3_BUCKET: terraform-state-backup + TF_CLI_ARGS: "" + run: | + set -e -o pipefail + + terraform show -json > state.json + aws s3 cp state.json s3://${S3_BUCKET}/${{ github.repository }}/$(date +%s).json + + DELETE_FILES=$(\ + aws s3api list-objects-v2 --bucket "${S3_BUCKET}" --prefix "${{ github.repository }}" | \ + jq -r 
'.Contents | map(.Key) | sort | reverse | .[5:] | .[]' \ + ) + for file in ${DELETE_FILES}; do aws s3 rm s3://${S3_BUCKET}/$file; done + - run: terraform show -json tfplan > tfplan.json + env: + TF_CLI_ARGS: "" + - run: terraform show -no-color tfplan > summary.txt + env: + TF_CLI_ARGS: "" + - name: Create status check with details + uses: actions/github-script@v7 + with: + github-token: ${{ github.token }} + script: | + const fs = require('fs').promises + const plan = JSON.parse(await fs.readFile('tfplan.json', 'utf-8')) + const humanSummary = await fs.readFile('summary.txt', 'utf-8') + let applyLog; + try { + applyLog = await fs.readFile('apply.log', 'utf-8') + } catch {} + + function countActions(plan, type) { + return plan.resource_changes.filter(ch => ch.change.actions.includes(type)).length + } + const createCount = countActions(plan, 'create') + const updateCount = countActions(plan, 'update') + const deleteCount = countActions(plan, 'delete') + + const noChanges = createCount == 0 && updateCount == 0 && deleteCount == 0 + const title = noChanges + ? "No changes" + : (context.eventName === 'push' + ? `${createCount} added, ${updateCount} changed, ${deleteCount} destroyed` + : `${createCount} to add, ${updateCount} to change, ${deleteCount} to destroy` + ) + + const codefence = "```" + const summary = ` + # Terraform Plan + ${codefence} + ${humanSummary.trim("\n")} + ${codefence} + ${!!applyLog ? ` + # Terraform Apply + ${codefence} + ${applyLog.replace(/\u001b\[[^m]+m/g, '').trim()} + ${codefence} + ` : ""} + ` + + const sha = context.eventName === 'pull_request' + ? context.payload.pull_request.head.sha + : context.sha + await github.rest.checks.create({ + owner: context.repo.owner, + repo: context.repo.repo, + head_sha: sha, + status: 'completed', + conclusion: noChanges ? 'neutral' : 'success', + name: context.eventName === 'push' ? "Apply" : "Plan", + output: { + title, + summary, + }, + }); + - name: Show plan on PR + uses: actions/github-script@v7 + if: ${{ github.event_name == 'pull_request' }} + with: + github-token: ${{ github.token }} + script: | + const { repository: { pullRequest: { comments } } } = await github.graphql(` + query($owner:String!, $name:String!, $pr:Int!) 
{ + repository(owner:$owner, name:$name) { + pullRequest(number:$pr) { + comments(last: 10) { + nodes { + id, + minimizedReason + author { + ...on Bot { + login + } + } + } + } + } + } + } + `, { + owner: context.repo.owner, + name: context.repo.repo, + pr: context.issue.number, + }) + + const commentsToHide = comments.nodes.filter((comment) => { + return !comment.minimizedReason && comment.author.login == "github-actions" + }) + if (commentsToHide.length > 0) { + await github.graphql(` + mutation { + ${commentsToHide.map((c,i) => + `c${i}: minimizeComment(input: { subjectId: "${c.id}", classifier: OUTDATED }) { + clientMutationId + } + ` + ).join("")} + } + `) + } + + const fs = require('fs').promises + const plan = await fs.readFile('summary.txt', 'utf-8') + + const codefence = "```" + const body = ` + #### :building_construction: Terraform Plan + ${codefence} + ${plan.trim("\n")} + ${codefence}` + + github.rest.issues.createComment({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + body, + }) diff --git a/pkg/scaffold/testdata/update/expected/backend.tf b/pkg/scaffold/testdata/update/expected/backend.tf new file mode 100644 index 0000000..d03b6a4 --- /dev/null +++ b/pkg/scaffold/testdata/update/expected/backend.tf @@ -0,0 +1,18 @@ +terraform { + required_providers { + cloudflare = { + source = "cloudflare/cloudflare" + version = "~>2.17" + } + tls = { + source = "hashicorp/tls" + version = "~>3.1" + } + } + backend "http" { + address = "https://ffddorf-terraform-backend.fly.dev/state/terraform-backend/default" + lock_address = "https://ffddorf-terraform-backend.fly.dev/state/terraform-backend/default" + unlock_address = "https://ffddorf-terraform-backend.fly.dev/state/terraform-backend/default" + username = "github_pat" + } +} diff --git a/pkg/scaffold/tf.go b/pkg/scaffold/tf.go new file mode 100644 index 0000000..fd716f3 --- /dev/null +++ b/pkg/scaffold/tf.go @@ -0,0 +1,129 @@ +package scaffold + +import ( + "context" + "errors" + "fmt" + "io" + "net/url" + "path/filepath" + "slices" + "strings" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclwrite" + "github.com/nimbolus/terraform-backend/pkg/fs" + "github.com/nimbolus/terraform-backend/pkg/git" + "github.com/nimbolus/terraform-backend/pkg/tfcontext" + "github.com/zclconf/go-cty/cty" +) + +func writeBackendConfig(ctx context.Context, dir fs.FS, stdin io.Reader) (reterr error) { + var file *hclwrite.File + var outFile io.WriteCloser + var backendBlock *hclwrite.Block + + _, filename, err := tfcontext.FindBackendBlock(dir) + if err == nil { + ok, err := promptYesNo(ctx, stdin, fmt.Sprintf("There is an existing backend config at %s. 
Do you want to update it?", filename)) + if err != nil { + return err + } + if !ok { + fmt.Println("Skipping backend update") + return nil + } + + b, err := dir.ReadFile(filename) + if err != nil { + return err + } + + var diags hcl.Diagnostics + file, diags = hclwrite.ParseConfig(b, filename, hcl.Pos{}) + if len(diags) > 0 { + return errors.Join(diags) + } + var tfBlock *hclwrite.Block + for _, block := range file.Body().Blocks() { + if block.Type() != "terraform" { + continue + } + tfBlock = block + for _, innerBlock := range block.Body().Blocks() { + if innerBlock.Type() == "backend" { + backendBlock = innerBlock + } + } + } + if backendBlock == nil { + return errors.New("backend block not found anymore") + } + if backendBlock.Labels()[0] != "http" { + tfBlock.Body().RemoveBlock(backendBlock) + backendBlock = tfBlock.Body().AppendNewBlock("backend", nil) + } + + outFile, err = fs.Create(dir, filename) + if err != nil { + return err + } + defer func() { + if reterr != nil { + // restore original content + _, _ = outFile.Write(b) + } + _ = outFile.Close() + }() + } else { + filename = "backend.tf" + file = hclwrite.NewEmptyFile() + tfBlock := file.Body().AppendNewBlock("terraform", nil) + backendBlock = tfBlock.Body().AppendNewBlock("backend", nil) + outFile, err = fs.Create(dir, "backend.tf") + if err != nil { + return err + } + defer outFile.Close() + } + + origin, err := git.RepoOrigin() + if err != nil { + return fmt.Errorf("failed to read repo origin: %w", err) + } + repoPath := strings.TrimLeft(origin.Path, "/") + segments := strings.SplitN(repoPath, "/", 3) + if len(segments) < 2 { + return fmt.Errorf("invalid repo path: %s", origin.Path) + } + repo := segments[1] + repo = strings.TrimSuffix(repo, ".git") + + backendURL, err := url.Parse(backendAddress) + if err != nil { + return err + } + backendURL.Path = filepath.Join(backendURL.Path, "state", repo, "default") + address := backendURL.String() + + backendBlock.SetLabels([]string{"http"}) + backendBody := backendBlock.Body() + backendAttributes := []string{"address", "lock_address", "unlock_address", "username"} + for name := range backendBody.Attributes() { + if slices.Contains(backendAttributes, name) { + continue + } + backendBody.RemoveAttribute(name) + } + backendBody.SetAttributeValue("address", cty.StringVal(address)) + backendBody.SetAttributeValue("lock_address", cty.StringVal(address)) + backendBody.SetAttributeValue("unlock_address", cty.StringVal(address)) + backendBody.SetAttributeValue("username", cty.StringVal("github_pat")) + + if _, err := file.WriteTo(outFile); err != nil { + return err + } + + fmt.Printf("Wrote backend config to: %s\n", filename) + return nil +} diff --git a/pkg/scaffold/utils.go b/pkg/scaffold/utils.go new file mode 100644 index 0000000..9d753ac --- /dev/null +++ b/pkg/scaffold/utils.go @@ -0,0 +1,43 @@ +package scaffold + +import ( + "bufio" + "context" + "fmt" + "io" + "os" + "strings" +) + +func prompt(ctx context.Context, stdin io.Reader, text string) (string, error) { + fmt.Fprint(os.Stderr, text) + + var err error + var answer string + done := make(chan struct{}) + go func() { + defer close(done) + + rdr := bufio.NewReader(stdin) + var answerBytes []byte + answerBytes, err = rdr.ReadBytes('\n') + if err == nil { + answer = string(answerBytes[:len(answerBytes)-1]) + } + }() + + select { + case <-ctx.Done(): + return "", ctx.Err() + case <-done: + return answer, err + } +} + +func promptYesNo(ctx context.Context, stdin io.Reader, text string) (bool, error) { + answer, err := prompt(ctx, 
stdin, text+" [y/N] ") + if err != nil { + return false, err + } + return strings.EqualFold(answer, "y"), nil +} diff --git a/pkg/speculative/logs.go b/pkg/speculative/logs.go new file mode 100644 index 0000000..a13b6c3 --- /dev/null +++ b/pkg/speculative/logs.go @@ -0,0 +1,72 @@ +package speculative + +import ( + "bufio" + "fmt" + "io" + "net/http" + "net/url" + "slices" + "strings" +) + +type countingReader struct { + io.Reader + readBytes int +} + +func (c *countingReader) Read(dst []byte) (int, error) { + n, err := c.Reader.Read(dst) + c.readBytes += n + return n, err +} + +var ignoredGroupNames = []string{ + "Operating System", + "Runner Image", + "Runner Image Provisioner", + "GITHUB_TOKEN Permissions", +} + +func streamLogs(logsURL *url.URL, skip int64) (int64, error) { + logs, err := http.Get(logsURL.String()) + if err != nil { + return 0, err + } + if logs.StatusCode != http.StatusOK { + return 0, fmt.Errorf("invalid status for logs: %d", logs.StatusCode) + } + defer logs.Body.Close() + + if _, err := io.Copy(io.Discard, io.LimitReader(logs.Body, skip)); err != nil { + return 0, err + } + + r := &countingReader{Reader: logs.Body} + scanner := bufio.NewScanner(r) + groupDepth := 0 + for scanner.Scan() { + line := scanner.Text() + ts, rest, ok := strings.Cut(line, " ") + if !ok { + rest = ts + } + if groupName, ok := strings.CutPrefix(rest, "##[group]"); ok { + groupDepth++ + if !slices.Contains(ignoredGroupNames, groupName) { + fmt.Printf("\n# %s\n", groupName) + } + } + if groupDepth == 0 { + fmt.Println(rest) + } + if strings.HasPrefix(rest, "##[endgroup]") { + groupDepth-- + } + } + if err := scanner.Err(); err != nil { + return int64(r.readBytes), err + } + + return int64(r.readBytes), err +} diff --git a/pkg/speculative/run.go b/pkg/speculative/run.go new file mode 100644 index 0000000..838a872 --- /dev/null +++ b/pkg/speculative/run.go @@ -0,0 +1,156 @@ +package speculative + +import ( + "context" + "errors" + "fmt" + "os/exec" + "strings" + "time" + + "github.com/cenkalti/backoff" + "github.com/google/go-github/v62/github" + "github.com/nimbolus/terraform-backend/pkg/git" + "github.com/spf13/cobra" +) + +var ( + owner string + repo string + workflowFilename string +) + +func NewCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "tf-preview-gh", + Short: "Schedules speculative terraform runs on GitHub Actions", + RunE: func(cmd *cobra.Command, args []string) error { + return run(cmd.Context()) + }, + } + + cmd.Flags().StringVar(&owner, "github-owner", "", "Repository owner") + cmd.Flags().StringVar(&repo, "github-repo", "", "Repository name") + cmd.Flags().StringVar(&workflowFilename, "workflow-file", "preview.yaml", "Name of the workflow file to run for previews") + + return cmd +} + +func run(ctx context.Context) error { + if owner == "" || repo == "" { + if ghURL, err := git.RepoOrigin(); err == nil { + parts := strings.Split(ghURL.Path, "/") + if len(parts) >= 2 { + owner = parts[0] + repo = strings.TrimSuffix(parts[1], ".git") + fmt.Printf("Using local repo info: %s/%s\n", owner, repo) + } + } + } + if owner == "" { + return errors.New("Missing flag: -github-owner") + } + if repo == "" { + return errors.New("Missing flag: -github-repo") + } + + serverURL, err := serveWorkspace(ctx) + if err != nil { + return err + } + + // steal token from GH CLI + cmd := exec.CommandContext(ctx, "gh", "auth", "token") + out, err := cmd.Output() + if err != nil { + return err + } + + token := strings.TrimSpace(string(out)) + gh := github.NewClient(nil).WithAuthToken(token) + + 
startedAt := time.Now().UTC() + + // start workflow + _, err = gh.Actions.CreateWorkflowDispatchEventByFileName(ctx, + owner, repo, workflowFilename, + github.CreateWorkflowDispatchEventRequest{ + Ref: "main", + Inputs: map[string]interface{}{ + "workspace_transfer_url": serverURL, + }, + }, + ) + if err != nil { + return err + } + + fmt.Println("Waiting for run to start...") + + // find workflow run + var run *github.WorkflowRun + err = backoff.Retry(func() error { + workflows, _, err := gh.Actions.ListWorkflowRunsByFileName( + ctx, owner, repo, workflowFilename, + &github.ListWorkflowRunsOptions{ + Created: fmt.Sprintf(">=%s", startedAt.Format("2006-01-02T15:04")), + }, + ) + if err != nil { + return backoff.Permanent(err) + } + if len(workflows.WorkflowRuns) == 0 { + return fmt.Errorf("no workflow runs found") + } + + run = workflows.WorkflowRuns[0] + return nil + }, backoff.NewExponentialBackOff()) + if err != nil { + return err + } + + var jobID int64 + err = backoff.Retry(func() error { + jobs, _, err := gh.Actions.ListWorkflowJobs(ctx, + owner, repo, *run.ID, + &github.ListWorkflowJobsOptions{}, + ) + if err != nil { + return backoff.Permanent(err) + } + if len(jobs.Jobs) == 0 { + return fmt.Errorf("no jobs found") + } + + jobID = *jobs.Jobs[0].ID + return nil + }, backoff.NewExponentialBackOff()) + if err != nil { + return err + } + + logsURL, _, err := gh.Actions.GetWorkflowJobLogs(ctx, owner, repo, jobID, 2) + if err != nil { + return err + } + + var readBytes int64 + for { + n, err := streamLogs(logsURL, readBytes) + if err != nil { + return err + } + readBytes += n + + // check if job is done + job, _, err := gh.Actions.GetWorkflowJobByID(ctx, owner, repo, jobID) + if err != nil { + return err + } + if job.CompletedAt != nil { + fmt.Println("Job complete.") + return nil + } + } +} diff --git a/pkg/speculative/serve.go b/pkg/speculative/serve.go new file mode 100644 index 0000000..66edd61 --- /dev/null +++ b/pkg/speculative/serve.go @@ -0,0 +1,72 @@ +package speculative + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "os" + "path/filepath" + + "github.com/google/uuid" + "github.com/hashicorp/go-slug" + "github.com/nimbolus/terraform-backend/pkg/tfcontext" +) + +func serveWorkspace(ctx context.Context) (string, error) { + cwd, err := os.Getwd() + if err != nil { + return "", err + } + + backend, err := tfcontext.FindBackend(os.DirFS(cwd)) + if err != nil { + return "", err + } + backendURL, err := url.Parse(backend.Address) + if err != nil { + return "", fmt.Errorf("failed to parse backend url: %s, %w", backend.Address, err) + } + if backend.Password == "" { + backendPassword, ok := os.LookupEnv("TF_HTTP_PASSWORD") + if !ok || backendPassword == "" { + return "", errors.New("missing backend password") + } + backend.Password = backendPassword + } + + id := uuid.New() + backendURL.Path = filepath.Join(backendURL.Path, "/share/", id.String()) + + pr, pw := io.Pipe() + req, err := http.NewRequestWithContext(ctx, http.MethodPost, backendURL.String(), pr) + if err != nil { + return "", err + } + req.Header.Set("Content-Type", "application/octet-stream") + req.SetBasicAuth(backend.Username, backend.Password) + + go func() { + _, err := slug.Pack(cwd, pw, true) + if err != nil { + fmt.Printf("failed to pack workspace: %v\n", err) + pw.CloseWithError(err) + } else { + pw.Close() + } + }() + + go func() { + resp, err := http.DefaultClient.Do(req) + if err != nil { + fmt.Printf("failed to stream workspace: %v\n", err) + } else if resp.StatusCode/100 != 2 { + 
fmt.Printf("invalid status code after streaming workspace: %d\n", resp.StatusCode) + } + fmt.Println("done streaming workspace") + }() + + return backendURL.String(), nil +} diff --git a/pkg/tfcontext/parse.go b/pkg/tfcontext/parse.go index eea48fb..f1fb00b 100644 --- a/pkg/tfcontext/parse.go +++ b/pkg/tfcontext/parse.go @@ -2,7 +2,7 @@ package tfcontext import ( "errors" - "os" + "io/fs" "path/filepath" "github.com/hashicorp/hcl/v2" @@ -35,8 +35,8 @@ var backendSchema = &hcl.BodySchema{ }, } -func files(dir string) ([]string, error) { - infos, err := os.ReadDir(dir) +func files(dir fs.FS) ([]string, error) { + infos, err := fs.ReadDir(dir, ".") if err != nil { return nil, err } @@ -53,8 +53,7 @@ func files(dir string) ([]string, error) { continue } - fullPath := filepath.Join(dir, name) - files = append(files, fullPath) + files = append(files, name) } return files, nil @@ -80,19 +79,19 @@ func readAttribute(attrs hcl.Attributes, name string) (string, error) { return val.AsString(), nil } -func FindBackend(dir string) (*BackendConfig, error) { +func FindBackendBlock(dir fs.FS) (*hcl.Block, string, error) { parser := hclparse.NewParser() tfFiles, err := files(dir) if err != nil { - return nil, err + return nil, "", err } var file *hcl.File for _, filename := range tfFiles { - b, err := os.ReadFile(filename) + b, err := fs.ReadFile(dir, filename) if err != nil { - return nil, err + return nil, "", err } file, _ = parser.ParseHCL(b, filename) @@ -108,35 +107,43 @@ func FindBackend(dir string) (*BackendConfig, error) { content, _, _ := block.Body.PartialContent(terraformBlockSchema) for _, innerBlock := range content.Blocks { - if innerBlock.Type != "backend" { - continue - } - if innerBlock.Labels[0] != "http" { - continue - } - - content, _, _ := innerBlock.Body.PartialContent(backendSchema) - address, err := readAttribute(content.Attributes, "address") - if err != nil { - return nil, err + if innerBlock.Type == "backend" { + return innerBlock, filename, nil } - username, err := readAttribute(content.Attributes, "username") - if err != nil { - return nil, err - } - password, err := readAttribute(content.Attributes, "password") - if err != nil { - return nil, err - } - - return &BackendConfig{ - Address: address, - Username: username, - Password: password, - }, nil } } } - return nil, errors.New("backend config not found") + return nil, "", errors.New("backend block not found") +} + +func FindBackend(dir fs.FS) (*BackendConfig, error) { + backend, _, err := FindBackendBlock(dir) + if err != nil { + return nil, err + } + + if backend.Labels[0] != "http" { + return nil, errors.New("not using http backend") + } + + content, _, _ := backend.Body.PartialContent(backendSchema) + address, err := readAttribute(content.Attributes, "address") + if err != nil { + return nil, err + } + username, err := readAttribute(content.Attributes, "username") + if err != nil { + return nil, err + } + password, err := readAttribute(content.Attributes, "password") + if err != nil { + return nil, err + } + + return &BackendConfig{ + Address: address, + Username: username, + Password: password, + }, nil } diff --git a/pkg/tfcontext/parse_test.go b/pkg/tfcontext/parse_test.go index 6d8167e..ef30f7f 100644 --- a/pkg/tfcontext/parse_test.go +++ b/pkg/tfcontext/parse_test.go @@ -1,6 +1,7 @@ package tfcontext_test import ( + "os" "testing" "github.com/nimbolus/terraform-backend/pkg/tfcontext" @@ -9,7 +10,8 @@ import ( ) func TestFindBackend(t *testing.T) { - be, err := tfcontext.FindBackend("./testdata") + dir := 
os.DirFS("./testdata") + be, err := tfcontext.FindBackend(dir) require.NoError(t, err) assert.Equal(t, "https://dummy-backend.example.com/state", be.Address)