From e469534b841f142a39704a2266de4546392a4f6c Mon Sep 17 00:00:00 2001 From: Paolo Tranquilli Date: Tue, 16 Jul 2024 13:02:24 +0200 Subject: [PATCH] Go/Bazel: use gazelle `go_deps` instead of a vendor directory --- MODULE.bazel | 4 + go/extractor/BUILD.bazel | 4 +- go/extractor/dbscheme/BUILD.bazel | 2 +- go/extractor/go.mod | 6 +- go/extractor/go.sum | 4 +- go/extractor/project/BUILD.bazel | 4 +- go/extractor/trap/BUILD.bazel | 2 +- go/extractor/util/BUILD.bazel | 4 +- go/extractor/vendor/golang.org/x/mod/LICENSE | 27 - go/extractor/vendor/golang.org/x/mod/PATENTS | 22 - .../x/mod/internal/lazyregexp/BUILD.bazel | 11 - .../x/mod/internal/lazyregexp/lazyre.go | 78 - .../golang.org/x/mod/modfile/BUILD.bazel | 21 - .../vendor/golang.org/x/mod/modfile/print.go | 184 -- .../vendor/golang.org/x/mod/modfile/read.go | 958 ---------- .../vendor/golang.org/x/mod/modfile/rule.go | 1663 ----------------- .../vendor/golang.org/x/mod/modfile/work.go | 285 --- .../golang.org/x/mod/module/BUILD.bazel | 18 - .../vendor/golang.org/x/mod/module/module.go | 841 --------- .../vendor/golang.org/x/mod/module/pseudo.go | 250 --- .../golang.org/x/mod/semver/BUILD.bazel | 11 - .../vendor/golang.org/x/mod/semver/semver.go | 401 ---- .../vendor/golang.org/x/tools/LICENSE | 27 - .../vendor/golang.org/x/tools/PATENTS | 22 - .../x/tools/go/gcexportdata/BUILD.bazel | 15 - .../x/tools/go/gcexportdata/gcexportdata.go | 186 -- .../x/tools/go/gcexportdata/importer.go | 75 - .../go/internal/packagesdriver/BUILD.bazel | 12 - .../tools/go/internal/packagesdriver/sizes.go | 53 - .../x/tools/go/packages/BUILD.bazel | 27 - .../golang.org/x/tools/go/packages/doc.go | 250 --- .../x/tools/go/packages/external.go | 140 -- .../golang.org/x/tools/go/packages/golist.go | 1106 ----------- .../x/tools/go/packages/golist_overlay.go | 83 - .../x/tools/go/packages/loadmode_string.go | 57 - .../x/tools/go/packages/packages.go | 1313 ------------- .../golang.org/x/tools/go/packages/visit.go | 59 - .../x/tools/go/types/objectpath/BUILD.bazel | 12 - .../x/tools/go/types/objectpath/objectpath.go | 752 -------- .../x/tools/internal/event/BUILD.bazel | 19 - .../x/tools/internal/event/core/BUILD.bazel | 19 - .../x/tools/internal/event/core/event.go | 85 - .../x/tools/internal/event/core/export.go | 70 - .../x/tools/internal/event/core/fast.go | 77 - .../golang.org/x/tools/internal/event/doc.go | 7 - .../x/tools/internal/event/event.go | 127 -- .../x/tools/internal/event/keys/BUILD.bazel | 16 - .../x/tools/internal/event/keys/keys.go | 564 ------ .../x/tools/internal/event/keys/standard.go | 22 - .../x/tools/internal/event/keys/util.go | 21 - .../x/tools/internal/event/label/BUILD.bazel | 11 - .../x/tools/internal/event/label/label.go | 215 --- .../x/tools/internal/event/tag/BUILD.bazel | 12 - .../x/tools/internal/event/tag/tag.go | 59 - .../x/tools/internal/gcimporter/BUILD.bazel | 29 - .../x/tools/internal/gcimporter/bimport.go | 150 -- .../x/tools/internal/gcimporter/exportdata.go | 99 - .../x/tools/internal/gcimporter/gcimporter.go | 273 --- .../x/tools/internal/gcimporter/iexport.go | 1321 ------------- .../x/tools/internal/gcimporter/iimport.go | 1089 ----------- .../internal/gcimporter/newInterface10.go | 22 - .../internal/gcimporter/newInterface11.go | 14 - .../internal/gcimporter/support_go117.go | 16 - .../internal/gcimporter/support_go118.go | 37 - .../x/tools/internal/gcimporter/unified_no.go | 10 - .../tools/internal/gcimporter/unified_yes.go | 10 - .../x/tools/internal/gcimporter/ureader_no.go | 19 - 
.../tools/internal/gcimporter/ureader_yes.go | 728 -------- .../x/tools/internal/gocommand/BUILD.bazel | 22 - .../x/tools/internal/gocommand/invoke.go | 465 ----- .../x/tools/internal/gocommand/vendor.go | 109 -- .../x/tools/internal/gocommand/version.go | 71 - .../internal/packagesinternal/BUILD.bazel | 11 - .../internal/packagesinternal/packages.go | 22 - .../x/tools/internal/pkgbits/BUILD.bazel | 23 - .../x/tools/internal/pkgbits/codes.go | 77 - .../x/tools/internal/pkgbits/decoder.go | 517 ----- .../x/tools/internal/pkgbits/doc.go | 32 - .../x/tools/internal/pkgbits/encoder.go | 383 ---- .../x/tools/internal/pkgbits/flags.go | 9 - .../x/tools/internal/pkgbits/frames_go1.go | 21 - .../x/tools/internal/pkgbits/frames_go17.go | 28 - .../x/tools/internal/pkgbits/reloc.go | 42 - .../x/tools/internal/pkgbits/support.go | 17 - .../x/tools/internal/pkgbits/sync.go | 113 -- .../internal/pkgbits/syncmarker_string.go | 89 - .../tools/internal/tokeninternal/BUILD.bazel | 11 - .../internal/tokeninternal/tokeninternal.go | 151 -- .../x/tools/internal/typeparams/BUILD.bazel | 17 - .../x/tools/internal/typeparams/common.go | 204 -- .../x/tools/internal/typeparams/coretype.go | 122 -- .../x/tools/internal/typeparams/normalize.go | 218 --- .../x/tools/internal/typeparams/termlist.go | 163 -- .../x/tools/internal/typeparams/typeterm.go | 169 -- .../tools/internal/typesinternal/BUILD.bazel | 16 - .../tools/internal/typesinternal/errorcode.go | 1560 ---------------- .../typesinternal/errorcode_string.go | 179 -- .../x/tools/internal/typesinternal/types.go | 52 - .../tools/internal/typesinternal/types_118.go | 19 - .../x/tools/internal/versions/BUILD.bazel | 17 - .../x/tools/internal/versions/gover.go | 172 -- .../x/tools/internal/versions/types.go | 19 - .../x/tools/internal/versions/types_go121.go | 20 - .../x/tools/internal/versions/types_go122.go | 24 - .../x/tools/internal/versions/versions.go | 52 - go/extractor/vendor/modules.txt | 25 - go/gen.py | 9 - 107 files changed, 19 insertions(+), 19381 deletions(-) delete mode 100644 go/extractor/vendor/golang.org/x/mod/LICENSE delete mode 100644 go/extractor/vendor/golang.org/x/mod/PATENTS delete mode 100644 go/extractor/vendor/golang.org/x/mod/internal/lazyregexp/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go delete mode 100644 go/extractor/vendor/golang.org/x/mod/modfile/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/mod/modfile/print.go delete mode 100644 go/extractor/vendor/golang.org/x/mod/modfile/read.go delete mode 100644 go/extractor/vendor/golang.org/x/mod/modfile/rule.go delete mode 100644 go/extractor/vendor/golang.org/x/mod/modfile/work.go delete mode 100644 go/extractor/vendor/golang.org/x/mod/module/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/mod/module/module.go delete mode 100644 go/extractor/vendor/golang.org/x/mod/module/pseudo.go delete mode 100644 go/extractor/vendor/golang.org/x/mod/semver/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/mod/semver/semver.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/LICENSE delete mode 100644 go/extractor/vendor/golang.org/x/tools/PATENTS delete mode 100644 go/extractor/vendor/golang.org/x/tools/go/gcexportdata/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/go/gcexportdata/importer.go delete mode 100644 
go/extractor/vendor/golang.org/x/tools/go/internal/packagesdriver/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/go/packages/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/tools/go/packages/doc.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/go/packages/external.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/go/packages/golist.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/go/packages/golist_overlay.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/go/packages/loadmode_string.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/go/packages/packages.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/go/packages/visit.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/go/types/objectpath/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/event/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/event/core/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/event/core/event.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/event/core/export.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/event/core/fast.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/event/doc.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/event/event.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/event/keys/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/event/keys/keys.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/event/keys/standard.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/event/keys/util.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/event/label/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/event/label/label.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/event/tag/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/event/tag/tag.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/gcimporter/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/gcimporter/bimport.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/gcimporter/exportdata.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/gcimporter/iexport.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/gcimporter/iimport.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/gcimporter/newInterface10.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/gcimporter/newInterface11.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/gcimporter/support_go117.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/gcimporter/support_go118.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/gcimporter/unified_no.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/gcimporter/unified_yes.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/gcimporter/ureader_no.go delete mode 100644 
go/extractor/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/gocommand/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/gocommand/invoke.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/gocommand/vendor.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/gocommand/version.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/packagesinternal/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/packagesinternal/packages.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/pkgbits/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/pkgbits/codes.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/pkgbits/decoder.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/pkgbits/doc.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/pkgbits/encoder.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/pkgbits/flags.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/pkgbits/frames_go1.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/pkgbits/frames_go17.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/pkgbits/reloc.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/pkgbits/support.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/pkgbits/sync.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/pkgbits/syncmarker_string.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/tokeninternal/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/typeparams/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/typeparams/common.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/typeparams/coretype.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/typeparams/normalize.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/typeparams/termlist.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/typeparams/typeterm.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/typesinternal/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/typesinternal/errorcode_string.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/typesinternal/types.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/typesinternal/types_118.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/versions/BUILD.bazel delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/versions/gover.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/versions/types.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/versions/types_go121.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/versions/types_go122.go delete mode 100644 go/extractor/vendor/golang.org/x/tools/internal/versions/versions.go delete mode 100644 go/extractor/vendor/modules.txt diff --git a/MODULE.bazel b/MODULE.bazel index b0957b89a75c..e4efe0ec2532 100644 --- a/MODULE.bazel +++ b/MODULE.bazel 
@@ -141,6 +141,10 @@ use_repo(
 
 go_sdk = use_extension("@rules_go//go:extensions.bzl", "go_sdk")
 go_sdk.download(version = "1.22.2")
 
+go_deps = use_extension("@gazelle//:extensions.bzl", "go_deps")
+go_deps.from_file(go_mod = "//go/extractor:go.mod")
+use_repo(go_deps, "org_golang_x_mod", "org_golang_x_tools")
+
 lfs_files = use_repo_rule("//misc/bazel:lfs.bzl", "lfs_files")
 lfs_files(
diff --git a/go/extractor/BUILD.bazel b/go/extractor/BUILD.bazel
index 8c69d9cc01ff..ded27a11177b 100644
--- a/go/extractor/BUILD.bazel
+++ b/go/extractor/BUILD.bazel
@@ -21,8 +21,8 @@ go_library(
         "//go/extractor/toolchain",
         "//go/extractor/trap",
         "//go/extractor/util",
-        "//go/extractor/vendor/golang.org/x/mod/modfile",
-        "//go/extractor/vendor/golang.org/x/tools/go/packages",
+        "@org_golang_x_mod//modfile:go_default_library",
+        "@org_golang_x_tools//go/packages:go_default_library",
     ],
 )
 
diff --git a/go/extractor/dbscheme/BUILD.bazel b/go/extractor/dbscheme/BUILD.bazel
index 496a5ccdf49d..379c2fb93d39 100644
--- a/go/extractor/dbscheme/BUILD.bazel
+++ b/go/extractor/dbscheme/BUILD.bazel
@@ -12,6 +12,6 @@ go_library(
     visibility = ["//visibility:public"],
     deps = [
         "//go/extractor/trap",
-        "//go/extractor/vendor/golang.org/x/tools/go/packages",
+        "@org_golang_x_tools//go/packages:go_default_library",
     ],
 )
diff --git a/go/extractor/go.mod b/go/extractor/go.mod
index 9dafc31bbbe9..56aba6b534ab 100644
--- a/go/extractor/go.mod
+++ b/go/extractor/go.mod
@@ -2,7 +2,11 @@ module github.com/github/codeql-go/extractor
 
 go 1.22.0
 
+// when updating this, run
+// bazel run @rules_go//go -- mod tidy
+// when adding or removing dependencies, run
+// bazel mod tidy
 require (
-	golang.org/x/mod v0.15.0
+	golang.org/x/mod v0.16.0
 	golang.org/x/tools v0.18.0
 )
diff --git a/go/extractor/go.sum b/go/extractor/go.sum
index dab099e92a05..e84e985d95f9 100644
--- a/go/extractor/go.sum
+++ b/go/extractor/go.sum
@@ -1,5 +1,5 @@
-golang.org/x/mod v0.15.0 h1:SernR4v+D55NyBH2QiEQrlBAnj1ECL6AGrA5+dPaMY8=
-golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
+golang.org/x/mod v0.16.0 h1:QX4fJ0Rr5cPQCF7O9lh9Se4pmwfwskqZfq5moyldzic=
+golang.org/x/mod v0.16.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
 golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
 golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
 golang.org/x/tools v0.18.0 h1:k8NLag8AGHnn+PHbl7g43CtqZAwG60vZkLqgyZgIHgQ=
diff --git a/go/extractor/project/BUILD.bazel b/go/extractor/project/BUILD.bazel
index 9170ff95be35..7a8627ae8b1b 100644
--- a/go/extractor/project/BUILD.bazel
+++ b/go/extractor/project/BUILD.bazel
@@ -11,7 +11,7 @@ go_library(
         "//go/extractor/diagnostics",
         "//go/extractor/toolchain",
         "//go/extractor/util",
-        "//go/extractor/vendor/golang.org/x/mod/modfile",
+        "@org_golang_x_mod//modfile:go_default_library",
     ],
 )
 
@@ -21,6 +21,6 @@ go_test(
     embed = [":project"],
     deps = [
         "//go/extractor/util",
-        "//go/extractor/vendor/golang.org/x/mod/modfile",
+        "@org_golang_x_mod//modfile:go_default_library",
     ],
 )
diff --git a/go/extractor/trap/BUILD.bazel b/go/extractor/trap/BUILD.bazel
index 6cc7c4983b2e..80973ec94c17 100644
--- a/go/extractor/trap/BUILD.bazel
+++ b/go/extractor/trap/BUILD.bazel
@@ -14,7 +14,7 @@ go_library(
     deps = [
         "//go/extractor/srcarchive",
         "//go/extractor/util",
-        "//go/extractor/vendor/golang.org/x/tools/go/packages",
+        "@org_golang_x_tools//go/packages:go_default_library",
     ],
 )
 
diff --git a/go/extractor/util/BUILD.bazel b/go/extractor/util/BUILD.bazel
index 7c4723e63bf4..41e81a907ef3 100644
--- a/go/extractor/util/BUILD.bazel
+++ b/go/extractor/util/BUILD.bazel
@@ -10,7 +10,7 @@ go_library(
     ],
     importpath = "github.com/github/codeql-go/extractor/util",
     visibility = ["//visibility:public"],
-    deps = ["//go/extractor/vendor/golang.org/x/mod/semver"],
+    deps = ["@org_golang_x_mod//semver:go_default_library"],
 )
 
 go_test(
@@ -20,5 +20,5 @@
         "util_test.go",
     ],
     embed = [":util"],
-    deps = ["//go/extractor/vendor/golang.org/x/mod/semver"],
+    deps = ["@org_golang_x_mod//semver:go_default_library"],
 )
diff --git a/go/extractor/vendor/golang.org/x/mod/LICENSE b/go/extractor/vendor/golang.org/x/mod/LICENSE
deleted file mode 100644
index 6a66aea5eafe..000000000000
--- a/go/extractor/vendor/golang.org/x/mod/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-Copyright (c) 2009 The Go Authors. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-   * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-   * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
-   * Neither the name of Google Inc. nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/go/extractor/vendor/golang.org/x/mod/PATENTS b/go/extractor/vendor/golang.org/x/mod/PATENTS
deleted file mode 100644
index 733099041f84..000000000000
--- a/go/extractor/vendor/golang.org/x/mod/PATENTS
+++ /dev/null
@@ -1,22 +0,0 @@
-Additional IP Rights Grant (Patents)
-
-"This implementation" means the copyrightable works distributed by
-Google as part of the Go project.
-
-Google hereby grants to You a perpetual, worldwide, non-exclusive,
-no-charge, royalty-free, irrevocable (except as stated in this section)
-patent license to make, have made, use, offer to sell, sell, import,
-transfer and otherwise run, modify and propagate the contents of this
-implementation of Go, where such license applies only to those patent
-claims, both currently owned or controlled by Google and acquired in
-the future, licensable by Google that are necessarily infringed by this
-implementation of Go. This grant does not include claims that would be
-infringed only as a consequence of further modification of this
-implementation.
If you or your agent or exclusive licensee institute or -order or agree to the institution of patent litigation against any -entity (including a cross-claim or counterclaim in a lawsuit) alleging -that this implementation of Go or any code incorporated within this -implementation of Go constitutes direct or contributory patent -infringement, or inducement of patent infringement, then any patent -rights granted to you under this License for this implementation of Go -shall terminate as of the date such litigation is filed. diff --git a/go/extractor/vendor/golang.org/x/mod/internal/lazyregexp/BUILD.bazel b/go/extractor/vendor/golang.org/x/mod/internal/lazyregexp/BUILD.bazel deleted file mode 100644 index deb5dc2b019a..000000000000 --- a/go/extractor/vendor/golang.org/x/mod/internal/lazyregexp/BUILD.bazel +++ /dev/null @@ -1,11 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "lazyregexp", - srcs = ["lazyre.go"], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/mod/internal/lazyregexp", - importpath = "golang.org/x/mod/internal/lazyregexp", - visibility = ["//go/extractor/vendor/golang.org/x/mod:__subpackages__"], -) diff --git a/go/extractor/vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go b/go/extractor/vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go deleted file mode 100644 index 150f887e7a4b..000000000000 --- a/go/extractor/vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package lazyregexp is a thin wrapper over regexp, allowing the use of global -// regexp variables without forcing them to be compiled at init. -package lazyregexp - -import ( - "os" - "regexp" - "strings" - "sync" -) - -// Regexp is a wrapper around [regexp.Regexp], where the underlying regexp will be -// compiled the first time it is needed. -type Regexp struct { - str string - once sync.Once - rx *regexp.Regexp -} - -func (r *Regexp) re() *regexp.Regexp { - r.once.Do(r.build) - return r.rx -} - -func (r *Regexp) build() { - r.rx = regexp.MustCompile(r.str) - r.str = "" -} - -func (r *Regexp) FindSubmatch(s []byte) [][]byte { - return r.re().FindSubmatch(s) -} - -func (r *Regexp) FindStringSubmatch(s string) []string { - return r.re().FindStringSubmatch(s) -} - -func (r *Regexp) FindStringSubmatchIndex(s string) []int { - return r.re().FindStringSubmatchIndex(s) -} - -func (r *Regexp) ReplaceAllString(src, repl string) string { - return r.re().ReplaceAllString(src, repl) -} - -func (r *Regexp) FindString(s string) string { - return r.re().FindString(s) -} - -func (r *Regexp) FindAllString(s string, n int) []string { - return r.re().FindAllString(s, n) -} - -func (r *Regexp) MatchString(s string) bool { - return r.re().MatchString(s) -} - -func (r *Regexp) SubexpNames() []string { - return r.re().SubexpNames() -} - -var inTest = len(os.Args) > 0 && strings.HasSuffix(strings.TrimSuffix(os.Args[0], ".exe"), ".test") - -// New creates a new lazy regexp, delaying the compiling work until it is first -// needed. If the code is being run as part of tests, the regexp compiling will -// happen immediately. -func New(str string) *Regexp { - lr := &Regexp{str: str} - if inTest { - // In tests, always compile the regexps early. 
- lr.re() - } - return lr -} diff --git a/go/extractor/vendor/golang.org/x/mod/modfile/BUILD.bazel b/go/extractor/vendor/golang.org/x/mod/modfile/BUILD.bazel deleted file mode 100644 index 097bacb107ce..000000000000 --- a/go/extractor/vendor/golang.org/x/mod/modfile/BUILD.bazel +++ /dev/null @@ -1,21 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "modfile", - srcs = [ - "print.go", - "read.go", - "rule.go", - "work.go", - ], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/mod/modfile", - importpath = "golang.org/x/mod/modfile", - visibility = ["//visibility:public"], - deps = [ - "//go/extractor/vendor/golang.org/x/mod/internal/lazyregexp", - "//go/extractor/vendor/golang.org/x/mod/module", - "//go/extractor/vendor/golang.org/x/mod/semver", - ], -) diff --git a/go/extractor/vendor/golang.org/x/mod/modfile/print.go b/go/extractor/vendor/golang.org/x/mod/modfile/print.go deleted file mode 100644 index 2a0123d4b915..000000000000 --- a/go/extractor/vendor/golang.org/x/mod/modfile/print.go +++ /dev/null @@ -1,184 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Module file printer. - -package modfile - -import ( - "bytes" - "fmt" - "strings" -) - -// Format returns a go.mod file as a byte slice, formatted in standard style. -func Format(f *FileSyntax) []byte { - pr := &printer{} - pr.file(f) - - // remove trailing blank lines - b := pr.Bytes() - for len(b) > 0 && b[len(b)-1] == '\n' && (len(b) == 1 || b[len(b)-2] == '\n') { - b = b[:len(b)-1] - } - return b -} - -// A printer collects the state during printing of a file or expression. -type printer struct { - bytes.Buffer // output buffer - comment []Comment // pending end-of-line comments - margin int // left margin (indent), a number of tabs -} - -// printf prints to the buffer. -func (p *printer) printf(format string, args ...interface{}) { - fmt.Fprintf(p, format, args...) -} - -// indent returns the position on the current line, in bytes, 0-indexed. -func (p *printer) indent() int { - b := p.Bytes() - n := 0 - for n < len(b) && b[len(b)-1-n] != '\n' { - n++ - } - return n -} - -// newline ends the current line, flushing end-of-line comments. -func (p *printer) newline() { - if len(p.comment) > 0 { - p.printf(" ") - for i, com := range p.comment { - if i > 0 { - p.trim() - p.printf("\n") - for i := 0; i < p.margin; i++ { - p.printf("\t") - } - } - p.printf("%s", strings.TrimSpace(com.Token)) - } - p.comment = p.comment[:0] - } - - p.trim() - if b := p.Bytes(); len(b) == 0 || (len(b) >= 2 && b[len(b)-1] == '\n' && b[len(b)-2] == '\n') { - // skip the blank line at top of file or after a blank line - } else { - p.printf("\n") - } - for i := 0; i < p.margin; i++ { - p.printf("\t") - } -} - -// trim removes trailing spaces and tabs from the current line. -func (p *printer) trim() { - // Remove trailing spaces and tabs from line we're about to end. - b := p.Bytes() - n := len(b) - for n > 0 && (b[n-1] == '\t' || b[n-1] == ' ') { - n-- - } - p.Truncate(n) -} - -// file formats the given file into the print buffer. 
-func (p *printer) file(f *FileSyntax) { - for _, com := range f.Before { - p.printf("%s", strings.TrimSpace(com.Token)) - p.newline() - } - - for i, stmt := range f.Stmt { - switch x := stmt.(type) { - case *CommentBlock: - // comments already handled - p.expr(x) - - default: - p.expr(x) - p.newline() - } - - for _, com := range stmt.Comment().After { - p.printf("%s", strings.TrimSpace(com.Token)) - p.newline() - } - - if i+1 < len(f.Stmt) { - p.newline() - } - } -} - -func (p *printer) expr(x Expr) { - // Emit line-comments preceding this expression. - if before := x.Comment().Before; len(before) > 0 { - // Want to print a line comment. - // Line comments must be at the current margin. - p.trim() - if p.indent() > 0 { - // There's other text on the line. Start a new line. - p.printf("\n") - } - // Re-indent to margin. - for i := 0; i < p.margin; i++ { - p.printf("\t") - } - for _, com := range before { - p.printf("%s", strings.TrimSpace(com.Token)) - p.newline() - } - } - - switch x := x.(type) { - default: - panic(fmt.Errorf("printer: unexpected type %T", x)) - - case *CommentBlock: - // done - - case *LParen: - p.printf("(") - case *RParen: - p.printf(")") - - case *Line: - p.tokens(x.Token) - - case *LineBlock: - p.tokens(x.Token) - p.printf(" ") - p.expr(&x.LParen) - p.margin++ - for _, l := range x.Line { - p.newline() - p.expr(l) - } - p.margin-- - p.newline() - p.expr(&x.RParen) - } - - // Queue end-of-line comments for printing when we - // reach the end of the line. - p.comment = append(p.comment, x.Comment().Suffix...) -} - -func (p *printer) tokens(tokens []string) { - sep := "" - for _, t := range tokens { - if t == "," || t == ")" || t == "]" || t == "}" { - sep = "" - } - p.printf("%s%s", sep, t) - sep = " " - if t == "(" || t == "[" || t == "{" { - sep = "" - } - } -} diff --git a/go/extractor/vendor/golang.org/x/mod/modfile/read.go b/go/extractor/vendor/golang.org/x/mod/modfile/read.go deleted file mode 100644 index 5b5bb5e115b3..000000000000 --- a/go/extractor/vendor/golang.org/x/mod/modfile/read.go +++ /dev/null @@ -1,958 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package modfile - -import ( - "bytes" - "errors" - "fmt" - "os" - "strconv" - "strings" - "unicode" - "unicode/utf8" -) - -// A Position describes an arbitrary source position in a file, including the -// file, line, column, and byte offset. -type Position struct { - Line int // line in input (starting at 1) - LineRune int // rune in line (starting at 1) - Byte int // byte in input (starting at 0) -} - -// add returns the position at the end of s, assuming it starts at p. -func (p Position) add(s string) Position { - p.Byte += len(s) - if n := strings.Count(s, "\n"); n > 0 { - p.Line += n - s = s[strings.LastIndex(s, "\n")+1:] - p.LineRune = 1 - } - p.LineRune += utf8.RuneCountInString(s) - return p -} - -// An Expr represents an input element. -type Expr interface { - // Span returns the start and end position of the expression, - // excluding leading or trailing comments. - Span() (start, end Position) - - // Comment returns the comments attached to the expression. - // This method would normally be named 'Comments' but that - // would interfere with embedding a type of the same name. - Comment() *Comments -} - -// A Comment represents a single // comment. 
-type Comment struct { - Start Position - Token string // without trailing newline - Suffix bool // an end of line (not whole line) comment -} - -// Comments collects the comments associated with an expression. -type Comments struct { - Before []Comment // whole-line comments before this expression - Suffix []Comment // end-of-line comments after this expression - - // For top-level expressions only, After lists whole-line - // comments following the expression. - After []Comment -} - -// Comment returns the receiver. This isn't useful by itself, but -// a [Comments] struct is embedded into all the expression -// implementation types, and this gives each of those a Comment -// method to satisfy the Expr interface. -func (c *Comments) Comment() *Comments { - return c -} - -// A FileSyntax represents an entire go.mod file. -type FileSyntax struct { - Name string // file path - Comments - Stmt []Expr -} - -func (x *FileSyntax) Span() (start, end Position) { - if len(x.Stmt) == 0 { - return - } - start, _ = x.Stmt[0].Span() - _, end = x.Stmt[len(x.Stmt)-1].Span() - return start, end -} - -// addLine adds a line containing the given tokens to the file. -// -// If the first token of the hint matches the first token of the -// line, the new line is added at the end of the block containing hint, -// extracting hint into a new block if it is not yet in one. -// -// If the hint is non-nil buts its first token does not match, -// the new line is added after the block containing hint -// (or hint itself, if not in a block). -// -// If no hint is provided, addLine appends the line to the end of -// the last block with a matching first token, -// or to the end of the file if no such block exists. -func (x *FileSyntax) addLine(hint Expr, tokens ...string) *Line { - if hint == nil { - // If no hint given, add to the last statement of the given type. - Loop: - for i := len(x.Stmt) - 1; i >= 0; i-- { - stmt := x.Stmt[i] - switch stmt := stmt.(type) { - case *Line: - if stmt.Token != nil && stmt.Token[0] == tokens[0] { - hint = stmt - break Loop - } - case *LineBlock: - if stmt.Token[0] == tokens[0] { - hint = stmt - break Loop - } - } - } - } - - newLineAfter := func(i int) *Line { - new := &Line{Token: tokens} - if i == len(x.Stmt) { - x.Stmt = append(x.Stmt, new) - } else { - x.Stmt = append(x.Stmt, nil) - copy(x.Stmt[i+2:], x.Stmt[i+1:]) - x.Stmt[i+1] = new - } - return new - } - - if hint != nil { - for i, stmt := range x.Stmt { - switch stmt := stmt.(type) { - case *Line: - if stmt == hint { - if stmt.Token == nil || stmt.Token[0] != tokens[0] { - return newLineAfter(i) - } - - // Convert line to line block. - stmt.InBlock = true - block := &LineBlock{Token: stmt.Token[:1], Line: []*Line{stmt}} - stmt.Token = stmt.Token[1:] - x.Stmt[i] = block - new := &Line{Token: tokens[1:], InBlock: true} - block.Line = append(block.Line, new) - return new - } - - case *LineBlock: - if stmt == hint { - if stmt.Token[0] != tokens[0] { - return newLineAfter(i) - } - - new := &Line{Token: tokens[1:], InBlock: true} - stmt.Line = append(stmt.Line, new) - return new - } - - for j, line := range stmt.Line { - if line == hint { - if stmt.Token[0] != tokens[0] { - return newLineAfter(i) - } - - // Add new line after hint within the block. 
- stmt.Line = append(stmt.Line, nil) - copy(stmt.Line[j+2:], stmt.Line[j+1:]) - new := &Line{Token: tokens[1:], InBlock: true} - stmt.Line[j+1] = new - return new - } - } - } - } - } - - new := &Line{Token: tokens} - x.Stmt = append(x.Stmt, new) - return new -} - -func (x *FileSyntax) updateLine(line *Line, tokens ...string) { - if line.InBlock { - tokens = tokens[1:] - } - line.Token = tokens -} - -// markRemoved modifies line so that it (and its end-of-line comment, if any) -// will be dropped by (*FileSyntax).Cleanup. -func (line *Line) markRemoved() { - line.Token = nil - line.Comments.Suffix = nil -} - -// Cleanup cleans up the file syntax x after any edit operations. -// To avoid quadratic behavior, (*Line).markRemoved marks the line as dead -// by setting line.Token = nil but does not remove it from the slice -// in which it appears. After edits have all been indicated, -// calling Cleanup cleans out the dead lines. -func (x *FileSyntax) Cleanup() { - w := 0 - for _, stmt := range x.Stmt { - switch stmt := stmt.(type) { - case *Line: - if stmt.Token == nil { - continue - } - case *LineBlock: - ww := 0 - for _, line := range stmt.Line { - if line.Token != nil { - stmt.Line[ww] = line - ww++ - } - } - if ww == 0 { - continue - } - if ww == 1 { - // Collapse block into single line. - line := &Line{ - Comments: Comments{ - Before: commentsAdd(stmt.Before, stmt.Line[0].Before), - Suffix: commentsAdd(stmt.Line[0].Suffix, stmt.Suffix), - After: commentsAdd(stmt.Line[0].After, stmt.After), - }, - Token: stringsAdd(stmt.Token, stmt.Line[0].Token), - } - x.Stmt[w] = line - w++ - continue - } - stmt.Line = stmt.Line[:ww] - } - x.Stmt[w] = stmt - w++ - } - x.Stmt = x.Stmt[:w] -} - -func commentsAdd(x, y []Comment) []Comment { - return append(x[:len(x):len(x)], y...) -} - -func stringsAdd(x, y []string) []string { - return append(x[:len(x):len(x)], y...) -} - -// A CommentBlock represents a top-level block of comments separate -// from any rule. -type CommentBlock struct { - Comments - Start Position -} - -func (x *CommentBlock) Span() (start, end Position) { - return x.Start, x.Start -} - -// A Line is a single line of tokens. -type Line struct { - Comments - Start Position - Token []string - InBlock bool - End Position -} - -func (x *Line) Span() (start, end Position) { - return x.Start, x.End -} - -// A LineBlock is a factored block of lines, like -// -// require ( -// "x" -// "y" -// ) -type LineBlock struct { - Comments - Start Position - LParen LParen - Token []string - Line []*Line - RParen RParen -} - -func (x *LineBlock) Span() (start, end Position) { - return x.Start, x.RParen.Pos.add(")") -} - -// An LParen represents the beginning of a parenthesized line block. -// It is a place to store suffix comments. -type LParen struct { - Comments - Pos Position -} - -func (x *LParen) Span() (start, end Position) { - return x.Pos, x.Pos.add(")") -} - -// An RParen represents the end of a parenthesized line block. -// It is a place to store whole-line (before) comments. -type RParen struct { - Comments - Pos Position -} - -func (x *RParen) Span() (start, end Position) { - return x.Pos, x.Pos.add(")") -} - -// An input represents a single input file being parsed. -type input struct { - // Lexing state. 
- filename string // name of input file, for errors - complete []byte // entire input - remaining []byte // remaining input - tokenStart []byte // token being scanned to end of input - token token // next token to be returned by lex, peek - pos Position // current input position - comments []Comment // accumulated comments - - // Parser state. - file *FileSyntax // returned top-level syntax tree - parseErrors ErrorList // errors encountered during parsing - - // Comment assignment state. - pre []Expr // all expressions, in preorder traversal - post []Expr // all expressions, in postorder traversal -} - -func newInput(filename string, data []byte) *input { - return &input{ - filename: filename, - complete: data, - remaining: data, - pos: Position{Line: 1, LineRune: 1, Byte: 0}, - } -} - -// parse parses the input file. -func parse(file string, data []byte) (f *FileSyntax, err error) { - // The parser panics for both routine errors like syntax errors - // and for programmer bugs like array index errors. - // Turn both into error returns. Catching bug panics is - // especially important when processing many files. - in := newInput(file, data) - defer func() { - if e := recover(); e != nil && e != &in.parseErrors { - in.parseErrors = append(in.parseErrors, Error{ - Filename: in.filename, - Pos: in.pos, - Err: fmt.Errorf("internal error: %v", e), - }) - } - if err == nil && len(in.parseErrors) > 0 { - err = in.parseErrors - } - }() - - // Prime the lexer by reading in the first token. It will be available - // in the next peek() or lex() call. - in.readToken() - - // Invoke the parser. - in.parseFile() - if len(in.parseErrors) > 0 { - return nil, in.parseErrors - } - in.file.Name = in.filename - - // Assign comments to nearby syntax. - in.assignComments() - - return in.file, nil -} - -// Error is called to report an error. -// Error does not return: it panics. -func (in *input) Error(s string) { - in.parseErrors = append(in.parseErrors, Error{ - Filename: in.filename, - Pos: in.pos, - Err: errors.New(s), - }) - panic(&in.parseErrors) -} - -// eof reports whether the input has reached end of file. -func (in *input) eof() bool { - return len(in.remaining) == 0 -} - -// peekRune returns the next rune in the input without consuming it. -func (in *input) peekRune() int { - if len(in.remaining) == 0 { - return 0 - } - r, _ := utf8.DecodeRune(in.remaining) - return int(r) -} - -// peekPrefix reports whether the remaining input begins with the given prefix. -func (in *input) peekPrefix(prefix string) bool { - // This is like bytes.HasPrefix(in.remaining, []byte(prefix)) - // but without the allocation of the []byte copy of prefix. - for i := 0; i < len(prefix); i++ { - if i >= len(in.remaining) || in.remaining[i] != prefix[i] { - return false - } - } - return true -} - -// readRune consumes and returns the next rune in the input. -func (in *input) readRune() int { - if len(in.remaining) == 0 { - in.Error("internal lexer error: readRune at EOF") - } - r, size := utf8.DecodeRune(in.remaining) - in.remaining = in.remaining[size:] - if r == '\n' { - in.pos.Line++ - in.pos.LineRune = 1 - } else { - in.pos.LineRune++ - } - in.pos.Byte += size - return int(r) -} - -type token struct { - kind tokenKind - pos Position - endPos Position - text string -} - -type tokenKind int - -const ( - _EOF tokenKind = -(iota + 1) - _EOLCOMMENT - _IDENT - _STRING - _COMMENT - - // newlines and punctuation tokens are allowed as ASCII codes. 
-) - -func (k tokenKind) isComment() bool { - return k == _COMMENT || k == _EOLCOMMENT -} - -// isEOL returns whether a token terminates a line. -func (k tokenKind) isEOL() bool { - return k == _EOF || k == _EOLCOMMENT || k == '\n' -} - -// startToken marks the beginning of the next input token. -// It must be followed by a call to endToken, once the token's text has -// been consumed using readRune. -func (in *input) startToken() { - in.tokenStart = in.remaining - in.token.text = "" - in.token.pos = in.pos -} - -// endToken marks the end of an input token. -// It records the actual token string in tok.text. -// A single trailing newline (LF or CRLF) will be removed from comment tokens. -func (in *input) endToken(kind tokenKind) { - in.token.kind = kind - text := string(in.tokenStart[:len(in.tokenStart)-len(in.remaining)]) - if kind.isComment() { - if strings.HasSuffix(text, "\r\n") { - text = text[:len(text)-2] - } else { - text = strings.TrimSuffix(text, "\n") - } - } - in.token.text = text - in.token.endPos = in.pos -} - -// peek returns the kind of the next token returned by lex. -func (in *input) peek() tokenKind { - return in.token.kind -} - -// lex is called from the parser to obtain the next input token. -func (in *input) lex() token { - tok := in.token - in.readToken() - return tok -} - -// readToken lexes the next token from the text and stores it in in.token. -func (in *input) readToken() { - // Skip past spaces, stopping at non-space or EOF. - for !in.eof() { - c := in.peekRune() - if c == ' ' || c == '\t' || c == '\r' { - in.readRune() - continue - } - - // Comment runs to end of line. - if in.peekPrefix("//") { - in.startToken() - - // Is this comment the only thing on its line? - // Find the last \n before this // and see if it's all - // spaces from there to here. - i := bytes.LastIndex(in.complete[:in.pos.Byte], []byte("\n")) - suffix := len(bytes.TrimSpace(in.complete[i+1:in.pos.Byte])) > 0 - in.readRune() - in.readRune() - - // Consume comment. - for len(in.remaining) > 0 && in.readRune() != '\n' { - } - - // If we are at top level (not in a statement), hand the comment to - // the parser as a _COMMENT token. The grammar is written - // to handle top-level comments itself. - if !suffix { - in.endToken(_COMMENT) - return - } - - // Otherwise, save comment for later attachment to syntax tree. - in.endToken(_EOLCOMMENT) - in.comments = append(in.comments, Comment{in.token.pos, in.token.text, suffix}) - return - } - - if in.peekPrefix("/*") { - in.Error("mod files must use // comments (not /* */ comments)") - } - - // Found non-space non-comment. - break - } - - // Found the beginning of the next token. - in.startToken() - - // End of file. - if in.eof() { - in.endToken(_EOF) - return - } - - // Punctuation tokens. - switch c := in.peekRune(); c { - case '\n', '(', ')', '[', ']', '{', '}', ',': - in.readRune() - in.endToken(tokenKind(c)) - return - - case '"', '`': // quoted string - quote := c - in.readRune() - for { - if in.eof() { - in.pos = in.token.pos - in.Error("unexpected EOF in string") - } - if in.peekRune() == '\n' { - in.Error("unexpected newline in string") - } - c := in.readRune() - if c == quote { - break - } - if c == '\\' && quote != '`' { - if in.eof() { - in.pos = in.token.pos - in.Error("unexpected EOF in string") - } - in.readRune() - } - } - in.endToken(_STRING) - return - } - - // Checked all punctuation. Must be identifier token. 
- if c := in.peekRune(); !isIdent(c) { - in.Error(fmt.Sprintf("unexpected input character %#q", c)) - } - - // Scan over identifier. - for isIdent(in.peekRune()) { - if in.peekPrefix("//") { - break - } - if in.peekPrefix("/*") { - in.Error("mod files must use // comments (not /* */ comments)") - } - in.readRune() - } - in.endToken(_IDENT) -} - -// isIdent reports whether c is an identifier rune. -// We treat most printable runes as identifier runes, except for a handful of -// ASCII punctuation characters. -func isIdent(c int) bool { - switch r := rune(c); r { - case ' ', '(', ')', '[', ']', '{', '}', ',': - return false - default: - return !unicode.IsSpace(r) && unicode.IsPrint(r) - } -} - -// Comment assignment. -// We build two lists of all subexpressions, preorder and postorder. -// The preorder list is ordered by start location, with outer expressions first. -// The postorder list is ordered by end location, with outer expressions last. -// We use the preorder list to assign each whole-line comment to the syntax -// immediately following it, and we use the postorder list to assign each -// end-of-line comment to the syntax immediately preceding it. - -// order walks the expression adding it and its subexpressions to the -// preorder and postorder lists. -func (in *input) order(x Expr) { - if x != nil { - in.pre = append(in.pre, x) - } - switch x := x.(type) { - default: - panic(fmt.Errorf("order: unexpected type %T", x)) - case nil: - // nothing - case *LParen, *RParen: - // nothing - case *CommentBlock: - // nothing - case *Line: - // nothing - case *FileSyntax: - for _, stmt := range x.Stmt { - in.order(stmt) - } - case *LineBlock: - in.order(&x.LParen) - for _, l := range x.Line { - in.order(l) - } - in.order(&x.RParen) - } - if x != nil { - in.post = append(in.post, x) - } -} - -// assignComments attaches comments to nearby syntax. -func (in *input) assignComments() { - const debug = false - - // Generate preorder and postorder lists. - in.order(in.file) - - // Split into whole-line comments and suffix comments. - var line, suffix []Comment - for _, com := range in.comments { - if com.Suffix { - suffix = append(suffix, com) - } else { - line = append(line, com) - } - } - - if debug { - for _, c := range line { - fmt.Fprintf(os.Stderr, "LINE %q :%d:%d #%d\n", c.Token, c.Start.Line, c.Start.LineRune, c.Start.Byte) - } - } - - // Assign line comments to syntax immediately following. - for _, x := range in.pre { - start, _ := x.Span() - if debug { - fmt.Fprintf(os.Stderr, "pre %T :%d:%d #%d\n", x, start.Line, start.LineRune, start.Byte) - } - xcom := x.Comment() - for len(line) > 0 && start.Byte >= line[0].Start.Byte { - if debug { - fmt.Fprintf(os.Stderr, "ASSIGN LINE %q #%d\n", line[0].Token, line[0].Start.Byte) - } - xcom.Before = append(xcom.Before, line[0]) - line = line[1:] - } - } - - // Remaining line comments go at end of file. - in.file.After = append(in.file.After, line...) - - if debug { - for _, c := range suffix { - fmt.Fprintf(os.Stderr, "SUFFIX %q :%d:%d #%d\n", c.Token, c.Start.Line, c.Start.LineRune, c.Start.Byte) - } - } - - // Assign suffix comments to syntax immediately before. - for i := len(in.post) - 1; i >= 0; i-- { - x := in.post[i] - - start, end := x.Span() - if debug { - fmt.Fprintf(os.Stderr, "post %T :%d:%d #%d :%d:%d #%d\n", x, start.Line, start.LineRune, start.Byte, end.Line, end.LineRune, end.Byte) - } - - // Do not assign suffix comments to end of line block or whole file. - // Instead assign them to the last element inside. 
- switch x.(type) { - case *FileSyntax: - continue - } - - // Do not assign suffix comments to something that starts - // on an earlier line, so that in - // - // x ( y - // z ) // comment - // - // we assign the comment to z and not to x ( ... ). - if start.Line != end.Line { - continue - } - xcom := x.Comment() - for len(suffix) > 0 && end.Byte <= suffix[len(suffix)-1].Start.Byte { - if debug { - fmt.Fprintf(os.Stderr, "ASSIGN SUFFIX %q #%d\n", suffix[len(suffix)-1].Token, suffix[len(suffix)-1].Start.Byte) - } - xcom.Suffix = append(xcom.Suffix, suffix[len(suffix)-1]) - suffix = suffix[:len(suffix)-1] - } - } - - // We assigned suffix comments in reverse. - // If multiple suffix comments were appended to the same - // expression node, they are now in reverse. Fix that. - for _, x := range in.post { - reverseComments(x.Comment().Suffix) - } - - // Remaining suffix comments go at beginning of file. - in.file.Before = append(in.file.Before, suffix...) -} - -// reverseComments reverses the []Comment list. -func reverseComments(list []Comment) { - for i, j := 0, len(list)-1; i < j; i, j = i+1, j-1 { - list[i], list[j] = list[j], list[i] - } -} - -func (in *input) parseFile() { - in.file = new(FileSyntax) - var cb *CommentBlock - for { - switch in.peek() { - case '\n': - in.lex() - if cb != nil { - in.file.Stmt = append(in.file.Stmt, cb) - cb = nil - } - case _COMMENT: - tok := in.lex() - if cb == nil { - cb = &CommentBlock{Start: tok.pos} - } - com := cb.Comment() - com.Before = append(com.Before, Comment{Start: tok.pos, Token: tok.text}) - case _EOF: - if cb != nil { - in.file.Stmt = append(in.file.Stmt, cb) - } - return - default: - in.parseStmt() - if cb != nil { - in.file.Stmt[len(in.file.Stmt)-1].Comment().Before = cb.Before - cb = nil - } - } - } -} - -func (in *input) parseStmt() { - tok := in.lex() - start := tok.pos - end := tok.endPos - tokens := []string{tok.text} - for { - tok := in.lex() - switch { - case tok.kind.isEOL(): - in.file.Stmt = append(in.file.Stmt, &Line{ - Start: start, - Token: tokens, - End: end, - }) - return - - case tok.kind == '(': - if next := in.peek(); next.isEOL() { - // Start of block: no more tokens on this line. - in.file.Stmt = append(in.file.Stmt, in.parseLineBlock(start, tokens, tok)) - return - } else if next == ')' { - rparen := in.lex() - if in.peek().isEOL() { - // Empty block. - in.lex() - in.file.Stmt = append(in.file.Stmt, &LineBlock{ - Start: start, - Token: tokens, - LParen: LParen{Pos: tok.pos}, - RParen: RParen{Pos: rparen.pos}, - }) - return - } - // '( )' in the middle of the line, not a block. - tokens = append(tokens, tok.text, rparen.text) - } else { - // '(' in the middle of the line, not a block. - tokens = append(tokens, tok.text) - } - - default: - tokens = append(tokens, tok.text) - end = tok.endPos - } - } -} - -func (in *input) parseLineBlock(start Position, token []string, lparen token) *LineBlock { - x := &LineBlock{ - Start: start, - Token: token, - LParen: LParen{Pos: lparen.pos}, - } - var comments []Comment - for { - switch in.peek() { - case _EOLCOMMENT: - // Suffix comment, will be attached later by assignComments. - in.lex() - case '\n': - // Blank line. Add an empty comment to preserve it. 
- in.lex() - if len(comments) == 0 && len(x.Line) > 0 || len(comments) > 0 && comments[len(comments)-1].Token != "" { - comments = append(comments, Comment{}) - } - case _COMMENT: - tok := in.lex() - comments = append(comments, Comment{Start: tok.pos, Token: tok.text}) - case _EOF: - in.Error(fmt.Sprintf("syntax error (unterminated block started at %s:%d:%d)", in.filename, x.Start.Line, x.Start.LineRune)) - case ')': - rparen := in.lex() - x.RParen.Before = comments - x.RParen.Pos = rparen.pos - if !in.peek().isEOL() { - in.Error("syntax error (expected newline after closing paren)") - } - in.lex() - return x - default: - l := in.parseLine() - x.Line = append(x.Line, l) - l.Comment().Before = comments - comments = nil - } - } -} - -func (in *input) parseLine() *Line { - tok := in.lex() - if tok.kind.isEOL() { - in.Error("internal parse error: parseLine at end of line") - } - start := tok.pos - end := tok.endPos - tokens := []string{tok.text} - for { - tok := in.lex() - if tok.kind.isEOL() { - return &Line{ - Start: start, - Token: tokens, - End: end, - InBlock: true, - } - } - tokens = append(tokens, tok.text) - end = tok.endPos - } -} - -var ( - slashSlash = []byte("//") - moduleStr = []byte("module") -) - -// ModulePath returns the module path from the gomod file text. -// If it cannot find a module path, it returns an empty string. -// It is tolerant of unrelated problems in the go.mod file. -func ModulePath(mod []byte) string { - for len(mod) > 0 { - line := mod - mod = nil - if i := bytes.IndexByte(line, '\n'); i >= 0 { - line, mod = line[:i], line[i+1:] - } - if i := bytes.Index(line, slashSlash); i >= 0 { - line = line[:i] - } - line = bytes.TrimSpace(line) - if !bytes.HasPrefix(line, moduleStr) { - continue - } - line = line[len(moduleStr):] - n := len(line) - line = bytes.TrimSpace(line) - if len(line) == n || len(line) == 0 { - continue - } - - if line[0] == '"' || line[0] == '`' { - p, err := strconv.Unquote(string(line)) - if err != nil { - return "" // malformed quoted string or multiline module path - } - return p - } - - return string(line) - } - return "" // missing module path -} diff --git a/go/extractor/vendor/golang.org/x/mod/modfile/rule.go b/go/extractor/vendor/golang.org/x/mod/modfile/rule.go deleted file mode 100644 index 35fd1f534cf8..000000000000 --- a/go/extractor/vendor/golang.org/x/mod/modfile/rule.go +++ /dev/null @@ -1,1663 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package modfile implements a parser and formatter for go.mod files. -// -// The go.mod syntax is described in -// https://pkg.go.dev/cmd/go/#hdr-The_go_mod_file. -// -// The [Parse] and [ParseLax] functions both parse a go.mod file and return an -// abstract syntax tree. ParseLax ignores unknown statements and may be used to -// parse go.mod files that may have been developed with newer versions of Go. -// -// The [File] struct returned by Parse and ParseLax represent an abstract -// go.mod file. File has several methods like [File.AddNewRequire] and -// [File.DropReplace] that can be used to programmatically edit a file. -// -// The [Format] function formats a File back to a byte slice which can be -// written to a file. 
-package modfile - -import ( - "errors" - "fmt" - "path/filepath" - "sort" - "strconv" - "strings" - "unicode" - - "golang.org/x/mod/internal/lazyregexp" - "golang.org/x/mod/module" - "golang.org/x/mod/semver" -) - -// A File is the parsed, interpreted form of a go.mod file. -type File struct { - Module *Module - Go *Go - Toolchain *Toolchain - Require []*Require - Exclude []*Exclude - Replace []*Replace - Retract []*Retract - - Syntax *FileSyntax -} - -// A Module is the module statement. -type Module struct { - Mod module.Version - Deprecated string - Syntax *Line -} - -// A Go is the go statement. -type Go struct { - Version string // "1.23" - Syntax *Line -} - -// A Toolchain is the toolchain statement. -type Toolchain struct { - Name string // "go1.21rc1" - Syntax *Line -} - -// An Exclude is a single exclude statement. -type Exclude struct { - Mod module.Version - Syntax *Line -} - -// A Replace is a single replace statement. -type Replace struct { - Old module.Version - New module.Version - Syntax *Line -} - -// A Retract is a single retract statement. -type Retract struct { - VersionInterval - Rationale string - Syntax *Line -} - -// A VersionInterval represents a range of versions with upper and lower bounds. -// Intervals are closed: both bounds are included. When Low is equal to High, -// the interval may refer to a single version ('v1.2.3') or an interval -// ('[v1.2.3, v1.2.3]'); both have the same representation. -type VersionInterval struct { - Low, High string -} - -// A Require is a single require statement. -type Require struct { - Mod module.Version - Indirect bool // has "// indirect" comment - Syntax *Line -} - -func (r *Require) markRemoved() { - r.Syntax.markRemoved() - *r = Require{} -} - -func (r *Require) setVersion(v string) { - r.Mod.Version = v - - if line := r.Syntax; len(line.Token) > 0 { - if line.InBlock { - // If the line is preceded by an empty line, remove it; see - // https://golang.org/issue/33779. - if len(line.Comments.Before) == 1 && len(line.Comments.Before[0].Token) == 0 { - line.Comments.Before = line.Comments.Before[:0] - } - if len(line.Token) >= 2 { // example.com v1.2.3 - line.Token[1] = v - } - } else { - if len(line.Token) >= 3 { // require example.com v1.2.3 - line.Token[2] = v - } - } - } -} - -// setIndirect sets line to have (or not have) a "// indirect" comment. -func (r *Require) setIndirect(indirect bool) { - r.Indirect = indirect - line := r.Syntax - if isIndirect(line) == indirect { - return - } - if indirect { - // Adding comment. - if len(line.Suffix) == 0 { - // New comment. - line.Suffix = []Comment{{Token: "// indirect", Suffix: true}} - return - } - - com := &line.Suffix[0] - text := strings.TrimSpace(strings.TrimPrefix(com.Token, string(slashSlash))) - if text == "" { - // Empty comment. - com.Token = "// indirect" - return - } - - // Insert at beginning of existing comment. - com.Token = "// indirect; " + text - return - } - - // Removing comment. - f := strings.TrimSpace(strings.TrimPrefix(line.Suffix[0].Token, string(slashSlash))) - if f == "indirect" { - // Remove whole comment. - line.Suffix = nil - return - } - - // Remove comment prefix. 
- com := &line.Suffix[0] - i := strings.Index(com.Token, "indirect;") - com.Token = "//" + com.Token[i+len("indirect;"):] -} - -// isIndirect reports whether line has a "// indirect" comment, -// meaning it is in go.mod only for its effect on indirect dependencies, -// so that it can be dropped entirely once the effective version of the -// indirect dependency reaches the given minimum version. -func isIndirect(line *Line) bool { - if len(line.Suffix) == 0 { - return false - } - f := strings.Fields(strings.TrimPrefix(line.Suffix[0].Token, string(slashSlash))) - return (len(f) == 1 && f[0] == "indirect" || len(f) > 1 && f[0] == "indirect;") -} - -func (f *File) AddModuleStmt(path string) error { - if f.Syntax == nil { - f.Syntax = new(FileSyntax) - } - if f.Module == nil { - f.Module = &Module{ - Mod: module.Version{Path: path}, - Syntax: f.Syntax.addLine(nil, "module", AutoQuote(path)), - } - } else { - f.Module.Mod.Path = path - f.Syntax.updateLine(f.Module.Syntax, "module", AutoQuote(path)) - } - return nil -} - -func (f *File) AddComment(text string) { - if f.Syntax == nil { - f.Syntax = new(FileSyntax) - } - f.Syntax.Stmt = append(f.Syntax.Stmt, &CommentBlock{ - Comments: Comments{ - Before: []Comment{ - { - Token: text, - }, - }, - }, - }) -} - -type VersionFixer func(path, version string) (string, error) - -// errDontFix is returned by a VersionFixer to indicate the version should be -// left alone, even if it's not canonical. -var dontFixRetract VersionFixer = func(_, vers string) (string, error) { - return vers, nil -} - -// Parse parses and returns a go.mod file. -// -// file is the name of the file, used in positions and errors. -// -// data is the content of the file. -// -// fix is an optional function that canonicalizes module versions. -// If fix is nil, all module versions must be canonical ([module.CanonicalVersion] -// must return the same string). -func Parse(file string, data []byte, fix VersionFixer) (*File, error) { - return parseToFile(file, data, fix, true) -} - -// ParseLax is like Parse but ignores unknown statements. -// It is used when parsing go.mod files other than the main module, -// under the theory that most statement types we add in the future will -// only apply in the main module, like exclude and replace, -// and so we get better gradual deployments if old go commands -// simply ignore those statements when found in go.mod files -// in dependencies. -func ParseLax(file string, data []byte, fix VersionFixer) (*File, error) { - return parseToFile(file, data, fix, false) -} - -func parseToFile(file string, data []byte, fix VersionFixer, strict bool) (parsed *File, err error) { - fs, err := parse(file, data) - if err != nil { - return nil, err - } - f := &File{ - Syntax: fs, - } - var errs ErrorList - - // fix versions in retract directives after the file is parsed. - // We need the module path to fix versions, and it might be at the end. 
- defer func() { - oldLen := len(errs) - f.fixRetract(fix, &errs) - if len(errs) > oldLen { - parsed, err = nil, errs - } - }() - - for _, x := range fs.Stmt { - switch x := x.(type) { - case *Line: - f.add(&errs, nil, x, x.Token[0], x.Token[1:], fix, strict) - - case *LineBlock: - if len(x.Token) > 1 { - if strict { - errs = append(errs, Error{ - Filename: file, - Pos: x.Start, - Err: fmt.Errorf("unknown block type: %s", strings.Join(x.Token, " ")), - }) - } - continue - } - switch x.Token[0] { - default: - if strict { - errs = append(errs, Error{ - Filename: file, - Pos: x.Start, - Err: fmt.Errorf("unknown block type: %s", strings.Join(x.Token, " ")), - }) - } - continue - case "module", "require", "exclude", "replace", "retract": - for _, l := range x.Line { - f.add(&errs, x, l, x.Token[0], l.Token, fix, strict) - } - } - } - } - - if len(errs) > 0 { - return nil, errs - } - return f, nil -} - -var GoVersionRE = lazyregexp.New(`^([1-9][0-9]*)\.(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))?([a-z]+[0-9]+)?$`) -var laxGoVersionRE = lazyregexp.New(`^v?(([1-9][0-9]*)\.(0|[1-9][0-9]*))([^0-9].*)$`) - -// Toolchains must be named beginning with `go1`, -// like "go1.20.3" or "go1.20.3-gccgo". As a special case, "default" is also permitted. -var ToolchainRE = lazyregexp.New(`^default$|^go1($|\.)`) - -func (f *File) add(errs *ErrorList, block *LineBlock, line *Line, verb string, args []string, fix VersionFixer, strict bool) { - // If strict is false, this module is a dependency. - // We ignore all unknown directives as well as main-module-only - // directives like replace and exclude. It will work better for - // forward compatibility if we can depend on modules that have unknown - // statements (presumed relevant only when acting as the main module) - // and simply ignore those statements. 
- if !strict { - switch verb { - case "go", "module", "retract", "require": - // want these even for dependency go.mods - default: - return - } - } - - wrapModPathError := func(modPath string, err error) { - *errs = append(*errs, Error{ - Filename: f.Syntax.Name, - Pos: line.Start, - ModPath: modPath, - Verb: verb, - Err: err, - }) - } - wrapError := func(err error) { - *errs = append(*errs, Error{ - Filename: f.Syntax.Name, - Pos: line.Start, - Err: err, - }) - } - errorf := func(format string, args ...interface{}) { - wrapError(fmt.Errorf(format, args...)) - } - - switch verb { - default: - errorf("unknown directive: %s", verb) - - case "go": - if f.Go != nil { - errorf("repeated go statement") - return - } - if len(args) != 1 { - errorf("go directive expects exactly one argument") - return - } else if !GoVersionRE.MatchString(args[0]) { - fixed := false - if !strict { - if m := laxGoVersionRE.FindStringSubmatch(args[0]); m != nil { - args[0] = m[1] - fixed = true - } - } - if !fixed { - errorf("invalid go version '%s': must match format 1.23.0", args[0]) - return - } - } - - f.Go = &Go{Syntax: line} - f.Go.Version = args[0] - - case "toolchain": - if f.Toolchain != nil { - errorf("repeated toolchain statement") - return - } - if len(args) != 1 { - errorf("toolchain directive expects exactly one argument") - return - } else if strict && !ToolchainRE.MatchString(args[0]) { - errorf("invalid toolchain version '%s': must match format go1.23.0 or local", args[0]) - return - } - f.Toolchain = &Toolchain{Syntax: line} - f.Toolchain.Name = args[0] - - case "module": - if f.Module != nil { - errorf("repeated module statement") - return - } - deprecated := parseDeprecation(block, line) - f.Module = &Module{ - Syntax: line, - Deprecated: deprecated, - } - if len(args) != 1 { - errorf("usage: module module/path") - return - } - s, err := parseString(&args[0]) - if err != nil { - errorf("invalid quoted string: %v", err) - return - } - f.Module.Mod = module.Version{Path: s} - - case "require", "exclude": - if len(args) != 2 { - errorf("usage: %s module/path v1.2.3", verb) - return - } - s, err := parseString(&args[0]) - if err != nil { - errorf("invalid quoted string: %v", err) - return - } - v, err := parseVersion(verb, s, &args[1], fix) - if err != nil { - wrapError(err) - return - } - pathMajor, err := modulePathMajor(s) - if err != nil { - wrapError(err) - return - } - if err := module.CheckPathMajor(v, pathMajor); err != nil { - wrapModPathError(s, err) - return - } - if verb == "require" { - f.Require = append(f.Require, &Require{ - Mod: module.Version{Path: s, Version: v}, - Syntax: line, - Indirect: isIndirect(line), - }) - } else { - f.Exclude = append(f.Exclude, &Exclude{ - Mod: module.Version{Path: s, Version: v}, - Syntax: line, - }) - } - - case "replace": - replace, wrappederr := parseReplace(f.Syntax.Name, line, verb, args, fix) - if wrappederr != nil { - *errs = append(*errs, *wrappederr) - return - } - f.Replace = append(f.Replace, replace) - - case "retract": - rationale := parseDirectiveComment(block, line) - vi, err := parseVersionInterval(verb, "", &args, dontFixRetract) - if err != nil { - if strict { - wrapError(err) - return - } else { - // Only report errors parsing intervals in the main module. We may - // support additional syntax in the future, such as open and half-open - // intervals. Those can't be supported now, because they break the - // go.mod parser, even in lax mode. 
- return - } - } - if len(args) > 0 && strict { - // In the future, there may be additional information after the version. - errorf("unexpected token after version: %q", args[0]) - return - } - retract := &Retract{ - VersionInterval: vi, - Rationale: rationale, - Syntax: line, - } - f.Retract = append(f.Retract, retract) - } -} - -func parseReplace(filename string, line *Line, verb string, args []string, fix VersionFixer) (*Replace, *Error) { - wrapModPathError := func(modPath string, err error) *Error { - return &Error{ - Filename: filename, - Pos: line.Start, - ModPath: modPath, - Verb: verb, - Err: err, - } - } - wrapError := func(err error) *Error { - return &Error{ - Filename: filename, - Pos: line.Start, - Err: err, - } - } - errorf := func(format string, args ...interface{}) *Error { - return wrapError(fmt.Errorf(format, args...)) - } - - arrow := 2 - if len(args) >= 2 && args[1] == "=>" { - arrow = 1 - } - if len(args) < arrow+2 || len(args) > arrow+3 || args[arrow] != "=>" { - return nil, errorf("usage: %s module/path [v1.2.3] => other/module v1.4\n\t or %s module/path [v1.2.3] => ../local/directory", verb, verb) - } - s, err := parseString(&args[0]) - if err != nil { - return nil, errorf("invalid quoted string: %v", err) - } - pathMajor, err := modulePathMajor(s) - if err != nil { - return nil, wrapModPathError(s, err) - - } - var v string - if arrow == 2 { - v, err = parseVersion(verb, s, &args[1], fix) - if err != nil { - return nil, wrapError(err) - } - if err := module.CheckPathMajor(v, pathMajor); err != nil { - return nil, wrapModPathError(s, err) - } - } - ns, err := parseString(&args[arrow+1]) - if err != nil { - return nil, errorf("invalid quoted string: %v", err) - } - nv := "" - if len(args) == arrow+2 { - if !IsDirectoryPath(ns) { - if strings.Contains(ns, "@") { - return nil, errorf("replacement module must match format 'path version', not 'path@version'") - } - return nil, errorf("replacement module without version must be directory path (rooted or starting with . or ..)") - } - if filepath.Separator == '/' && strings.Contains(ns, `\`) { - return nil, errorf("replacement directory appears to be Windows path (on a non-windows system)") - } - } - if len(args) == arrow+3 { - nv, err = parseVersion(verb, ns, &args[arrow+2], fix) - if err != nil { - return nil, wrapError(err) - } - if IsDirectoryPath(ns) { - return nil, errorf("replacement module directory path %q cannot have version", ns) - } - } - return &Replace{ - Old: module.Version{Path: s, Version: v}, - New: module.Version{Path: ns, Version: nv}, - Syntax: line, - }, nil -} - -// fixRetract applies fix to each retract directive in f, appending any errors -// to errs. -// -// Most versions are fixed as we parse the file, but for retract directives, -// the relevant module path is the one specified with the module directive, -// and that might appear at the end of the file (or not at all). 
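The two replacement shapes accepted above (module to module-at-version, and module to local directory) can be seen end to end in a small sketch; the module paths and the ../other directory are invented:

package main

import (
	"fmt"

	"golang.org/x/mod/modfile"
)

func main() {
	data := []byte(`module example.com/hello

go 1.21

replace example.com/dep v1.2.3 => example.com/fork v1.4.0

replace example.com/other => ../other
`)
	f, err := modfile.Parse("go.mod", data, nil)
	if err != nil {
		panic(err)
	}
	for _, r := range f.Replace {
		// A directory replacement has an empty New.Version.
		fmt.Printf("%s => %s\n", r.Old, r.New)
	}
}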
-func (f *File) fixRetract(fix VersionFixer, errs *ErrorList) { - if fix == nil { - return - } - path := "" - if f.Module != nil { - path = f.Module.Mod.Path - } - var r *Retract - wrapError := func(err error) { - *errs = append(*errs, Error{ - Filename: f.Syntax.Name, - Pos: r.Syntax.Start, - Err: err, - }) - } - - for _, r = range f.Retract { - if path == "" { - wrapError(errors.New("no module directive found, so retract cannot be used")) - return // only print the first one of these - } - - args := r.Syntax.Token - if args[0] == "retract" { - args = args[1:] - } - vi, err := parseVersionInterval("retract", path, &args, fix) - if err != nil { - wrapError(err) - } - r.VersionInterval = vi - } -} - -func (f *WorkFile) add(errs *ErrorList, line *Line, verb string, args []string, fix VersionFixer) { - wrapError := func(err error) { - *errs = append(*errs, Error{ - Filename: f.Syntax.Name, - Pos: line.Start, - Err: err, - }) - } - errorf := func(format string, args ...interface{}) { - wrapError(fmt.Errorf(format, args...)) - } - - switch verb { - default: - errorf("unknown directive: %s", verb) - - case "go": - if f.Go != nil { - errorf("repeated go statement") - return - } - if len(args) != 1 { - errorf("go directive expects exactly one argument") - return - } else if !GoVersionRE.MatchString(args[0]) { - errorf("invalid go version '%s': must match format 1.23", args[0]) - return - } - - f.Go = &Go{Syntax: line} - f.Go.Version = args[0] - - case "toolchain": - if f.Toolchain != nil { - errorf("repeated toolchain statement") - return - } - if len(args) != 1 { - errorf("toolchain directive expects exactly one argument") - return - } else if !ToolchainRE.MatchString(args[0]) { - errorf("invalid toolchain version '%s': must match format go1.23 or local", args[0]) - return - } - - f.Toolchain = &Toolchain{Syntax: line} - f.Toolchain.Name = args[0] - - case "use": - if len(args) != 1 { - errorf("usage: %s local/dir", verb) - return - } - s, err := parseString(&args[0]) - if err != nil { - errorf("invalid quoted string: %v", err) - return - } - f.Use = append(f.Use, &Use{ - Path: s, - Syntax: line, - }) - - case "replace": - replace, wrappederr := parseReplace(f.Syntax.Name, line, verb, args, fix) - if wrappederr != nil { - *errs = append(*errs, *wrappederr) - return - } - f.Replace = append(f.Replace, replace) - } -} - -// IsDirectoryPath reports whether the given path should be interpreted as a directory path. -// Just like on the go command line, relative paths starting with a '.' or '..' path component -// and rooted paths are directory paths; the rest are module paths. -func IsDirectoryPath(ns string) bool { - // Because go.mod files can move from one system to another, - // we check all known path syntaxes, both Unix and Windows. - return ns == "." || strings.HasPrefix(ns, "./") || strings.HasPrefix(ns, `.\`) || - ns == ".." || strings.HasPrefix(ns, "../") || strings.HasPrefix(ns, `..\`) || - strings.HasPrefix(ns, "/") || strings.HasPrefix(ns, `\`) || - len(ns) >= 2 && ('A' <= ns[0] && ns[0] <= 'Z' || 'a' <= ns[0] && ns[0] <= 'z') && ns[1] == ':' -} - -// MustQuote reports whether s must be quoted in order to appear as -// a single token in a go.mod line. 
-func MustQuote(s string) bool { - for _, r := range s { - switch r { - case ' ', '"', '\'', '`': - return true - - case '(', ')', '[', ']', '{', '}', ',': - if len(s) > 1 { - return true - } - - default: - if !unicode.IsPrint(r) { - return true - } - } - } - return s == "" || strings.Contains(s, "//") || strings.Contains(s, "/*") -} - -// AutoQuote returns s or, if quoting is required for s to appear in a go.mod, -// the quotation of s. -func AutoQuote(s string) string { - if MustQuote(s) { - return strconv.Quote(s) - } - return s -} - -func parseVersionInterval(verb string, path string, args *[]string, fix VersionFixer) (VersionInterval, error) { - toks := *args - if len(toks) == 0 || toks[0] == "(" { - return VersionInterval{}, fmt.Errorf("expected '[' or version") - } - if toks[0] != "[" { - v, err := parseVersion(verb, path, &toks[0], fix) - if err != nil { - return VersionInterval{}, err - } - *args = toks[1:] - return VersionInterval{Low: v, High: v}, nil - } - toks = toks[1:] - - if len(toks) == 0 { - return VersionInterval{}, fmt.Errorf("expected version after '['") - } - low, err := parseVersion(verb, path, &toks[0], fix) - if err != nil { - return VersionInterval{}, err - } - toks = toks[1:] - - if len(toks) == 0 || toks[0] != "," { - return VersionInterval{}, fmt.Errorf("expected ',' after version") - } - toks = toks[1:] - - if len(toks) == 0 { - return VersionInterval{}, fmt.Errorf("expected version after ','") - } - high, err := parseVersion(verb, path, &toks[0], fix) - if err != nil { - return VersionInterval{}, err - } - toks = toks[1:] - - if len(toks) == 0 || toks[0] != "]" { - return VersionInterval{}, fmt.Errorf("expected ']' after version") - } - toks = toks[1:] - - *args = toks - return VersionInterval{Low: low, High: high}, nil -} - -func parseString(s *string) (string, error) { - t := *s - if strings.HasPrefix(t, `"`) { - var err error - if t, err = strconv.Unquote(t); err != nil { - return "", err - } - } else if strings.ContainsAny(t, "\"'`") { - // Other quotes are reserved both for possible future expansion - // and to avoid confusion. For example if someone types 'x' - // we want that to be a syntax error and not a literal x in literal quotation marks. - return "", fmt.Errorf("unquoted string cannot contain quote") - } - *s = AutoQuote(t) - return t, nil -} - -var deprecatedRE = lazyregexp.New(`(?s)(?:^|\n\n)Deprecated: *(.*?)(?:$|\n\n)`) - -// parseDeprecation extracts the text of comments on a "module" directive and -// extracts a deprecation message from that. -// -// A deprecation message is contained in a paragraph within a block of comments -// that starts with "Deprecated:" (case sensitive). The message runs until the -// end of the paragraph and does not include the "Deprecated:" prefix. If the -// comment block has multiple paragraphs that start with "Deprecated:", -// parseDeprecation returns the message from the first. -func parseDeprecation(block *LineBlock, line *Line) string { - text := parseDirectiveComment(block, line) - m := deprecatedRE.FindStringSubmatch(text) - if m == nil { - return "" - } - return m[1] -} - -// parseDirectiveComment extracts the text of comments on a directive. -// If the directive's line does not have comments and is part of a block that -// does have comments, the block's comments are used. 
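In practice these two quoting helpers are what callers use when emitting go.mod tokens; a tiny sketch:

package main

import (
	"fmt"

	"golang.org/x/mod/modfile"
)

func main() {
	// Ordinary module paths need no quoting.
	fmt.Println(modfile.AutoQuote("example.com/hello")) // example.com/hello

	// A token containing a space (or another special character) must be quoted.
	fmt.Println(modfile.MustQuote("my path")) // true
	fmt.Println(modfile.AutoQuote("my path")) // "my path"
}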
-func parseDirectiveComment(block *LineBlock, line *Line) string { - comments := line.Comment() - if block != nil && len(comments.Before) == 0 && len(comments.Suffix) == 0 { - comments = block.Comment() - } - groups := [][]Comment{comments.Before, comments.Suffix} - var lines []string - for _, g := range groups { - for _, c := range g { - if !strings.HasPrefix(c.Token, "//") { - continue // blank line - } - lines = append(lines, strings.TrimSpace(strings.TrimPrefix(c.Token, "//"))) - } - } - return strings.Join(lines, "\n") -} - -type ErrorList []Error - -func (e ErrorList) Error() string { - errStrs := make([]string, len(e)) - for i, err := range e { - errStrs[i] = err.Error() - } - return strings.Join(errStrs, "\n") -} - -type Error struct { - Filename string - Pos Position - Verb string - ModPath string - Err error -} - -func (e *Error) Error() string { - var pos string - if e.Pos.LineRune > 1 { - // Don't print LineRune if it's 1 (beginning of line). - // It's always 1 except in scanner errors, which are rare. - pos = fmt.Sprintf("%s:%d:%d: ", e.Filename, e.Pos.Line, e.Pos.LineRune) - } else if e.Pos.Line > 0 { - pos = fmt.Sprintf("%s:%d: ", e.Filename, e.Pos.Line) - } else if e.Filename != "" { - pos = fmt.Sprintf("%s: ", e.Filename) - } - - var directive string - if e.ModPath != "" { - directive = fmt.Sprintf("%s %s: ", e.Verb, e.ModPath) - } else if e.Verb != "" { - directive = fmt.Sprintf("%s: ", e.Verb) - } - - return pos + directive + e.Err.Error() -} - -func (e *Error) Unwrap() error { return e.Err } - -func parseVersion(verb string, path string, s *string, fix VersionFixer) (string, error) { - t, err := parseString(s) - if err != nil { - return "", &Error{ - Verb: verb, - ModPath: path, - Err: &module.InvalidVersionError{ - Version: *s, - Err: err, - }, - } - } - if fix != nil { - fixed, err := fix(path, t) - if err != nil { - if err, ok := err.(*module.ModuleError); ok { - return "", &Error{ - Verb: verb, - ModPath: path, - Err: err.Err, - } - } - return "", err - } - t = fixed - } else { - cv := module.CanonicalVersion(t) - if cv == "" { - return "", &Error{ - Verb: verb, - ModPath: path, - Err: &module.InvalidVersionError{ - Version: t, - Err: errors.New("must be of the form v1.2.3"), - }, - } - } - t = cv - } - *s = t - return *s, nil -} - -func modulePathMajor(path string) (string, error) { - _, major, ok := module.SplitPathVersion(path) - if !ok { - return "", fmt.Errorf("invalid module path") - } - return major, nil -} - -func (f *File) Format() ([]byte, error) { - return Format(f.Syntax), nil -} - -// Cleanup cleans up the file f after any edit operations. -// To avoid quadratic behavior, modifications like [File.DropRequire] -// clear the entry but do not remove it from the slice. -// Cleanup cleans out all the cleared entries. 
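What that clear-then-Cleanup contract looks like from the caller's side, in a minimal sketch (module paths and versions are made up):

package main

import (
	"fmt"

	"golang.org/x/mod/modfile"
)

func main() {
	data := []byte(`module example.com/hello

go 1.21

require (
	example.com/a v1.0.0
	example.com/b v1.1.0
)
`)
	f, err := modfile.Parse("go.mod", data, nil)
	if err != nil {
		panic(err)
	}
	// DropRequire only clears the entry and marks its line removed;
	// Cleanup is what actually prunes the cleared entries before formatting.
	if err := f.DropRequire("example.com/a"); err != nil {
		panic(err)
	}
	f.Cleanup()
	out, _ := f.Format()
	fmt.Printf("%s", out)
}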
-func (f *File) Cleanup() { - w := 0 - for _, r := range f.Require { - if r.Mod.Path != "" { - f.Require[w] = r - w++ - } - } - f.Require = f.Require[:w] - - w = 0 - for _, x := range f.Exclude { - if x.Mod.Path != "" { - f.Exclude[w] = x - w++ - } - } - f.Exclude = f.Exclude[:w] - - w = 0 - for _, r := range f.Replace { - if r.Old.Path != "" { - f.Replace[w] = r - w++ - } - } - f.Replace = f.Replace[:w] - - w = 0 - for _, r := range f.Retract { - if r.Low != "" || r.High != "" { - f.Retract[w] = r - w++ - } - } - f.Retract = f.Retract[:w] - - f.Syntax.Cleanup() -} - -func (f *File) AddGoStmt(version string) error { - if !GoVersionRE.MatchString(version) { - return fmt.Errorf("invalid language version %q", version) - } - if f.Go == nil { - var hint Expr - if f.Module != nil && f.Module.Syntax != nil { - hint = f.Module.Syntax - } - f.Go = &Go{ - Version: version, - Syntax: f.Syntax.addLine(hint, "go", version), - } - } else { - f.Go.Version = version - f.Syntax.updateLine(f.Go.Syntax, "go", version) - } - return nil -} - -// DropGoStmt deletes the go statement from the file. -func (f *File) DropGoStmt() { - if f.Go != nil { - f.Go.Syntax.markRemoved() - f.Go = nil - } -} - -// DropToolchainStmt deletes the toolchain statement from the file. -func (f *File) DropToolchainStmt() { - if f.Toolchain != nil { - f.Toolchain.Syntax.markRemoved() - f.Toolchain = nil - } -} - -func (f *File) AddToolchainStmt(name string) error { - if !ToolchainRE.MatchString(name) { - return fmt.Errorf("invalid toolchain name %q", name) - } - if f.Toolchain == nil { - var hint Expr - if f.Go != nil && f.Go.Syntax != nil { - hint = f.Go.Syntax - } else if f.Module != nil && f.Module.Syntax != nil { - hint = f.Module.Syntax - } - f.Toolchain = &Toolchain{ - Name: name, - Syntax: f.Syntax.addLine(hint, "toolchain", name), - } - } else { - f.Toolchain.Name = name - f.Syntax.updateLine(f.Toolchain.Syntax, "toolchain", name) - } - return nil -} - -// AddRequire sets the first require line for path to version vers, -// preserving any existing comments for that line and removing all -// other lines for path. -// -// If no line currently exists for path, AddRequire adds a new line -// at the end of the last require block. -func (f *File) AddRequire(path, vers string) error { - need := true - for _, r := range f.Require { - if r.Mod.Path == path { - if need { - r.Mod.Version = vers - f.Syntax.updateLine(r.Syntax, "require", AutoQuote(path), vers) - need = false - } else { - r.Syntax.markRemoved() - *r = Require{} - } - } - } - - if need { - f.AddNewRequire(path, vers, false) - } - return nil -} - -// AddNewRequire adds a new require line for path at version vers at the end of -// the last require block, regardless of any existing require lines for path. -func (f *File) AddNewRequire(path, vers string, indirect bool) { - line := f.Syntax.addLine(nil, "require", AutoQuote(path), vers) - r := &Require{ - Mod: module.Version{Path: path, Version: vers}, - Syntax: line, - } - r.setIndirect(indirect) - f.Require = append(f.Require, r) -} - -// SetRequire updates the requirements of f to contain exactly req, preserving -// the existing block structure and line comment contents (except for 'indirect' -// markings) for the first requirement on each named module path. -// -// The Syntax field is ignored for the requirements in req. -// -// Any requirements not already present in the file are added to the block -// containing the last require line. 
-// -// The requirements in req must specify at most one distinct version for each -// module path. -// -// If any existing requirements may be removed, the caller should call -// [File.Cleanup] after all edits are complete. -func (f *File) SetRequire(req []*Require) { - type elem struct { - version string - indirect bool - } - need := make(map[string]elem) - for _, r := range req { - if prev, dup := need[r.Mod.Path]; dup && prev.version != r.Mod.Version { - panic(fmt.Errorf("SetRequire called with conflicting versions for path %s (%s and %s)", r.Mod.Path, prev.version, r.Mod.Version)) - } - need[r.Mod.Path] = elem{r.Mod.Version, r.Indirect} - } - - // Update or delete the existing Require entries to preserve - // only the first for each module path in req. - for _, r := range f.Require { - e, ok := need[r.Mod.Path] - if ok { - r.setVersion(e.version) - r.setIndirect(e.indirect) - } else { - r.markRemoved() - } - delete(need, r.Mod.Path) - } - - // Add new entries in the last block of the file for any paths that weren't - // already present. - // - // This step is nondeterministic, but the final result will be deterministic - // because we will sort the block. - for path, e := range need { - f.AddNewRequire(path, e.version, e.indirect) - } - - f.SortBlocks() -} - -// SetRequireSeparateIndirect updates the requirements of f to contain the given -// requirements. Comment contents (except for 'indirect' markings) are retained -// from the first existing requirement for each module path. Like SetRequire, -// SetRequireSeparateIndirect adds requirements for new paths in req, -// updates the version and "// indirect" comment on existing requirements, -// and deletes requirements on paths not in req. Existing duplicate requirements -// are deleted. -// -// As its name suggests, SetRequireSeparateIndirect puts direct and indirect -// requirements into two separate blocks, one containing only direct -// requirements, and the other containing only indirect requirements. -// SetRequireSeparateIndirect may move requirements between these two blocks -// when their indirect markings change. However, SetRequireSeparateIndirect -// won't move requirements from other blocks, especially blocks with comments. -// -// If the file initially has one uncommented block of requirements, -// SetRequireSeparateIndirect will split it into a direct-only and indirect-only -// block. This aids in the transition to separate blocks. -func (f *File) SetRequireSeparateIndirect(req []*Require) { - // hasComments returns whether a line or block has comments - // other than "indirect". - hasComments := func(c Comments) bool { - return len(c.Before) > 0 || len(c.After) > 0 || len(c.Suffix) > 1 || - (len(c.Suffix) == 1 && - strings.TrimSpace(strings.TrimPrefix(c.Suffix[0].Token, string(slashSlash))) != "indirect") - } - - // moveReq adds r to block. If r was in another block, moveReq deletes - // it from that block and transfers its comments. - moveReq := func(r *Require, block *LineBlock) { - var line *Line - if r.Syntax == nil { - line = &Line{Token: []string{AutoQuote(r.Mod.Path), r.Mod.Version}} - r.Syntax = line - if r.Indirect { - r.setIndirect(true) - } - } else { - line = new(Line) - *line = *r.Syntax - if !line.InBlock && len(line.Token) > 0 && line.Token[0] == "require" { - line.Token = line.Token[1:] - } - r.Syntax.Token = nil // Cleanup will delete the old line. - r.Syntax = line - } - line.InBlock = true - block.Line = append(block.Line, line) - } - - // Examine existing require lines and blocks. 
- var ( - // We may insert new requirements into the last uncommented - // direct-only and indirect-only blocks. We may also move requirements - // to the opposite block if their indirect markings change. - lastDirectIndex = -1 - lastIndirectIndex = -1 - - // If there are no direct-only or indirect-only blocks, a new block may - // be inserted after the last require line or block. - lastRequireIndex = -1 - - // If there's only one require line or block, and it's uncommented, - // we'll move its requirements to the direct-only or indirect-only blocks. - requireLineOrBlockCount = 0 - - // Track the block each requirement belongs to (if any) so we can - // move them later. - lineToBlock = make(map[*Line]*LineBlock) - ) - for i, stmt := range f.Syntax.Stmt { - switch stmt := stmt.(type) { - case *Line: - if len(stmt.Token) == 0 || stmt.Token[0] != "require" { - continue - } - lastRequireIndex = i - requireLineOrBlockCount++ - if !hasComments(stmt.Comments) { - if isIndirect(stmt) { - lastIndirectIndex = i - } else { - lastDirectIndex = i - } - } - - case *LineBlock: - if len(stmt.Token) == 0 || stmt.Token[0] != "require" { - continue - } - lastRequireIndex = i - requireLineOrBlockCount++ - allDirect := len(stmt.Line) > 0 && !hasComments(stmt.Comments) - allIndirect := len(stmt.Line) > 0 && !hasComments(stmt.Comments) - for _, line := range stmt.Line { - lineToBlock[line] = stmt - if hasComments(line.Comments) { - allDirect = false - allIndirect = false - } else if isIndirect(line) { - allDirect = false - } else { - allIndirect = false - } - } - if allDirect { - lastDirectIndex = i - } - if allIndirect { - lastIndirectIndex = i - } - } - } - - oneFlatUncommentedBlock := requireLineOrBlockCount == 1 && - !hasComments(*f.Syntax.Stmt[lastRequireIndex].Comment()) - - // Create direct and indirect blocks if needed. Convert lines into blocks - // if needed. If we end up with an empty block or a one-line block, - // Cleanup will delete it or convert it to a line later. - insertBlock := func(i int) *LineBlock { - block := &LineBlock{Token: []string{"require"}} - f.Syntax.Stmt = append(f.Syntax.Stmt, nil) - copy(f.Syntax.Stmt[i+1:], f.Syntax.Stmt[i:]) - f.Syntax.Stmt[i] = block - return block - } - - ensureBlock := func(i int) *LineBlock { - switch stmt := f.Syntax.Stmt[i].(type) { - case *LineBlock: - return stmt - case *Line: - block := &LineBlock{ - Token: []string{"require"}, - Line: []*Line{stmt}, - } - stmt.Token = stmt.Token[1:] // remove "require" - stmt.InBlock = true - f.Syntax.Stmt[i] = block - return block - default: - panic(fmt.Sprintf("unexpected statement: %v", stmt)) - } - } - - var lastDirectBlock *LineBlock - if lastDirectIndex < 0 { - if lastIndirectIndex >= 0 { - lastDirectIndex = lastIndirectIndex - lastIndirectIndex++ - } else if lastRequireIndex >= 0 { - lastDirectIndex = lastRequireIndex + 1 - } else { - lastDirectIndex = len(f.Syntax.Stmt) - } - lastDirectBlock = insertBlock(lastDirectIndex) - } else { - lastDirectBlock = ensureBlock(lastDirectIndex) - } - - var lastIndirectBlock *LineBlock - if lastIndirectIndex < 0 { - lastIndirectIndex = lastDirectIndex + 1 - lastIndirectBlock = insertBlock(lastIndirectIndex) - } else { - lastIndirectBlock = ensureBlock(lastIndirectIndex) - } - - // Delete requirements we don't want anymore. - // Update versions and indirect comments on requirements we want to keep. 
- // If a requirement is in last{Direct,Indirect}Block with the wrong - // indirect marking after this, or if the requirement is in an single - // uncommented mixed block (oneFlatUncommentedBlock), move it to the - // correct block. - // - // Some blocks may be empty after this. Cleanup will remove them. - need := make(map[string]*Require) - for _, r := range req { - need[r.Mod.Path] = r - } - have := make(map[string]*Require) - for _, r := range f.Require { - path := r.Mod.Path - if need[path] == nil || have[path] != nil { - // Requirement not needed, or duplicate requirement. Delete. - r.markRemoved() - continue - } - have[r.Mod.Path] = r - r.setVersion(need[path].Mod.Version) - r.setIndirect(need[path].Indirect) - if need[path].Indirect && - (oneFlatUncommentedBlock || lineToBlock[r.Syntax] == lastDirectBlock) { - moveReq(r, lastIndirectBlock) - } else if !need[path].Indirect && - (oneFlatUncommentedBlock || lineToBlock[r.Syntax] == lastIndirectBlock) { - moveReq(r, lastDirectBlock) - } - } - - // Add new requirements. - for path, r := range need { - if have[path] == nil { - if r.Indirect { - moveReq(r, lastIndirectBlock) - } else { - moveReq(r, lastDirectBlock) - } - f.Require = append(f.Require, r) - } - } - - f.SortBlocks() -} - -func (f *File) DropRequire(path string) error { - for _, r := range f.Require { - if r.Mod.Path == path { - r.Syntax.markRemoved() - *r = Require{} - } - } - return nil -} - -// AddExclude adds a exclude statement to the mod file. Errors if the provided -// version is not a canonical version string -func (f *File) AddExclude(path, vers string) error { - if err := checkCanonicalVersion(path, vers); err != nil { - return err - } - - var hint *Line - for _, x := range f.Exclude { - if x.Mod.Path == path && x.Mod.Version == vers { - return nil - } - if x.Mod.Path == path { - hint = x.Syntax - } - } - - f.Exclude = append(f.Exclude, &Exclude{Mod: module.Version{Path: path, Version: vers}, Syntax: f.Syntax.addLine(hint, "exclude", AutoQuote(path), vers)}) - return nil -} - -func (f *File) DropExclude(path, vers string) error { - for _, x := range f.Exclude { - if x.Mod.Path == path && x.Mod.Version == vers { - x.Syntax.markRemoved() - *x = Exclude{} - } - } - return nil -} - -func (f *File) AddReplace(oldPath, oldVers, newPath, newVers string) error { - return addReplace(f.Syntax, &f.Replace, oldPath, oldVers, newPath, newVers) -} - -func addReplace(syntax *FileSyntax, replace *[]*Replace, oldPath, oldVers, newPath, newVers string) error { - need := true - old := module.Version{Path: oldPath, Version: oldVers} - new := module.Version{Path: newPath, Version: newVers} - tokens := []string{"replace", AutoQuote(oldPath)} - if oldVers != "" { - tokens = append(tokens, oldVers) - } - tokens = append(tokens, "=>", AutoQuote(newPath)) - if newVers != "" { - tokens = append(tokens, newVers) - } - - var hint *Line - for _, r := range *replace { - if r.Old.Path == oldPath && (oldVers == "" || r.Old.Version == oldVers) { - if need { - // Found replacement for old; update to use new. - r.New = new - syntax.updateLine(r.Syntax, tokens...) - need = false - continue - } - // Already added; delete other replacements for same. 
- r.Syntax.markRemoved() - *r = Replace{} - } - if r.Old.Path == oldPath { - hint = r.Syntax - } - } - if need { - *replace = append(*replace, &Replace{Old: old, New: new, Syntax: syntax.addLine(hint, tokens...)}) - } - return nil -} - -func (f *File) DropReplace(oldPath, oldVers string) error { - for _, r := range f.Replace { - if r.Old.Path == oldPath && r.Old.Version == oldVers { - r.Syntax.markRemoved() - *r = Replace{} - } - } - return nil -} - -// AddRetract adds a retract statement to the mod file. Errors if the provided -// version interval does not consist of canonical version strings -func (f *File) AddRetract(vi VersionInterval, rationale string) error { - var path string - if f.Module != nil { - path = f.Module.Mod.Path - } - if err := checkCanonicalVersion(path, vi.High); err != nil { - return err - } - if err := checkCanonicalVersion(path, vi.Low); err != nil { - return err - } - - r := &Retract{ - VersionInterval: vi, - } - if vi.Low == vi.High { - r.Syntax = f.Syntax.addLine(nil, "retract", AutoQuote(vi.Low)) - } else { - r.Syntax = f.Syntax.addLine(nil, "retract", "[", AutoQuote(vi.Low), ",", AutoQuote(vi.High), "]") - } - if rationale != "" { - for _, line := range strings.Split(rationale, "\n") { - com := Comment{Token: "// " + line} - r.Syntax.Comment().Before = append(r.Syntax.Comment().Before, com) - } - } - return nil -} - -func (f *File) DropRetract(vi VersionInterval) error { - for _, r := range f.Retract { - if r.VersionInterval == vi { - r.Syntax.markRemoved() - *r = Retract{} - } - } - return nil -} - -func (f *File) SortBlocks() { - f.removeDups() // otherwise sorting is unsafe - - // semanticSortForExcludeVersionV is the Go version (plus leading "v") at which - // lines in exclude blocks start to use semantic sort instead of lexicographic sort. - // See go.dev/issue/60028. - const semanticSortForExcludeVersionV = "v1.21" - useSemanticSortForExclude := f.Go != nil && semver.Compare("v"+f.Go.Version, semanticSortForExcludeVersionV) >= 0 - - for _, stmt := range f.Syntax.Stmt { - block, ok := stmt.(*LineBlock) - if !ok { - continue - } - less := lineLess - if block.Token[0] == "exclude" && useSemanticSortForExclude { - less = lineExcludeLess - } else if block.Token[0] == "retract" { - less = lineRetractLess - } - sort.SliceStable(block.Line, func(i, j int) bool { - return less(block.Line[i], block.Line[j]) - }) - } -} - -// removeDups removes duplicate exclude and replace directives. -// -// Earlier exclude directives take priority. -// -// Later replace directives take priority. -// -// require directives are not de-duplicated. That's left up to higher-level -// logic (MVS). -// -// retract directives are not de-duplicated since comments are -// meaningful, and versions may be retracted multiple times. -func (f *File) removeDups() { - removeDups(f.Syntax, &f.Exclude, &f.Replace) -} - -func removeDups(syntax *FileSyntax, exclude *[]*Exclude, replace *[]*Replace) { - kill := make(map[*Line]bool) - - // Remove duplicate excludes. - if exclude != nil { - haveExclude := make(map[module.Version]bool) - for _, x := range *exclude { - if haveExclude[x.Mod] { - kill[x.Syntax] = true - continue - } - haveExclude[x.Mod] = true - } - var excl []*Exclude - for _, x := range *exclude { - if !kill[x.Syntax] { - excl = append(excl, x) - } - } - *exclude = excl - } - - // Remove duplicate replacements. - // Later replacements take priority over earlier ones. 
- haveReplace := make(map[module.Version]bool) - for i := len(*replace) - 1; i >= 0; i-- { - x := (*replace)[i] - if haveReplace[x.Old] { - kill[x.Syntax] = true - continue - } - haveReplace[x.Old] = true - } - var repl []*Replace - for _, x := range *replace { - if !kill[x.Syntax] { - repl = append(repl, x) - } - } - *replace = repl - - // Duplicate require and retract directives are not removed. - - // Drop killed statements from the syntax tree. - var stmts []Expr - for _, stmt := range syntax.Stmt { - switch stmt := stmt.(type) { - case *Line: - if kill[stmt] { - continue - } - case *LineBlock: - var lines []*Line - for _, line := range stmt.Line { - if !kill[line] { - lines = append(lines, line) - } - } - stmt.Line = lines - if len(lines) == 0 { - continue - } - } - stmts = append(stmts, stmt) - } - syntax.Stmt = stmts -} - -// lineLess returns whether li should be sorted before lj. It sorts -// lexicographically without assigning any special meaning to tokens. -func lineLess(li, lj *Line) bool { - for k := 0; k < len(li.Token) && k < len(lj.Token); k++ { - if li.Token[k] != lj.Token[k] { - return li.Token[k] < lj.Token[k] - } - } - return len(li.Token) < len(lj.Token) -} - -// lineExcludeLess reports whether li should be sorted before lj for lines in -// an "exclude" block. -func lineExcludeLess(li, lj *Line) bool { - if len(li.Token) != 2 || len(lj.Token) != 2 { - // Not a known exclude specification. - // Fall back to sorting lexicographically. - return lineLess(li, lj) - } - // An exclude specification has two tokens: ModulePath and Version. - // Compare module path by string order and version by semver rules. - if pi, pj := li.Token[0], lj.Token[0]; pi != pj { - return pi < pj - } - return semver.Compare(li.Token[1], lj.Token[1]) < 0 -} - -// lineRetractLess returns whether li should be sorted before lj for lines in -// a "retract" block. It treats each line as a version interval. Single versions -// are compared as if they were intervals with the same low and high version. -// Intervals are sorted in descending order, first by low version, then by -// high version, using semver.Compare. -func lineRetractLess(li, lj *Line) bool { - interval := func(l *Line) VersionInterval { - if len(l.Token) == 1 { - return VersionInterval{Low: l.Token[0], High: l.Token[0]} - } else if len(l.Token) == 5 && l.Token[0] == "[" && l.Token[2] == "," && l.Token[4] == "]" { - return VersionInterval{Low: l.Token[1], High: l.Token[3]} - } else { - // Line in unknown format. Treat as an invalid version. - return VersionInterval{} - } - } - vii := interval(li) - vij := interval(lj) - if cmp := semver.Compare(vii.Low, vij.Low); cmp != 0 { - return cmp > 0 - } - return semver.Compare(vii.High, vij.High) > 0 -} - -// checkCanonicalVersion returns a non-nil error if vers is not a canonical -// version string or does not match the major version of path. -// -// If path is non-empty, the error text suggests a format with a major version -// corresponding to the path. 
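From the caller's side, the canonical-version requirement enforced below is what AddExclude and AddRetract report; a hedged sketch (module paths, versions, and the rationale are invented):

package main

import (
	"fmt"

	"golang.org/x/mod/modfile"
)

func main() {
	f, err := modfile.Parse("go.mod", []byte("module example.com/hello\n\ngo 1.21\n"), nil)
	if err != nil {
		panic(err)
	}

	// A non-canonical version is rejected.
	fmt.Println(f.AddExclude("example.com/dep", "1.2.3")) // error: must be of the form v1.2.3

	// Canonical versions are accepted; the retract rationale becomes a comment.
	_ = f.AddExclude("example.com/dep", "v1.2.3")
	_ = f.AddRetract(modfile.VersionInterval{Low: "v0.1.0", High: "v0.1.5"}, "contains a data race")

	out, _ := f.Format()
	fmt.Printf("%s", out)
}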
-func checkCanonicalVersion(path, vers string) error { - _, pathMajor, pathMajorOk := module.SplitPathVersion(path) - - if vers == "" || vers != module.CanonicalVersion(vers) { - if pathMajor == "" { - return &module.InvalidVersionError{ - Version: vers, - Err: fmt.Errorf("must be of the form v1.2.3"), - } - } - return &module.InvalidVersionError{ - Version: vers, - Err: fmt.Errorf("must be of the form %s.2.3", module.PathMajorPrefix(pathMajor)), - } - } - - if pathMajorOk { - if err := module.CheckPathMajor(vers, pathMajor); err != nil { - if pathMajor == "" { - // In this context, the user probably wrote "v2.3.4" when they meant - // "v2.3.4+incompatible". Suggest that instead of "v0 or v1". - return &module.InvalidVersionError{ - Version: vers, - Err: fmt.Errorf("should be %s+incompatible (or module %s/%v)", vers, path, semver.Major(vers)), - } - } - return err - } - } - - return nil -} diff --git a/go/extractor/vendor/golang.org/x/mod/modfile/work.go b/go/extractor/vendor/golang.org/x/mod/modfile/work.go deleted file mode 100644 index d7b99376ebe7..000000000000 --- a/go/extractor/vendor/golang.org/x/mod/modfile/work.go +++ /dev/null @@ -1,285 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package modfile - -import ( - "fmt" - "sort" - "strings" -) - -// A WorkFile is the parsed, interpreted form of a go.work file. -type WorkFile struct { - Go *Go - Toolchain *Toolchain - Use []*Use - Replace []*Replace - - Syntax *FileSyntax -} - -// A Use is a single directory statement. -type Use struct { - Path string // Use path of module. - ModulePath string // Module path in the comment. - Syntax *Line -} - -// ParseWork parses and returns a go.work file. -// -// file is the name of the file, used in positions and errors. -// -// data is the content of the file. -// -// fix is an optional function that canonicalizes module versions. -// If fix is nil, all module versions must be canonical ([module.CanonicalVersion] -// must return the same string). -func ParseWork(file string, data []byte, fix VersionFixer) (*WorkFile, error) { - fs, err := parse(file, data) - if err != nil { - return nil, err - } - f := &WorkFile{ - Syntax: fs, - } - var errs ErrorList - - for _, x := range fs.Stmt { - switch x := x.(type) { - case *Line: - f.add(&errs, x, x.Token[0], x.Token[1:], fix) - - case *LineBlock: - if len(x.Token) > 1 { - errs = append(errs, Error{ - Filename: file, - Pos: x.Start, - Err: fmt.Errorf("unknown block type: %s", strings.Join(x.Token, " ")), - }) - continue - } - switch x.Token[0] { - default: - errs = append(errs, Error{ - Filename: file, - Pos: x.Start, - Err: fmt.Errorf("unknown block type: %s", strings.Join(x.Token, " ")), - }) - continue - case "use", "replace": - for _, l := range x.Line { - f.add(&errs, l, x.Token[0], l.Token, fix) - } - } - } - } - - if len(errs) > 0 { - return nil, errs - } - return f, nil -} - -// Cleanup cleans up the file f after any edit operations. -// To avoid quadratic behavior, modifications like [WorkFile.DropRequire] -// clear the entry but do not remove it from the slice. -// Cleanup cleans out all the cleared entries. 
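The go.work counterpart just introduced follows the same parse-edit-render pattern; a minimal sketch (the use paths and module path are invented), rendering via the package-level Format over the underlying syntax tree:

package main

import (
	"fmt"

	"golang.org/x/mod/modfile"
)

func main() {
	data := []byte("go 1.21\n\nuse ./tools\n")
	wf, err := modfile.ParseWork("go.work", data, nil)
	if err != nil {
		panic(err)
	}
	// The second argument is recorded in the Use entry's ModulePath field.
	if err := wf.AddUse("./app", "example.com/app"); err != nil {
		panic(err)
	}
	wf.Cleanup()
	fmt.Printf("%s", modfile.Format(wf.Syntax))
}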
-func (f *WorkFile) Cleanup() { - w := 0 - for _, r := range f.Use { - if r.Path != "" { - f.Use[w] = r - w++ - } - } - f.Use = f.Use[:w] - - w = 0 - for _, r := range f.Replace { - if r.Old.Path != "" { - f.Replace[w] = r - w++ - } - } - f.Replace = f.Replace[:w] - - f.Syntax.Cleanup() -} - -func (f *WorkFile) AddGoStmt(version string) error { - if !GoVersionRE.MatchString(version) { - return fmt.Errorf("invalid language version %q", version) - } - if f.Go == nil { - stmt := &Line{Token: []string{"go", version}} - f.Go = &Go{ - Version: version, - Syntax: stmt, - } - // Find the first non-comment-only block and add - // the go statement before it. That will keep file comments at the top. - i := 0 - for i = 0; i < len(f.Syntax.Stmt); i++ { - if _, ok := f.Syntax.Stmt[i].(*CommentBlock); !ok { - break - } - } - f.Syntax.Stmt = append(append(f.Syntax.Stmt[:i:i], stmt), f.Syntax.Stmt[i:]...) - } else { - f.Go.Version = version - f.Syntax.updateLine(f.Go.Syntax, "go", version) - } - return nil -} - -func (f *WorkFile) AddToolchainStmt(name string) error { - if !ToolchainRE.MatchString(name) { - return fmt.Errorf("invalid toolchain name %q", name) - } - if f.Toolchain == nil { - stmt := &Line{Token: []string{"toolchain", name}} - f.Toolchain = &Toolchain{ - Name: name, - Syntax: stmt, - } - // Find the go line and add the toolchain line after it. - // Or else find the first non-comment-only block and add - // the toolchain line before it. That will keep file comments at the top. - i := 0 - for i = 0; i < len(f.Syntax.Stmt); i++ { - if line, ok := f.Syntax.Stmt[i].(*Line); ok && len(line.Token) > 0 && line.Token[0] == "go" { - i++ - goto Found - } - } - for i = 0; i < len(f.Syntax.Stmt); i++ { - if _, ok := f.Syntax.Stmt[i].(*CommentBlock); !ok { - break - } - } - Found: - f.Syntax.Stmt = append(append(f.Syntax.Stmt[:i:i], stmt), f.Syntax.Stmt[i:]...) - } else { - f.Toolchain.Name = name - f.Syntax.updateLine(f.Toolchain.Syntax, "toolchain", name) - } - return nil -} - -// DropGoStmt deletes the go statement from the file. -func (f *WorkFile) DropGoStmt() { - if f.Go != nil { - f.Go.Syntax.markRemoved() - f.Go = nil - } -} - -// DropToolchainStmt deletes the toolchain statement from the file. -func (f *WorkFile) DropToolchainStmt() { - if f.Toolchain != nil { - f.Toolchain.Syntax.markRemoved() - f.Toolchain = nil - } -} - -func (f *WorkFile) AddUse(diskPath, modulePath string) error { - need := true - for _, d := range f.Use { - if d.Path == diskPath { - if need { - d.ModulePath = modulePath - f.Syntax.updateLine(d.Syntax, "use", AutoQuote(diskPath)) - need = false - } else { - d.Syntax.markRemoved() - *d = Use{} - } - } - } - - if need { - f.AddNewUse(diskPath, modulePath) - } - return nil -} - -func (f *WorkFile) AddNewUse(diskPath, modulePath string) { - line := f.Syntax.addLine(nil, "use", AutoQuote(diskPath)) - f.Use = append(f.Use, &Use{Path: diskPath, ModulePath: modulePath, Syntax: line}) -} - -func (f *WorkFile) SetUse(dirs []*Use) { - need := make(map[string]string) - for _, d := range dirs { - need[d.Path] = d.ModulePath - } - - for _, d := range f.Use { - if modulePath, ok := need[d.Path]; ok { - d.ModulePath = modulePath - } else { - d.Syntax.markRemoved() - *d = Use{} - } - } - - // TODO(#45713): Add module path to comment. 
- - for diskPath, modulePath := range need { - f.AddNewUse(diskPath, modulePath) - } - f.SortBlocks() -} - -func (f *WorkFile) DropUse(path string) error { - for _, d := range f.Use { - if d.Path == path { - d.Syntax.markRemoved() - *d = Use{} - } - } - return nil -} - -func (f *WorkFile) AddReplace(oldPath, oldVers, newPath, newVers string) error { - return addReplace(f.Syntax, &f.Replace, oldPath, oldVers, newPath, newVers) -} - -func (f *WorkFile) DropReplace(oldPath, oldVers string) error { - for _, r := range f.Replace { - if r.Old.Path == oldPath && r.Old.Version == oldVers { - r.Syntax.markRemoved() - *r = Replace{} - } - } - return nil -} - -func (f *WorkFile) SortBlocks() { - f.removeDups() // otherwise sorting is unsafe - - for _, stmt := range f.Syntax.Stmt { - block, ok := stmt.(*LineBlock) - if !ok { - continue - } - sort.SliceStable(block.Line, func(i, j int) bool { - return lineLess(block.Line[i], block.Line[j]) - }) - } -} - -// removeDups removes duplicate replace directives. -// -// Later replace directives take priority. -// -// require directives are not de-duplicated. That's left up to higher-level -// logic (MVS). -// -// retract directives are not de-duplicated since comments are -// meaningful, and versions may be retracted multiple times. -func (f *WorkFile) removeDups() { - removeDups(f.Syntax, nil, &f.Replace) -} diff --git a/go/extractor/vendor/golang.org/x/mod/module/BUILD.bazel b/go/extractor/vendor/golang.org/x/mod/module/BUILD.bazel deleted file mode 100644 index 3bf5ae9997d1..000000000000 --- a/go/extractor/vendor/golang.org/x/mod/module/BUILD.bazel +++ /dev/null @@ -1,18 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "module", - srcs = [ - "module.go", - "pseudo.go", - ], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/mod/module", - importpath = "golang.org/x/mod/module", - visibility = ["//visibility:public"], - deps = [ - "//go/extractor/vendor/golang.org/x/mod/internal/lazyregexp", - "//go/extractor/vendor/golang.org/x/mod/semver", - ], -) diff --git a/go/extractor/vendor/golang.org/x/mod/module/module.go b/go/extractor/vendor/golang.org/x/mod/module/module.go deleted file mode 100644 index 2a364b229b9f..000000000000 --- a/go/extractor/vendor/golang.org/x/mod/module/module.go +++ /dev/null @@ -1,841 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package module defines the module.Version type along with support code. -// -// The [module.Version] type is a simple Path, Version pair: -// -// type Version struct { -// Path string -// Version string -// } -// -// There are no restrictions imposed directly by use of this structure, -// but additional checking functions, most notably [Check], verify that -// a particular path, version pair is valid. -// -// # Escaped Paths -// -// Module paths appear as substrings of file system paths -// (in the download cache) and of web server URLs in the proxy protocol. -// In general we cannot rely on file systems to be case-sensitive, -// nor can we rely on web servers, since they read from file systems. -// That is, we cannot rely on the file system to keep rsc.io/QUOTE -// and rsc.io/quote separate. Windows and macOS don't. -// Instead, we must never require two different casings of a file path. 
-// Because we want the download cache to match the proxy protocol, -// and because we want the proxy protocol to be possible to serve -// from a tree of static files (which might be stored on a case-insensitive -// file system), the proxy protocol must never require two different casings -// of a URL path either. -// -// One possibility would be to make the escaped form be the lowercase -// hexadecimal encoding of the actual path bytes. This would avoid ever -// needing different casings of a file path, but it would be fairly illegible -// to most programmers when those paths appeared in the file system -// (including in file paths in compiler errors and stack traces) -// in web server logs, and so on. Instead, we want a safe escaped form that -// leaves most paths unaltered. -// -// The safe escaped form is to replace every uppercase letter -// with an exclamation mark followed by the letter's lowercase equivalent. -// -// For example, -// -// github.com/Azure/azure-sdk-for-go -> github.com/!azure/azure-sdk-for-go. -// github.com/GoogleCloudPlatform/cloudsql-proxy -> github.com/!google!cloud!platform/cloudsql-proxy -// github.com/Sirupsen/logrus -> github.com/!sirupsen/logrus. -// -// Import paths that avoid upper-case letters are left unchanged. -// Note that because import paths are ASCII-only and avoid various -// problematic punctuation (like : < and >), the escaped form is also ASCII-only -// and avoids the same problematic punctuation. -// -// Import paths have never allowed exclamation marks, so there is no -// need to define how to escape a literal !. -// -// # Unicode Restrictions -// -// Today, paths are disallowed from using Unicode. -// -// Although paths are currently disallowed from using Unicode, -// we would like at some point to allow Unicode letters as well, to assume that -// file systems and URLs are Unicode-safe (storing UTF-8), and apply -// the !-for-uppercase convention for escaping them in the file system. -// But there are at least two subtle considerations. -// -// First, note that not all case-fold equivalent distinct runes -// form an upper/lower pair. -// For example, U+004B ('K'), U+006B ('k'), and U+212A ('β„ͺ' for Kelvin) -// are three distinct runes that case-fold to each other. -// When we do add Unicode letters, we must not assume that upper/lower -// are the only case-equivalent pairs. -// Perhaps the Kelvin symbol would be disallowed entirely, for example. -// Or perhaps it would escape as "!!k", or perhaps as "(212A)". -// -// Second, it would be nice to allow Unicode marks as well as letters, -// but marks include combining marks, and then we must deal not -// only with case folding but also normalization: both U+00E9 ('Γ©') -// and U+0065 U+0301 ('e' followed by combining acute accent) -// look the same on the page and are treated by some file systems -// as the same path. If we do allow Unicode marks in paths, there -// must be some kind of normalization to allow only one canonical -// encoding of any character used in an import path. -package module - -// IMPORTANT NOTE -// -// This file essentially defines the set of valid import paths for the go command. -// There are many subtle considerations, including Unicode ambiguity, -// security, network, and file system representations. -// -// This file also defines the set of valid module path and version combinations, -// another topic with many subtle considerations. -// -// Changes to the semantics in this file require approval from rsc. 
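To make the escaping convention concrete, here is a small stand-alone sketch of the rule just described; it illustrates the convention only and is not the package's own (validating) implementation:

package main

import (
	"fmt"
	"strings"
)

// escapeCase replaces every uppercase ASCII letter with '!' followed by its
// lowercase equivalent, as described above. Paths without uppercase letters
// come back unchanged.
func escapeCase(path string) string {
	var b strings.Builder
	for _, r := range path {
		if 'A' <= r && r <= 'Z' {
			b.WriteByte('!')
			b.WriteRune(r + ('a' - 'A'))
		} else {
			b.WriteRune(r)
		}
	}
	return b.String()
}

func main() {
	fmt.Println(escapeCase("github.com/Azure/azure-sdk-for-go"))
	// github.com/!azure/azure-sdk-for-go
}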
- -import ( - "errors" - "fmt" - "path" - "sort" - "strings" - "unicode" - "unicode/utf8" - - "golang.org/x/mod/semver" -) - -// A Version (for clients, a module.Version) is defined by a module path and version pair. -// These are stored in their plain (unescaped) form. -type Version struct { - // Path is a module path, like "golang.org/x/text" or "rsc.io/quote/v2". - Path string - - // Version is usually a semantic version in canonical form. - // There are three exceptions to this general rule. - // First, the top-level target of a build has no specific version - // and uses Version = "". - // Second, during MVS calculations the version "none" is used - // to represent the decision to take no version of a given module. - // Third, filesystem paths found in "replace" directives are - // represented by a path with an empty version. - Version string `json:",omitempty"` -} - -// String returns a representation of the Version suitable for logging -// (Path@Version, or just Path if Version is empty). -func (m Version) String() string { - if m.Version == "" { - return m.Path - } - return m.Path + "@" + m.Version -} - -// A ModuleError indicates an error specific to a module. -type ModuleError struct { - Path string - Version string - Err error -} - -// VersionError returns a [ModuleError] derived from a [Version] and error, -// or err itself if it is already such an error. -func VersionError(v Version, err error) error { - var mErr *ModuleError - if errors.As(err, &mErr) && mErr.Path == v.Path && mErr.Version == v.Version { - return err - } - return &ModuleError{ - Path: v.Path, - Version: v.Version, - Err: err, - } -} - -func (e *ModuleError) Error() string { - if v, ok := e.Err.(*InvalidVersionError); ok { - return fmt.Sprintf("%s@%s: invalid %s: %v", e.Path, v.Version, v.noun(), v.Err) - } - if e.Version != "" { - return fmt.Sprintf("%s@%s: %v", e.Path, e.Version, e.Err) - } - return fmt.Sprintf("module %s: %v", e.Path, e.Err) -} - -func (e *ModuleError) Unwrap() error { return e.Err } - -// An InvalidVersionError indicates an error specific to a version, with the -// module path unknown or specified externally. -// -// A [ModuleError] may wrap an InvalidVersionError, but an InvalidVersionError -// must not wrap a ModuleError. -type InvalidVersionError struct { - Version string - Pseudo bool - Err error -} - -// noun returns either "version" or "pseudo-version", depending on whether -// e.Version is a pseudo-version. -func (e *InvalidVersionError) noun() string { - if e.Pseudo { - return "pseudo-version" - } - return "version" -} - -func (e *InvalidVersionError) Error() string { - return fmt.Sprintf("%s %q invalid: %s", e.noun(), e.Version, e.Err) -} - -func (e *InvalidVersionError) Unwrap() error { return e.Err } - -// An InvalidPathError indicates a module, import, or file path doesn't -// satisfy all naming constraints. See [CheckPath], [CheckImportPath], -// and [CheckFilePath] for specific restrictions. -type InvalidPathError struct { - Kind string // "module", "import", or "file" - Path string - Err error -} - -func (e *InvalidPathError) Error() string { - return fmt.Sprintf("malformed %s path %q: %v", e.Kind, e.Path, e.Err) -} - -func (e *InvalidPathError) Unwrap() error { return e.Err } - -// Check checks that a given module path, version pair is valid. -// In addition to the path being a valid module path -// and the version being a valid semantic version, -// the two must correspond. -// For example, the path "yaml/v2" only corresponds to -// semantic versions beginning with "v2.". 
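The path/version correspondence Check enforces, in a short sketch (module paths are hypothetical):

package main

import (
	"fmt"

	"golang.org/x/mod/module"
)

func main() {
	fmt.Println(module.Check("example.com/m/v2", "v2.1.0")) // <nil>: path and version agree
	fmt.Println(module.Check("example.com/m/v2", "v1.0.0")) // error: a /v2 path needs a v2.x.y version
	fmt.Println(module.Check("example.com/m", "1.0.0"))     // error: not a semantic version
}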
-func Check(path, version string) error { - if err := CheckPath(path); err != nil { - return err - } - if !semver.IsValid(version) { - return &ModuleError{ - Path: path, - Err: &InvalidVersionError{Version: version, Err: errors.New("not a semantic version")}, - } - } - _, pathMajor, _ := SplitPathVersion(path) - if err := CheckPathMajor(version, pathMajor); err != nil { - return &ModuleError{Path: path, Err: err} - } - return nil -} - -// firstPathOK reports whether r can appear in the first element of a module path. -// The first element of the path must be an LDH domain name, at least for now. -// To avoid case ambiguity, the domain name must be entirely lower case. -func firstPathOK(r rune) bool { - return r == '-' || r == '.' || - '0' <= r && r <= '9' || - 'a' <= r && r <= 'z' -} - -// modPathOK reports whether r can appear in a module path element. -// Paths can be ASCII letters, ASCII digits, and limited ASCII punctuation: - . _ and ~. -// -// This matches what "go get" has historically recognized in import paths, -// and avoids confusing sequences like '%20' or '+' that would change meaning -// if used in a URL. -// -// TODO(rsc): We would like to allow Unicode letters, but that requires additional -// care in the safe encoding (see "escaped paths" above). -func modPathOK(r rune) bool { - if r < utf8.RuneSelf { - return r == '-' || r == '.' || r == '_' || r == '~' || - '0' <= r && r <= '9' || - 'A' <= r && r <= 'Z' || - 'a' <= r && r <= 'z' - } - return false -} - -// importPathOK reports whether r can appear in a package import path element. -// -// Import paths are intermediate between module paths and file paths: we allow -// disallow characters that would be confusing or ambiguous as arguments to -// 'go get' (such as '@' and ' ' ), but allow certain characters that are -// otherwise-unambiguous on the command line and historically used for some -// binary names (such as '++' as a suffix for compiler binaries and wrappers). -func importPathOK(r rune) bool { - return modPathOK(r) || r == '+' -} - -// fileNameOK reports whether r can appear in a file name. -// For now we allow all Unicode letters but otherwise limit to pathOK plus a few more punctuation characters. -// If we expand the set of allowed characters here, we have to -// work harder at detecting potential case-folding and normalization collisions. -// See note about "escaped paths" above. -func fileNameOK(r rune) bool { - if r < utf8.RuneSelf { - // Entire set of ASCII punctuation, from which we remove characters: - // ! " # $ % & ' ( ) * + , - . / : ; < = > ? @ [ \ ] ^ _ ` { | } ~ - // We disallow some shell special characters: " ' * < > ? ` | - // (Note that some of those are disallowed by the Windows file system as well.) - // We also disallow path separators / : and \ (fileNameOK is only called on path element characters). - // We allow spaces (U+0020) in file names. - const allowed = "!#$%&()+,-.=@[]^_{}~ " - if '0' <= r && r <= '9' || 'A' <= r && r <= 'Z' || 'a' <= r && r <= 'z' { - return true - } - return strings.ContainsRune(allowed, r) - } - // It may be OK to add more ASCII punctuation here, but only carefully. - // For example Windows disallows < > \, and macOS disallows :, so we must not allow those. - return unicode.IsLetter(r) -} - -// CheckPath checks that a module path is valid. -// A valid module path is a valid import path, as checked by [CheckImportPath], -// with three additional constraints. 
-// First, the leading path element (up to the first slash, if any), -// by convention a domain name, must contain only lower-case ASCII letters, -// ASCII digits, dots (U+002E), and dashes (U+002D); -// it must contain at least one dot and cannot start with a dash. -// Second, for a final path element of the form /vN, where N looks numeric -// (ASCII digits and dots) must not begin with a leading zero, must not be /v1, -// and must not contain any dots. For paths beginning with "gopkg.in/", -// this second requirement is replaced by a requirement that the path -// follow the gopkg.in server's conventions. -// Third, no path element may begin with a dot. -func CheckPath(path string) (err error) { - defer func() { - if err != nil { - err = &InvalidPathError{Kind: "module", Path: path, Err: err} - } - }() - - if err := checkPath(path, modulePath); err != nil { - return err - } - i := strings.Index(path, "/") - if i < 0 { - i = len(path) - } - if i == 0 { - return fmt.Errorf("leading slash") - } - if !strings.Contains(path[:i], ".") { - return fmt.Errorf("missing dot in first path element") - } - if path[0] == '-' { - return fmt.Errorf("leading dash in first path element") - } - for _, r := range path[:i] { - if !firstPathOK(r) { - return fmt.Errorf("invalid char %q in first path element", r) - } - } - if _, _, ok := SplitPathVersion(path); !ok { - return fmt.Errorf("invalid version") - } - return nil -} - -// CheckImportPath checks that an import path is valid. -// -// A valid import path consists of one or more valid path elements -// separated by slashes (U+002F). (It must not begin with nor end in a slash.) -// -// A valid path element is a non-empty string made up of -// ASCII letters, ASCII digits, and limited ASCII punctuation: - . _ and ~. -// It must not end with a dot (U+002E), nor contain two dots in a row. -// -// The element prefix up to the first dot must not be a reserved file name -// on Windows, regardless of case (CON, com1, NuL, and so on). The element -// must not have a suffix of a tilde followed by one or more ASCII digits -// (to exclude paths elements that look like Windows short-names). -// -// CheckImportPath may be less restrictive in the future, but see the -// top-level package documentation for additional information about -// subtleties of Unicode. -func CheckImportPath(path string) error { - if err := checkPath(path, importPath); err != nil { - return &InvalidPathError{Kind: "import", Path: path, Err: err} - } - return nil -} - -// pathKind indicates what kind of path we're checking. Module paths, -// import paths, and file paths have different restrictions. -type pathKind int - -const ( - modulePath pathKind = iota - importPath - filePath -) - -// checkPath checks that a general path is valid. kind indicates what -// specific constraints should be applied. -// -// checkPath returns an error describing why the path is not valid. -// Because these checks apply to module, import, and file paths, -// and because other checks may be applied, the caller is expected to wrap -// this error with [InvalidPathError]. 
-func checkPath(path string, kind pathKind) error { - if !utf8.ValidString(path) { - return fmt.Errorf("invalid UTF-8") - } - if path == "" { - return fmt.Errorf("empty string") - } - if path[0] == '-' && kind != filePath { - return fmt.Errorf("leading dash") - } - if strings.Contains(path, "//") { - return fmt.Errorf("double slash") - } - if path[len(path)-1] == '/' { - return fmt.Errorf("trailing slash") - } - elemStart := 0 - for i, r := range path { - if r == '/' { - if err := checkElem(path[elemStart:i], kind); err != nil { - return err - } - elemStart = i + 1 - } - } - if err := checkElem(path[elemStart:], kind); err != nil { - return err - } - return nil -} - -// checkElem checks whether an individual path element is valid. -func checkElem(elem string, kind pathKind) error { - if elem == "" { - return fmt.Errorf("empty path element") - } - if strings.Count(elem, ".") == len(elem) { - return fmt.Errorf("invalid path element %q", elem) - } - if elem[0] == '.' && kind == modulePath { - return fmt.Errorf("leading dot in path element") - } - if elem[len(elem)-1] == '.' { - return fmt.Errorf("trailing dot in path element") - } - for _, r := range elem { - ok := false - switch kind { - case modulePath: - ok = modPathOK(r) - case importPath: - ok = importPathOK(r) - case filePath: - ok = fileNameOK(r) - default: - panic(fmt.Sprintf("internal error: invalid kind %v", kind)) - } - if !ok { - return fmt.Errorf("invalid char %q", r) - } - } - - // Windows disallows a bunch of path elements, sadly. - // See https://docs.microsoft.com/en-us/windows/desktop/fileio/naming-a-file - short := elem - if i := strings.Index(short, "."); i >= 0 { - short = short[:i] - } - for _, bad := range badWindowsNames { - if strings.EqualFold(bad, short) { - return fmt.Errorf("%q disallowed as path element component on Windows", short) - } - } - - if kind == filePath { - // don't check for Windows short-names in file names. They're - // only an issue for import paths. - return nil - } - - // Reject path components that look like Windows short-names. - // Those usually end in a tilde followed by one or more ASCII digits. - if tilde := strings.LastIndexByte(short, '~'); tilde >= 0 && tilde < len(short)-1 { - suffix := short[tilde+1:] - suffixIsDigits := true - for _, r := range suffix { - if r < '0' || r > '9' { - suffixIsDigits = false - break - } - } - if suffixIsDigits { - return fmt.Errorf("trailing tilde and digits in path element") - } - } - - return nil -} - -// CheckFilePath checks that a slash-separated file path is valid. -// The definition of a valid file path is the same as the definition -// of a valid import path except that the set of allowed characters is larger: -// all Unicode letters, ASCII digits, the ASCII space character (U+0020), -// and the ASCII punctuation characters -// β€œ!#$%&()+,-.=@[]^_{}~”. -// (The excluded punctuation characters, " * < > ? ` ' | / \ and :, -// have special meanings in certain shells or operating systems.) -// -// CheckFilePath may be less restrictive in the future, but see the -// top-level package documentation for additional information about -// subtleties of Unicode. -func CheckFilePath(path string) error { - if err := checkPath(path, filePath); err != nil { - return &InvalidPathError{Kind: "file", Path: path, Err: err} - } - return nil -} - -// badWindowsNames are the reserved file path elements on Windows. 
-// See https://docs.microsoft.com/en-us/windows/desktop/fileio/naming-a-file -var badWindowsNames = []string{ - "CON", - "PRN", - "AUX", - "NUL", - "COM1", - "COM2", - "COM3", - "COM4", - "COM5", - "COM6", - "COM7", - "COM8", - "COM9", - "LPT1", - "LPT2", - "LPT3", - "LPT4", - "LPT5", - "LPT6", - "LPT7", - "LPT8", - "LPT9", -} - -// SplitPathVersion returns prefix and major version such that prefix+pathMajor == path -// and version is either empty or "/vN" for N >= 2. -// As a special case, gopkg.in paths are recognized directly; -// they require ".vN" instead of "/vN", and for all N, not just N >= 2. -// SplitPathVersion returns with ok = false when presented with -// a path whose last path element does not satisfy the constraints -// applied by [CheckPath], such as "example.com/pkg/v1" or "example.com/pkg/v1.2". -func SplitPathVersion(path string) (prefix, pathMajor string, ok bool) { - if strings.HasPrefix(path, "gopkg.in/") { - return splitGopkgIn(path) - } - - i := len(path) - dot := false - for i > 0 && ('0' <= path[i-1] && path[i-1] <= '9' || path[i-1] == '.') { - if path[i-1] == '.' { - dot = true - } - i-- - } - if i <= 1 || i == len(path) || path[i-1] != 'v' || path[i-2] != '/' { - return path, "", true - } - prefix, pathMajor = path[:i-2], path[i-2:] - if dot || len(pathMajor) <= 2 || pathMajor[2] == '0' || pathMajor == "/v1" { - return path, "", false - } - return prefix, pathMajor, true -} - -// splitGopkgIn is like SplitPathVersion but only for gopkg.in paths. -func splitGopkgIn(path string) (prefix, pathMajor string, ok bool) { - if !strings.HasPrefix(path, "gopkg.in/") { - return path, "", false - } - i := len(path) - if strings.HasSuffix(path, "-unstable") { - i -= len("-unstable") - } - for i > 0 && ('0' <= path[i-1] && path[i-1] <= '9') { - i-- - } - if i <= 1 || path[i-1] != 'v' || path[i-2] != '.' { - // All gopkg.in paths must end in vN for some N. - return path, "", false - } - prefix, pathMajor = path[:i-2], path[i-2:] - if len(pathMajor) <= 2 || pathMajor[2] == '0' && pathMajor != ".v0" { - return path, "", false - } - return prefix, pathMajor, true -} - -// MatchPathMajor reports whether the semantic version v -// matches the path major version pathMajor. -// -// MatchPathMajor returns true if and only if [CheckPathMajor] returns nil. -func MatchPathMajor(v, pathMajor string) bool { - return CheckPathMajor(v, pathMajor) == nil -} - -// CheckPathMajor returns a non-nil error if the semantic version v -// does not match the path major version pathMajor. -func CheckPathMajor(v, pathMajor string) error { - // TODO(jayconrod): return errors or panic for invalid inputs. This function - // (and others) was covered by integration tests for cmd/go, and surrounding - // code protected against invalid inputs like non-canonical versions. - if strings.HasPrefix(pathMajor, ".v") && strings.HasSuffix(pathMajor, "-unstable") { - pathMajor = strings.TrimSuffix(pathMajor, "-unstable") - } - if strings.HasPrefix(v, "v0.0.0-") && pathMajor == ".v1" { - // Allow old bug in pseudo-versions that generated v0.0.0- pseudoversion for gopkg .v1. - // For example, gopkg.in/yaml.v2@v2.2.1's go.mod requires gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405. - return nil - } - m := semver.Major(v) - if pathMajor == "" { - if m == "v0" || m == "v1" || semver.Build(v) == "+incompatible" { - return nil - } - pathMajor = "v0 or v1" - } else if pathMajor[0] == '/' || pathMajor[0] == '.' 
{ - if m == pathMajor[1:] { - return nil - } - pathMajor = pathMajor[1:] - } - return &InvalidVersionError{ - Version: v, - Err: fmt.Errorf("should be %s, not %s", pathMajor, semver.Major(v)), - } -} - -// PathMajorPrefix returns the major-version tag prefix implied by pathMajor. -// An empty PathMajorPrefix allows either v0 or v1. -// -// Note that [MatchPathMajor] may accept some versions that do not actually begin -// with this prefix: namely, it accepts a 'v0.0.0-' prefix for a '.v1' -// pathMajor, even though that pathMajor implies 'v1' tagging. -func PathMajorPrefix(pathMajor string) string { - if pathMajor == "" { - return "" - } - if pathMajor[0] != '/' && pathMajor[0] != '.' { - panic("pathMajor suffix " + pathMajor + " passed to PathMajorPrefix lacks separator") - } - if strings.HasPrefix(pathMajor, ".v") && strings.HasSuffix(pathMajor, "-unstable") { - pathMajor = strings.TrimSuffix(pathMajor, "-unstable") - } - m := pathMajor[1:] - if m != semver.Major(m) { - panic("pathMajor suffix " + pathMajor + "passed to PathMajorPrefix is not a valid major version") - } - return m -} - -// CanonicalVersion returns the canonical form of the version string v. -// It is the same as [semver.Canonical] except that it preserves the special build suffix "+incompatible". -func CanonicalVersion(v string) string { - cv := semver.Canonical(v) - if semver.Build(v) == "+incompatible" { - cv += "+incompatible" - } - return cv -} - -// Sort sorts the list by Path, breaking ties by comparing [Version] fields. -// The Version fields are interpreted as semantic versions (using [semver.Compare]) -// optionally followed by a tie-breaking suffix introduced by a slash character, -// like in "v0.0.1/go.mod". -func Sort(list []Version) { - sort.Slice(list, func(i, j int) bool { - mi := list[i] - mj := list[j] - if mi.Path != mj.Path { - return mi.Path < mj.Path - } - // To help go.sum formatting, allow version/file. - // Compare semver prefix by semver rules, - // file by string order. - vi := mi.Version - vj := mj.Version - var fi, fj string - if k := strings.Index(vi, "/"); k >= 0 { - vi, fi = vi[:k], vi[k:] - } - if k := strings.Index(vj, "/"); k >= 0 { - vj, fj = vj[:k], vj[k:] - } - if vi != vj { - return semver.Compare(vi, vj) < 0 - } - return fi < fj - }) -} - -// EscapePath returns the escaped form of the given module path. -// It fails if the module path is invalid. -func EscapePath(path string) (escaped string, err error) { - if err := CheckPath(path); err != nil { - return "", err - } - - return escapeString(path) -} - -// EscapeVersion returns the escaped form of the given module version. -// Versions are allowed to be in non-semver form but must be valid file names -// and not contain exclamation marks. -func EscapeVersion(v string) (escaped string, err error) { - if err := checkElem(v, filePath); err != nil || strings.Contains(v, "!") { - return "", &InvalidVersionError{ - Version: v, - Err: fmt.Errorf("disallowed version string"), - } - } - return escapeString(v) -} - -func escapeString(s string) (escaped string, err error) { - haveUpper := false - for _, r := range s { - if r == '!' || r >= utf8.RuneSelf { - // This should be disallowed by CheckPath, but diagnose anyway. - // The correctness of the escaping loop below depends on it. 
- return "", fmt.Errorf("internal error: inconsistency in EscapePath") - } - if 'A' <= r && r <= 'Z' { - haveUpper = true - } - } - - if !haveUpper { - return s, nil - } - - var buf []byte - for _, r := range s { - if 'A' <= r && r <= 'Z' { - buf = append(buf, '!', byte(r+'a'-'A')) - } else { - buf = append(buf, byte(r)) - } - } - return string(buf), nil -} - -// UnescapePath returns the module path for the given escaped path. -// It fails if the escaped path is invalid or describes an invalid path. -func UnescapePath(escaped string) (path string, err error) { - path, ok := unescapeString(escaped) - if !ok { - return "", fmt.Errorf("invalid escaped module path %q", escaped) - } - if err := CheckPath(path); err != nil { - return "", fmt.Errorf("invalid escaped module path %q: %v", escaped, err) - } - return path, nil -} - -// UnescapeVersion returns the version string for the given escaped version. -// It fails if the escaped form is invalid or describes an invalid version. -// Versions are allowed to be in non-semver form but must be valid file names -// and not contain exclamation marks. -func UnescapeVersion(escaped string) (v string, err error) { - v, ok := unescapeString(escaped) - if !ok { - return "", fmt.Errorf("invalid escaped version %q", escaped) - } - if err := checkElem(v, filePath); err != nil { - return "", fmt.Errorf("invalid escaped version %q: %v", v, err) - } - return v, nil -} - -func unescapeString(escaped string) (string, bool) { - var buf []byte - - bang := false - for _, r := range escaped { - if r >= utf8.RuneSelf { - return "", false - } - if bang { - bang = false - if r < 'a' || 'z' < r { - return "", false - } - buf = append(buf, byte(r+'A'-'a')) - continue - } - if r == '!' { - bang = true - continue - } - if 'A' <= r && r <= 'Z' { - return "", false - } - buf = append(buf, byte(r)) - } - if bang { - return "", false - } - return string(buf), true -} - -// MatchPrefixPatterns reports whether any path prefix of target matches one of -// the glob patterns (as defined by [path.Match]) in the comma-separated globs -// list. This implements the algorithm used when matching a module path to the -// GOPRIVATE environment variable, as described by 'go help module-private'. -// -// It ignores any empty or malformed patterns in the list. -// Trailing slashes on patterns are ignored. -func MatchPrefixPatterns(globs, target string) bool { - for globs != "" { - // Extract next non-empty glob in comma-separated list. - var glob string - if i := strings.Index(globs, ","); i >= 0 { - glob, globs = globs[:i], globs[i+1:] - } else { - glob, globs = globs, "" - } - glob = strings.TrimSuffix(glob, "/") - if glob == "" { - continue - } - - // A glob with N+1 path elements (N slashes) needs to be matched - // against the first N+1 path elements of target, - // which end just before the N+1'th slash. - n := strings.Count(glob, "/") - prefix := target - // Walk target, counting slashes, truncating at the N+1'th slash. - for i := 0; i < len(target); i++ { - if target[i] == '/' { - if n == 0 { - prefix = target[:i] - break - } - n-- - } - } - if n > 0 { - // Not enough prefix elements. 
- continue - } - matched, _ := path.Match(glob, prefix) - if matched { - return true - } - } - return false -} diff --git a/go/extractor/vendor/golang.org/x/mod/module/pseudo.go b/go/extractor/vendor/golang.org/x/mod/module/pseudo.go deleted file mode 100644 index 9cf19d3254eb..000000000000 --- a/go/extractor/vendor/golang.org/x/mod/module/pseudo.go +++ /dev/null @@ -1,250 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Pseudo-versions -// -// Code authors are expected to tag the revisions they want users to use, -// including prereleases. However, not all authors tag versions at all, -// and not all commits a user might want to try will have tags. -// A pseudo-version is a version with a special form that allows us to -// address an untagged commit and order that version with respect to -// other versions we might encounter. -// -// A pseudo-version takes one of the general forms: -// -// (1) vX.0.0-yyyymmddhhmmss-abcdef123456 -// (2) vX.Y.(Z+1)-0.yyyymmddhhmmss-abcdef123456 -// (3) vX.Y.(Z+1)-0.yyyymmddhhmmss-abcdef123456+incompatible -// (4) vX.Y.Z-pre.0.yyyymmddhhmmss-abcdef123456 -// (5) vX.Y.Z-pre.0.yyyymmddhhmmss-abcdef123456+incompatible -// -// If there is no recently tagged version with the right major version vX, -// then form (1) is used, creating a space of pseudo-versions at the bottom -// of the vX version range, less than any tagged version, including the unlikely v0.0.0. -// -// If the most recent tagged version before the target commit is vX.Y.Z or vX.Y.Z+incompatible, -// then the pseudo-version uses form (2) or (3), making it a prerelease for the next -// possible semantic version after vX.Y.Z. The leading 0 segment in the prerelease string -// ensures that the pseudo-version compares less than possible future explicit prereleases -// like vX.Y.(Z+1)-rc1 or vX.Y.(Z+1)-1. -// -// If the most recent tagged version before the target commit is vX.Y.Z-pre or vX.Y.Z-pre+incompatible, -// then the pseudo-version uses form (4) or (5), making it a slightly later prerelease. - -package module - -import ( - "errors" - "fmt" - "strings" - "time" - - "golang.org/x/mod/internal/lazyregexp" - "golang.org/x/mod/semver" -) - -var pseudoVersionRE = lazyregexp.New(`^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)\d{14}-[A-Za-z0-9]+(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$`) - -const PseudoVersionTimestampFormat = "20060102150405" - -// PseudoVersion returns a pseudo-version for the given major version ("v1") -// preexisting older tagged version ("" or "v1.2.3" or "v1.2.3-pre"), revision time, -// and revision identifier (usually a 12-byte commit hash prefix). -func PseudoVersion(major, older string, t time.Time, rev string) string { - if major == "" { - major = "v0" - } - segment := fmt.Sprintf("%s-%s", t.UTC().Format(PseudoVersionTimestampFormat), rev) - build := semver.Build(older) - older = semver.Canonical(older) - if older == "" { - return major + ".0.0-" + segment // form (1) - } - if semver.Prerelease(older) != "" { - return older + ".0." + segment + build // form (4), (5) - } - - // Form (2), (3). - // Extract patch from vMAJOR.MINOR.PATCH - i := strings.LastIndex(older, ".") + 1 - v, patch := older[:i], older[i:] - - // Reassemble. - return v + incDecimal(patch) + "-0." + segment + build -} - -// ZeroPseudoVersion returns a pseudo-version with a zero timestamp and -// revision, which may be used as a placeholder. 
-func ZeroPseudoVersion(major string) string { - return PseudoVersion(major, "", time.Time{}, "000000000000") -} - -// incDecimal returns the decimal string incremented by 1. -func incDecimal(decimal string) string { - // Scan right to left turning 9s to 0s until you find a digit to increment. - digits := []byte(decimal) - i := len(digits) - 1 - for ; i >= 0 && digits[i] == '9'; i-- { - digits[i] = '0' - } - if i >= 0 { - digits[i]++ - } else { - // digits is all zeros - digits[0] = '1' - digits = append(digits, '0') - } - return string(digits) -} - -// decDecimal returns the decimal string decremented by 1, or the empty string -// if the decimal is all zeroes. -func decDecimal(decimal string) string { - // Scan right to left turning 0s to 9s until you find a digit to decrement. - digits := []byte(decimal) - i := len(digits) - 1 - for ; i >= 0 && digits[i] == '0'; i-- { - digits[i] = '9' - } - if i < 0 { - // decimal is all zeros - return "" - } - if i == 0 && digits[i] == '1' && len(digits) > 1 { - digits = digits[1:] - } else { - digits[i]-- - } - return string(digits) -} - -// IsPseudoVersion reports whether v is a pseudo-version. -func IsPseudoVersion(v string) bool { - return strings.Count(v, "-") >= 2 && semver.IsValid(v) && pseudoVersionRE.MatchString(v) -} - -// IsZeroPseudoVersion returns whether v is a pseudo-version with a zero base, -// timestamp, and revision, as returned by [ZeroPseudoVersion]. -func IsZeroPseudoVersion(v string) bool { - return v == ZeroPseudoVersion(semver.Major(v)) -} - -// PseudoVersionTime returns the time stamp of the pseudo-version v. -// It returns an error if v is not a pseudo-version or if the time stamp -// embedded in the pseudo-version is not a valid time. -func PseudoVersionTime(v string) (time.Time, error) { - _, timestamp, _, _, err := parsePseudoVersion(v) - if err != nil { - return time.Time{}, err - } - t, err := time.Parse("20060102150405", timestamp) - if err != nil { - return time.Time{}, &InvalidVersionError{ - Version: v, - Pseudo: true, - Err: fmt.Errorf("malformed time %q", timestamp), - } - } - return t, nil -} - -// PseudoVersionRev returns the revision identifier of the pseudo-version v. -// It returns an error if v is not a pseudo-version. -func PseudoVersionRev(v string) (rev string, err error) { - _, _, rev, _, err = parsePseudoVersion(v) - return -} - -// PseudoVersionBase returns the canonical parent version, if any, upon which -// the pseudo-version v is based. -// -// If v has no parent version (that is, if it is "vX.0.0-[…]"), -// PseudoVersionBase returns the empty string and a nil error. -func PseudoVersionBase(v string) (string, error) { - base, _, _, build, err := parsePseudoVersion(v) - if err != nil { - return "", err - } - - switch pre := semver.Prerelease(base); pre { - case "": - // vX.0.0-yyyymmddhhmmss-abcdef123456 β†’ "" - if build != "" { - // Pseudo-versions of the form vX.0.0-yyyymmddhhmmss-abcdef123456+incompatible - // are nonsensical: the "vX.0.0-" prefix implies that there is no parent tag, - // but the "+incompatible" suffix implies that the major version of - // the parent tag is not compatible with the module's import path. - // - // There are a few such entries in the index generated by proxy.golang.org, - // but we believe those entries were generated by the proxy itself. 
- return "", &InvalidVersionError{ - Version: v, - Pseudo: true, - Err: fmt.Errorf("lacks base version, but has build metadata %q", build), - } - } - return "", nil - - case "-0": - // vX.Y.(Z+1)-0.yyyymmddhhmmss-abcdef123456 β†’ vX.Y.Z - // vX.Y.(Z+1)-0.yyyymmddhhmmss-abcdef123456+incompatible β†’ vX.Y.Z+incompatible - base = strings.TrimSuffix(base, pre) - i := strings.LastIndexByte(base, '.') - if i < 0 { - panic("base from parsePseudoVersion missing patch number: " + base) - } - patch := decDecimal(base[i+1:]) - if patch == "" { - // vX.0.0-0 is invalid, but has been observed in the wild in the index - // generated by requests to proxy.golang.org. - // - // NOTE(bcmills): I cannot find a historical bug that accounts for - // pseudo-versions of this form, nor have I seen such versions in any - // actual go.mod files. If we find actual examples of this form and a - // reasonable theory of how they came into existence, it seems fine to - // treat them as equivalent to vX.0.0 (especially since the invalid - // pseudo-versions have lower precedence than the real ones). For now, we - // reject them. - return "", &InvalidVersionError{ - Version: v, - Pseudo: true, - Err: fmt.Errorf("version before %s would have negative patch number", base), - } - } - return base[:i+1] + patch + build, nil - - default: - // vX.Y.Z-pre.0.yyyymmddhhmmss-abcdef123456 β†’ vX.Y.Z-pre - // vX.Y.Z-pre.0.yyyymmddhhmmss-abcdef123456+incompatible β†’ vX.Y.Z-pre+incompatible - if !strings.HasSuffix(base, ".0") { - panic(`base from parsePseudoVersion missing ".0" before date: ` + base) - } - return strings.TrimSuffix(base, ".0") + build, nil - } -} - -var errPseudoSyntax = errors.New("syntax error") - -func parsePseudoVersion(v string) (base, timestamp, rev, build string, err error) { - if !IsPseudoVersion(v) { - return "", "", "", "", &InvalidVersionError{ - Version: v, - Pseudo: true, - Err: errPseudoSyntax, - } - } - build = semver.Build(v) - v = strings.TrimSuffix(v, build) - j := strings.LastIndex(v, "-") - v, rev = v[:j], v[j+1:] - i := strings.LastIndex(v, "-") - if j := strings.LastIndex(v, "."); j > i { - base = v[:j] // "vX.Y.Z-pre.0" or "vX.Y.(Z+1)-0" - timestamp = v[j+1:] - } else { - base = v[:i] // "vX.0.0" - timestamp = v[i+1:] - } - return base, timestamp, rev, build, nil -} diff --git a/go/extractor/vendor/golang.org/x/mod/semver/BUILD.bazel b/go/extractor/vendor/golang.org/x/mod/semver/BUILD.bazel deleted file mode 100644 index 760be56c9e08..000000000000 --- a/go/extractor/vendor/golang.org/x/mod/semver/BUILD.bazel +++ /dev/null @@ -1,11 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "semver", - srcs = ["semver.go"], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/mod/semver", - importpath = "golang.org/x/mod/semver", - visibility = ["//visibility:public"], -) diff --git a/go/extractor/vendor/golang.org/x/mod/semver/semver.go b/go/extractor/vendor/golang.org/x/mod/semver/semver.go deleted file mode 100644 index 9a2dfd33a770..000000000000 --- a/go/extractor/vendor/golang.org/x/mod/semver/semver.go +++ /dev/null @@ -1,401 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package semver implements comparison of semantic version strings. -// In this package, semantic version strings must begin with a leading "v", -// as in "v1.0.0". 
-// -// The general form of a semantic version string accepted by this package is -// -// vMAJOR[.MINOR[.PATCH[-PRERELEASE][+BUILD]]] -// -// where square brackets indicate optional parts of the syntax; -// MAJOR, MINOR, and PATCH are decimal integers without extra leading zeros; -// PRERELEASE and BUILD are each a series of non-empty dot-separated identifiers -// using only alphanumeric characters and hyphens; and -// all-numeric PRERELEASE identifiers must not have leading zeros. -// -// This package follows Semantic Versioning 2.0.0 (see semver.org) -// with two exceptions. First, it requires the "v" prefix. Second, it recognizes -// vMAJOR and vMAJOR.MINOR (with no prerelease or build suffixes) -// as shorthands for vMAJOR.0.0 and vMAJOR.MINOR.0. -package semver - -import "sort" - -// parsed returns the parsed form of a semantic version string. -type parsed struct { - major string - minor string - patch string - short string - prerelease string - build string -} - -// IsValid reports whether v is a valid semantic version string. -func IsValid(v string) bool { - _, ok := parse(v) - return ok -} - -// Canonical returns the canonical formatting of the semantic version v. -// It fills in any missing .MINOR or .PATCH and discards build metadata. -// Two semantic versions compare equal only if their canonical formattings -// are identical strings. -// The canonical invalid semantic version is the empty string. -func Canonical(v string) string { - p, ok := parse(v) - if !ok { - return "" - } - if p.build != "" { - return v[:len(v)-len(p.build)] - } - if p.short != "" { - return v + p.short - } - return v -} - -// Major returns the major version prefix of the semantic version v. -// For example, Major("v2.1.0") == "v2". -// If v is an invalid semantic version string, Major returns the empty string. -func Major(v string) string { - pv, ok := parse(v) - if !ok { - return "" - } - return v[:1+len(pv.major)] -} - -// MajorMinor returns the major.minor version prefix of the semantic version v. -// For example, MajorMinor("v2.1.0") == "v2.1". -// If v is an invalid semantic version string, MajorMinor returns the empty string. -func MajorMinor(v string) string { - pv, ok := parse(v) - if !ok { - return "" - } - i := 1 + len(pv.major) - if j := i + 1 + len(pv.minor); j <= len(v) && v[i] == '.' && v[i+1:j] == pv.minor { - return v[:j] - } - return v[:i] + "." + pv.minor -} - -// Prerelease returns the prerelease suffix of the semantic version v. -// For example, Prerelease("v2.1.0-pre+meta") == "-pre". -// If v is an invalid semantic version string, Prerelease returns the empty string. -func Prerelease(v string) string { - pv, ok := parse(v) - if !ok { - return "" - } - return pv.prerelease -} - -// Build returns the build suffix of the semantic version v. -// For example, Build("v2.1.0+meta") == "+meta". -// If v is an invalid semantic version string, Build returns the empty string. -func Build(v string) string { - pv, ok := parse(v) - if !ok { - return "" - } - return pv.build -} - -// Compare returns an integer comparing two versions according to -// semantic version precedence. -// The result will be 0 if v == w, -1 if v < w, or +1 if v > w. -// -// An invalid semantic version string is considered less than a valid one. -// All invalid semantic version strings compare equal to each other. 
-func Compare(v, w string) int { - pv, ok1 := parse(v) - pw, ok2 := parse(w) - if !ok1 && !ok2 { - return 0 - } - if !ok1 { - return -1 - } - if !ok2 { - return +1 - } - if c := compareInt(pv.major, pw.major); c != 0 { - return c - } - if c := compareInt(pv.minor, pw.minor); c != 0 { - return c - } - if c := compareInt(pv.patch, pw.patch); c != 0 { - return c - } - return comparePrerelease(pv.prerelease, pw.prerelease) -} - -// Max canonicalizes its arguments and then returns the version string -// that compares greater. -// -// Deprecated: use [Compare] instead. In most cases, returning a canonicalized -// version is not expected or desired. -func Max(v, w string) string { - v = Canonical(v) - w = Canonical(w) - if Compare(v, w) > 0 { - return v - } - return w -} - -// ByVersion implements [sort.Interface] for sorting semantic version strings. -type ByVersion []string - -func (vs ByVersion) Len() int { return len(vs) } -func (vs ByVersion) Swap(i, j int) { vs[i], vs[j] = vs[j], vs[i] } -func (vs ByVersion) Less(i, j int) bool { - cmp := Compare(vs[i], vs[j]) - if cmp != 0 { - return cmp < 0 - } - return vs[i] < vs[j] -} - -// Sort sorts a list of semantic version strings using [ByVersion]. -func Sort(list []string) { - sort.Sort(ByVersion(list)) -} - -func parse(v string) (p parsed, ok bool) { - if v == "" || v[0] != 'v' { - return - } - p.major, v, ok = parseInt(v[1:]) - if !ok { - return - } - if v == "" { - p.minor = "0" - p.patch = "0" - p.short = ".0.0" - return - } - if v[0] != '.' { - ok = false - return - } - p.minor, v, ok = parseInt(v[1:]) - if !ok { - return - } - if v == "" { - p.patch = "0" - p.short = ".0" - return - } - if v[0] != '.' { - ok = false - return - } - p.patch, v, ok = parseInt(v[1:]) - if !ok { - return - } - if len(v) > 0 && v[0] == '-' { - p.prerelease, v, ok = parsePrerelease(v) - if !ok { - return - } - } - if len(v) > 0 && v[0] == '+' { - p.build, v, ok = parseBuild(v) - if !ok { - return - } - } - if v != "" { - ok = false - return - } - ok = true - return -} - -func parseInt(v string) (t, rest string, ok bool) { - if v == "" { - return - } - if v[0] < '0' || '9' < v[0] { - return - } - i := 1 - for i < len(v) && '0' <= v[i] && v[i] <= '9' { - i++ - } - if v[0] == '0' && i != 1 { - return - } - return v[:i], v[i:], true -} - -func parsePrerelease(v string) (t, rest string, ok bool) { - // "A pre-release version MAY be denoted by appending a hyphen and - // a series of dot separated identifiers immediately following the patch version. - // Identifiers MUST comprise only ASCII alphanumerics and hyphen [0-9A-Za-z-]. - // Identifiers MUST NOT be empty. Numeric identifiers MUST NOT include leading zeroes." - if v == "" || v[0] != '-' { - return - } - i := 1 - start := 1 - for i < len(v) && v[i] != '+' { - if !isIdentChar(v[i]) && v[i] != '.' { - return - } - if v[i] == '.' { - if start == i || isBadNum(v[start:i]) { - return - } - start = i + 1 - } - i++ - } - if start == i || isBadNum(v[start:i]) { - return - } - return v[:i], v[i:], true -} - -func parseBuild(v string) (t, rest string, ok bool) { - if v == "" || v[0] != '+' { - return - } - i := 1 - start := 1 - for i < len(v) { - if !isIdentChar(v[i]) && v[i] != '.' { - return - } - if v[i] == '.' 
{ - if start == i { - return - } - start = i + 1 - } - i++ - } - if start == i { - return - } - return v[:i], v[i:], true -} - -func isIdentChar(c byte) bool { - return 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' || c == '-' -} - -func isBadNum(v string) bool { - i := 0 - for i < len(v) && '0' <= v[i] && v[i] <= '9' { - i++ - } - return i == len(v) && i > 1 && v[0] == '0' -} - -func isNum(v string) bool { - i := 0 - for i < len(v) && '0' <= v[i] && v[i] <= '9' { - i++ - } - return i == len(v) -} - -func compareInt(x, y string) int { - if x == y { - return 0 - } - if len(x) < len(y) { - return -1 - } - if len(x) > len(y) { - return +1 - } - if x < y { - return -1 - } else { - return +1 - } -} - -func comparePrerelease(x, y string) int { - // "When major, minor, and patch are equal, a pre-release version has - // lower precedence than a normal version. - // Example: 1.0.0-alpha < 1.0.0. - // Precedence for two pre-release versions with the same major, minor, - // and patch version MUST be determined by comparing each dot separated - // identifier from left to right until a difference is found as follows: - // identifiers consisting of only digits are compared numerically and - // identifiers with letters or hyphens are compared lexically in ASCII - // sort order. Numeric identifiers always have lower precedence than - // non-numeric identifiers. A larger set of pre-release fields has a - // higher precedence than a smaller set, if all of the preceding - // identifiers are equal. - // Example: 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-alpha.beta < - // 1.0.0-beta < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0." - if x == y { - return 0 - } - if x == "" { - return +1 - } - if y == "" { - return -1 - } - for x != "" && y != "" { - x = x[1:] // skip - or . - y = y[1:] // skip - or . - var dx, dy string - dx, x = nextIdent(x) - dy, y = nextIdent(y) - if dx != dy { - ix := isNum(dx) - iy := isNum(dy) - if ix != iy { - if ix { - return -1 - } else { - return +1 - } - } - if ix { - if len(dx) < len(dy) { - return -1 - } - if len(dx) > len(dy) { - return +1 - } - } - if dx < dy { - return -1 - } else { - return +1 - } - } - } - if x == "" { - return -1 - } else { - return +1 - } -} - -func nextIdent(x string) (dx, rest string) { - i := 0 - for i < len(x) && x[i] != '.' { - i++ - } - return x[:i], x[i:] -} diff --git a/go/extractor/vendor/golang.org/x/tools/LICENSE b/go/extractor/vendor/golang.org/x/tools/LICENSE deleted file mode 100644 index 6a66aea5eafe..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/go/extractor/vendor/golang.org/x/tools/PATENTS b/go/extractor/vendor/golang.org/x/tools/PATENTS deleted file mode 100644 index 733099041f84..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/PATENTS +++ /dev/null @@ -1,22 +0,0 @@ -Additional IP Rights Grant (Patents) - -"This implementation" means the copyrightable works distributed by -Google as part of the Go project. - -Google hereby grants to You a perpetual, worldwide, non-exclusive, -no-charge, royalty-free, irrevocable (except as stated in this section) -patent license to make, have made, use, offer to sell, sell, import, -transfer and otherwise run, modify and propagate the contents of this -implementation of Go, where such license applies only to those patent -claims, both currently owned or controlled by Google and acquired in -the future, licensable by Google that are necessarily infringed by this -implementation of Go. This grant does not include claims that would be -infringed only as a consequence of further modification of this -implementation. If you or your agent or exclusive licensee institute or -order or agree to the institution of patent litigation against any -entity (including a cross-claim or counterclaim in a lawsuit) alleging -that this implementation of Go or any code incorporated within this -implementation of Go constitutes direct or contributory patent -infringement, or inducement of patent infringement, then any patent -rights granted to you under this License for this implementation of Go -shall terminate as of the date such litigation is filed. diff --git a/go/extractor/vendor/golang.org/x/tools/go/gcexportdata/BUILD.bazel b/go/extractor/vendor/golang.org/x/tools/go/gcexportdata/BUILD.bazel deleted file mode 100644 index 5d68c2fe989e..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/go/gcexportdata/BUILD.bazel +++ /dev/null @@ -1,15 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "gcexportdata", - srcs = [ - "gcexportdata.go", - "importer.go", - ], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/tools/go/gcexportdata", - importpath = "golang.org/x/tools/go/gcexportdata", - visibility = ["//visibility:public"], - deps = ["//go/extractor/vendor/golang.org/x/tools/internal/gcimporter"], -) diff --git a/go/extractor/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go b/go/extractor/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go deleted file mode 100644 index 03543bd4bb8f..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go +++ /dev/null @@ -1,186 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package gcexportdata provides functions for locating, reading, and -// writing export data files containing type information produced by the -// gc compiler. This package supports go1.7 export data format and all -// later versions. -// -// Although it might seem convenient for this package to live alongside -// go/types in the standard library, this would cause version skew -// problems for developer tools that use it, since they must be able to -// consume the outputs of the gc compiler both before and after a Go -// update such as from Go 1.7 to Go 1.8. Because this package lives in -// golang.org/x/tools, sites can update their version of this repo some -// time before the Go 1.8 release and rebuild and redeploy their -// developer tools, which will then be able to consume both Go 1.7 and -// Go 1.8 export data files, so they will work before and after the -// Go update. (See discussion at https://golang.org/issue/15651.) -package gcexportdata // import "golang.org/x/tools/go/gcexportdata" - -import ( - "bufio" - "bytes" - "encoding/json" - "fmt" - "go/token" - "go/types" - "io" - "os/exec" - - "golang.org/x/tools/internal/gcimporter" -) - -// Find returns the name of an object (.o) or archive (.a) file -// containing type information for the specified import path, -// using the go command. -// If no file was found, an empty filename is returned. -// -// A relative srcDir is interpreted relative to the current working directory. -// -// Find also returns the package's resolved (canonical) import path, -// reflecting the effects of srcDir and vendoring on importPath. -// -// Deprecated: Use the higher-level API in golang.org/x/tools/go/packages, -// which is more efficient. -func Find(importPath, srcDir string) (filename, path string) { - cmd := exec.Command("go", "list", "-json", "-export", "--", importPath) - cmd.Dir = srcDir - out, err := cmd.CombinedOutput() - if err != nil { - return "", "" - } - var data struct { - ImportPath string - Export string - } - json.Unmarshal(out, &data) - return data.Export, data.ImportPath -} - -// NewReader returns a reader for the export data section of an object -// (.o) or archive (.a) file read from r. The new reader may provide -// additional trailing data beyond the end of the export data. -func NewReader(r io.Reader) (io.Reader, error) { - buf := bufio.NewReader(r) - _, size, err := gcimporter.FindExportData(buf) - if err != nil { - return nil, err - } - - if size >= 0 { - // We were given an archive and found the __.PKGDEF in it. - // This tells us the size of the export data, and we don't - // need to return the entire file. - return &io.LimitedReader{ - R: buf, - N: size, - }, nil - } else { - // We were given an object file. As such, we don't know how large - // the export data is and must return the entire file. - return buf, nil - } -} - -// readAll works the same way as io.ReadAll, but avoids allocations and copies -// by preallocating a byte slice of the necessary size if the size is known up -// front. This is always possible when the input is an archive. In that case, -// NewReader will return the known size using an io.LimitedReader. 
-func readAll(r io.Reader) ([]byte, error) { - if lr, ok := r.(*io.LimitedReader); ok { - data := make([]byte, lr.N) - _, err := io.ReadFull(lr, data) - return data, err - } - return io.ReadAll(r) -} - -// Read reads export data from in, decodes it, and returns type -// information for the package. -// -// The package path (effectively its linker symbol prefix) is -// specified by path, since unlike the package name, this information -// may not be recorded in the export data. -// -// File position information is added to fset. -// -// Read may inspect and add to the imports map to ensure that references -// within the export data to other packages are consistent. The caller -// must ensure that imports[path] does not exist, or exists but is -// incomplete (see types.Package.Complete), and Read inserts the -// resulting package into this map entry. -// -// On return, the state of the reader is undefined. -func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) { - data, err := readAll(in) - if err != nil { - return nil, fmt.Errorf("reading export data for %q: %v", path, err) - } - - if bytes.HasPrefix(data, []byte("!")) { - return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path) - } - - // The indexed export format starts with an 'i'; the older - // binary export format starts with a 'c', 'd', or 'v' - // (from "version"). Select appropriate importer. - if len(data) > 0 { - switch data[0] { - case 'v', 'c', 'd': // binary, till go1.10 - return nil, fmt.Errorf("binary (%c) import format is no longer supported", data[0]) - - case 'i': // indexed, till go1.19 - _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path) - return pkg, err - - case 'u': // unified, from go1.20 - _, pkg, err := gcimporter.UImportData(fset, imports, data[1:], path) - return pkg, err - - default: - l := len(data) - if l > 10 { - l = 10 - } - return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), path) - } - } - return nil, fmt.Errorf("empty export data for %s", path) -} - -// Write writes encoded type information for the specified package to out. -// The FileSet provides file position information for named objects. -func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error { - if _, err := io.WriteString(out, "i"); err != nil { - return err - } - return gcimporter.IExportData(out, fset, pkg) -} - -// ReadBundle reads an export bundle from in, decodes it, and returns type -// information for the packages. -// File position information is added to fset. -// -// ReadBundle may inspect and add to the imports map to ensure that references -// within the export bundle to other packages are consistent. -// -// On return, the state of the reader is undefined. -// -// Experimental: This API is experimental and may change in the future. -func ReadBundle(in io.Reader, fset *token.FileSet, imports map[string]*types.Package) ([]*types.Package, error) { - data, err := readAll(in) - if err != nil { - return nil, fmt.Errorf("reading export bundle: %v", err) - } - return gcimporter.IImportBundle(fset, imports, data) -} - -// WriteBundle writes encoded type information for the specified packages to out. -// The FileSet provides file position information for named objects. -// -// Experimental: This API is experimental and may change in the future. 
-func WriteBundle(out io.Writer, fset *token.FileSet, pkgs []*types.Package) error { - return gcimporter.IExportBundle(out, fset, pkgs) -} diff --git a/go/extractor/vendor/golang.org/x/tools/go/gcexportdata/importer.go b/go/extractor/vendor/golang.org/x/tools/go/gcexportdata/importer.go deleted file mode 100644 index 37a7247e2686..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/go/gcexportdata/importer.go +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package gcexportdata - -import ( - "fmt" - "go/token" - "go/types" - "os" -) - -// NewImporter returns a new instance of the types.Importer interface -// that reads type information from export data files written by gc. -// The Importer also satisfies types.ImporterFrom. -// -// Export data files are located using "go build" workspace conventions -// and the build.Default context. -// -// Use this importer instead of go/importer.For("gc", ...) to avoid the -// version-skew problems described in the documentation of this package, -// or to control the FileSet or access the imports map populated during -// package loading. -// -// Deprecated: Use the higher-level API in golang.org/x/tools/go/packages, -// which is more efficient. -func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom { - return importer{fset, imports} -} - -type importer struct { - fset *token.FileSet - imports map[string]*types.Package -} - -func (imp importer) Import(importPath string) (*types.Package, error) { - return imp.ImportFrom(importPath, "", 0) -} - -func (imp importer) ImportFrom(importPath, srcDir string, mode types.ImportMode) (_ *types.Package, err error) { - filename, path := Find(importPath, srcDir) - if filename == "" { - if importPath == "unsafe" { - // Even for unsafe, call Find first in case - // the package was vendored. 
- return types.Unsafe, nil - } - return nil, fmt.Errorf("can't find import: %s", importPath) - } - - if pkg, ok := imp.imports[path]; ok && pkg.Complete() { - return pkg, nil // cache hit - } - - // open file - f, err := os.Open(filename) - if err != nil { - return nil, err - } - defer func() { - f.Close() - if err != nil { - // add file name to error - err = fmt.Errorf("reading export data: %s: %v", filename, err) - } - }() - - r, err := NewReader(f) - if err != nil { - return nil, err - } - - return Read(r, imp.fset, imp.imports, path) -} diff --git a/go/extractor/vendor/golang.org/x/tools/go/internal/packagesdriver/BUILD.bazel b/go/extractor/vendor/golang.org/x/tools/go/internal/packagesdriver/BUILD.bazel deleted file mode 100644 index 2ef27e2c88af..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/go/internal/packagesdriver/BUILD.bazel +++ /dev/null @@ -1,12 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "packagesdriver", - srcs = ["sizes.go"], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/tools/go/internal/packagesdriver", - importpath = "golang.org/x/tools/go/internal/packagesdriver", - visibility = ["//go/extractor/vendor/golang.org/x/tools/go:__subpackages__"], - deps = ["//go/extractor/vendor/golang.org/x/tools/internal/gocommand"], -) diff --git a/go/extractor/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go b/go/extractor/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go deleted file mode 100644 index 333676b7cfcc..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package packagesdriver fetches type sizes for go/packages and go/analysis. -package packagesdriver - -import ( - "context" - "fmt" - "strings" - - "golang.org/x/tools/internal/gocommand" -) - -func GetSizesForArgsGolist(ctx context.Context, inv gocommand.Invocation, gocmdRunner *gocommand.Runner) (string, string, error) { - inv.Verb = "list" - inv.Args = []string{"-f", "{{context.GOARCH}} {{context.Compiler}}", "--", "unsafe"} - stdout, stderr, friendlyErr, rawErr := gocmdRunner.RunRaw(ctx, inv) - var goarch, compiler string - if rawErr != nil { - rawErrMsg := rawErr.Error() - if strings.Contains(rawErrMsg, "cannot find main module") || - strings.Contains(rawErrMsg, "go.mod file not found") { - // User's running outside of a module. - // All bets are off. Get GOARCH and guess compiler is gc. - // TODO(matloob): Is this a problem in practice? - inv.Verb = "env" - inv.Args = []string{"GOARCH"} - envout, enverr := gocmdRunner.Run(ctx, inv) - if enverr != nil { - return "", "", enverr - } - goarch = strings.TrimSpace(envout.String()) - compiler = "gc" - } else if friendlyErr != nil { - return "", "", friendlyErr - } else { - // This should be unreachable, but be defensive - // in case RunRaw's error results are inconsistent. 
- return "", "", rawErr - } - } else { - fields := strings.Fields(stdout.String()) - if len(fields) < 2 { - return "", "", fmt.Errorf("could not parse GOARCH and Go compiler in format \" \":\nstdout: <<%s>>\nstderr: <<%s>>", - stdout.String(), stderr.String()) - } - goarch = fields[0] - compiler = fields[1] - } - return compiler, goarch, nil -} diff --git a/go/extractor/vendor/golang.org/x/tools/go/packages/BUILD.bazel b/go/extractor/vendor/golang.org/x/tools/go/packages/BUILD.bazel deleted file mode 100644 index 03d3e3b01585..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/go/packages/BUILD.bazel +++ /dev/null @@ -1,27 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "packages", - srcs = [ - "doc.go", - "external.go", - "golist.go", - "golist_overlay.go", - "loadmode_string.go", - "packages.go", - "visit.go", - ], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/tools/go/packages", - importpath = "golang.org/x/tools/go/packages", - visibility = ["//visibility:public"], - deps = [ - "//go/extractor/vendor/golang.org/x/tools/go/gcexportdata", - "//go/extractor/vendor/golang.org/x/tools/go/internal/packagesdriver", - "//go/extractor/vendor/golang.org/x/tools/internal/gocommand", - "//go/extractor/vendor/golang.org/x/tools/internal/packagesinternal", - "//go/extractor/vendor/golang.org/x/tools/internal/typesinternal", - "//go/extractor/vendor/golang.org/x/tools/internal/versions", - ], -) diff --git a/go/extractor/vendor/golang.org/x/tools/go/packages/doc.go b/go/extractor/vendor/golang.org/x/tools/go/packages/doc.go deleted file mode 100644 index a8d7b06ac090..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/go/packages/doc.go +++ /dev/null @@ -1,250 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -Package packages loads Go packages for inspection and analysis. - -The [Load] function takes as input a list of patterns and returns a -list of [Package] values describing individual packages matched by those -patterns. -A [Config] specifies configuration options, the most important of which is -the [LoadMode], which controls the amount of detail in the loaded packages. - -Load passes most patterns directly to the underlying build tool. -The default build tool is the go command. -Its supported patterns are described at -https://pkg.go.dev/cmd/go#hdr-Package_lists_and_patterns. -Other build systems may be supported by providing a "driver"; -see [The driver protocol]. - -All patterns with the prefix "query=", where query is a -non-empty string of letters from [a-z], are reserved and may be -interpreted as query operators. - -Two query operators are currently supported: "file" and "pattern". - -The query "file=path/to/file.go" matches the package or packages enclosing -the Go source file path/to/file.go. For example "file=~/go/src/fmt/print.go" -might return the packages "fmt" and "fmt [fmt.test]". - -The query "pattern=string" causes "string" to be passed directly to -the underlying build tool. In most cases this is unnecessary, -but an application can use Load("pattern=" + x) as an escaping mechanism -to ensure that x is not interpreted as a query operator if it contains '='. - -All other query operators are reserved for future use and currently -cause Load to report an error. 
- -The Package struct provides basic information about the package, including - - - ID, a unique identifier for the package in the returned set; - - GoFiles, the names of the package's Go source files; - - Imports, a map from source import strings to the Packages they name; - - Types, the type information for the package's exported symbols; - - Syntax, the parsed syntax trees for the package's source code; and - - TypesInfo, the result of a complete type-check of the package syntax trees. - -(See the documentation for type Package for the complete list of fields -and more detailed descriptions.) - -For example, - - Load(nil, "bytes", "unicode...") - -returns four Package structs describing the standard library packages -bytes, unicode, unicode/utf16, and unicode/utf8. Note that one pattern -can match multiple packages and that a package might be matched by -multiple patterns: in general it is not possible to determine which -packages correspond to which patterns. - -Note that the list returned by Load contains only the packages matched -by the patterns. Their dependencies can be found by walking the import -graph using the Imports fields. - -The Load function can be configured by passing a pointer to a Config as -the first argument. A nil Config is equivalent to the zero Config, which -causes Load to run in LoadFiles mode, collecting minimal information. -See the documentation for type Config for details. - -As noted earlier, the Config.Mode controls the amount of detail -reported about the loaded packages. See the documentation for type LoadMode -for details. - -Most tools should pass their command-line arguments (after any flags) -uninterpreted to [Load], so that it can interpret them -according to the conventions of the underlying build system. - -See the Example function for typical usage. - -# The driver protocol - -[Load] may be used to load Go packages even in Go projects that use -alternative build systems, by installing an appropriate "driver" -program for the build system and specifying its location in the -GOPACKAGESDRIVER environment variable. -For example, -https://github.com/bazelbuild/rules_go/wiki/Editor-and-tool-integration -explains how to use the driver for Bazel. - -The driver program is responsible for interpreting patterns in its -preferred notation and reporting information about the packages that -those patterns identify. Drivers must also support the special "file=" -and "pattern=" patterns described above. - -The patterns are provided as positional command-line arguments. A -JSON-encoded [DriverRequest] message providing additional information -is written to the driver's standard input. The driver must write a -JSON-encoded [DriverResponse] message to its standard output. (This -message differs from the JSON schema produced by 'go list'.) -*/ -package packages // import "golang.org/x/tools/go/packages" - -/* - -Motivation and design considerations - -The new package's design solves problems addressed by two existing -packages: go/build, which locates and describes packages, and -golang.org/x/tools/go/loader, which loads, parses and type-checks them. -The go/build.Package structure encodes too much of the 'go build' way -of organizing projects, leaving us in need of a data type that describes a -package of Go source code independent of the underlying build system. 
-We wanted something that works equally well with go build and vgo, and -also other build systems such as Bazel and Blaze, making it possible to -construct analysis tools that work in all these environments. -Tools such as errcheck and staticcheck were essentially unavailable to -the Go community at Google, and some of Google's internal tools for Go -are unavailable externally. -This new package provides a uniform way to obtain package metadata by -querying each of these build systems, optionally supporting their -preferred command-line notations for packages, so that tools integrate -neatly with users' build environments. The Metadata query function -executes an external query tool appropriate to the current workspace. - -Loading packages always returns the complete import graph "all the way down", -even if all you want is information about a single package, because the query -mechanisms of all the build systems we currently support ({go,vgo} list, and -blaze/bazel aspect-based query) cannot provide detailed information -about one package without visiting all its dependencies too, so there is -no additional asymptotic cost to providing transitive information. -(This property might not be true of a hypothetical 5th build system.) - -In calls to TypeCheck, all initial packages, and any package that -transitively depends on one of them, must be loaded from source. -Consider A->B->C->D->E: if A,C are initial, A,B,C must be loaded from -source; D may be loaded from export data, and E may not be loaded at all -(though it's possible that D's export data mentions it, so a -types.Package may be created for it and exposed.) - -The old loader had a feature to suppress type-checking of function -bodies on a per-package basis, primarily intended to reduce the work of -obtaining type information for imported packages. Now that imports are -satisfied by export data, the optimization no longer seems necessary. - -Despite some early attempts, the old loader did not exploit export data, -instead always using the equivalent of WholeProgram mode. This was due -to the complexity of mixing source and export data packages (now -resolved by the upward traversal mentioned above), and because export data -files were nearly always missing or stale. Now that 'go build' supports -caching, all the underlying build systems can guarantee to produce -export data in a reasonable (amortized) time. - -Test "main" packages synthesized by the build system are now reported as -first-class packages, avoiding the need for clients (such as go/ssa) to -reinvent this generation logic. - -One way in which go/packages is simpler than the old loader is in its -treatment of in-package tests. In-package tests are packages that -consist of all the files of the library under test, plus the test files. -The old loader constructed in-package tests by a two-phase process of -mutation called "augmentation": first it would construct and type check -all the ordinary library packages and type-check the packages that -depend on them; then it would add more (test) files to the package and -type-check again. This two-phase approach had four major problems: -1) in processing the tests, the loader modified the library package, - leaving no way for a client application to see both the test - package and the library package; one would mutate into the other. 
-2) because test files can declare additional methods on types defined in - the library portion of the package, the dispatch of method calls in - the library portion was affected by the presence of the test files. - This should have been a clue that the packages were logically - different. -3) this model of "augmentation" assumed at most one in-package test - per library package, which is true of projects using 'go build', - but not other build systems. -4) because of the two-phase nature of test processing, all packages that - import the library package had to be processed before augmentation, - forcing a "one-shot" API and preventing the client from calling Load - in several times in sequence as is now possible in WholeProgram mode. - (TypeCheck mode has a similar one-shot restriction for a different reason.) - -Early drafts of this package supported "multi-shot" operation. -Although it allowed clients to make a sequence of calls (or concurrent -calls) to Load, building up the graph of Packages incrementally, -it was of marginal value: it complicated the API -(since it allowed some options to vary across calls but not others), -it complicated the implementation, -it cannot be made to work in Types mode, as explained above, -and it was less efficient than making one combined call (when this is possible). -Among the clients we have inspected, none made multiple calls to load -but could not be easily and satisfactorily modified to make only a single call. -However, applications changes may be required. -For example, the ssadump command loads the user-specified packages -and in addition the runtime package. It is tempting to simply append -"runtime" to the user-provided list, but that does not work if the user -specified an ad-hoc package such as [a.go b.go]. -Instead, ssadump no longer requests the runtime package, -but seeks it among the dependencies of the user-specified packages, -and emits an error if it is not found. - -Overlays: The Overlay field in the Config allows providing alternate contents -for Go source files, by providing a mapping from file path to contents. -go/packages will pull in new imports added in overlay files when go/packages -is run in LoadImports mode or greater. -Overlay support for the go list driver isn't complete yet: if the file doesn't -exist on disk, it will only be recognized in an overlay if it is a non-test file -and the package would be reported even without the overlay. - -Questions & Tasks - -- Add GOARCH/GOOS? - They are not portable concepts, but could be made portable. - Our goal has been to allow users to express themselves using the conventions - of the underlying build system: if the build system honors GOARCH - during a build and during a metadata query, then so should - applications built atop that query mechanism. - Conversely, if the target architecture of the build is determined by - command-line flags, the application can pass the relevant - flags through to the build system using a command such as: - myapp -query_flag="--cpu=amd64" -query_flag="--os=darwin" - However, this approach is low-level, unwieldy, and non-portable. - GOOS and GOARCH seem important enough to warrant a dedicated option. - -- How should we handle partial failures such as a mixture of good and - malformed patterns, existing and non-existent packages, successful and - failed builds, import failures, import cycles, and so on, in a call to - Load? - -- Support bazel, blaze, and go1.10 list, not just go1.11 list. - -- Handle (and test) various partial success cases, e.g. 
- a mixture of good packages and: - invalid patterns - nonexistent packages - empty packages - packages with malformed package or import declarations - unreadable files - import cycles - other parse errors - type errors - Make sure we record errors at the correct place in the graph. - -- Missing packages among initial arguments are not reported. - Return bogus packages for them, like golist does. - -- "undeclared name" errors (for example) are reported out of source file - order. I suspect this is due to the breadth-first resolution now used - by go/types. Is that a bug? Discuss with gri. - -*/ diff --git a/go/extractor/vendor/golang.org/x/tools/go/packages/external.go b/go/extractor/vendor/golang.org/x/tools/go/packages/external.go deleted file mode 100644 index 4335c1eb14c7..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/go/packages/external.go +++ /dev/null @@ -1,140 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package packages - -// This file defines the protocol that enables an external "driver" -// tool to supply package metadata in place of 'go list'. - -import ( - "bytes" - "encoding/json" - "fmt" - "os" - "os/exec" - "strings" -) - -// DriverRequest defines the schema of a request for package metadata -// from an external driver program. The JSON-encoded DriverRequest -// message is provided to the driver program's standard input. The -// query patterns are provided as command-line arguments. -// -// See the package documentation for an overview. -type DriverRequest struct { - Mode LoadMode `json:"mode"` - - // Env specifies the environment the underlying build system should be run in. - Env []string `json:"env"` - - // BuildFlags are flags that should be passed to the underlying build system. - BuildFlags []string `json:"build_flags"` - - // Tests specifies whether the patterns should also return test packages. - Tests bool `json:"tests"` - - // Overlay maps file paths (relative to the driver's working directory) to the byte contents - // of overlay files. - Overlay map[string][]byte `json:"overlay"` -} - -// DriverResponse defines the schema of a response from an external -// driver program, providing the results of a query for package -// metadata. The driver program must write a JSON-encoded -// DriverResponse message to its standard output. -// -// See the package documentation for an overview. -type DriverResponse struct { - // NotHandled is returned if the request can't be handled by the current - // driver. If an external driver returns a response with NotHandled, the - // rest of the DriverResponse is ignored, and go/packages will fallback - // to the next driver. If go/packages is extended in the future to support - // lists of multiple drivers, go/packages will fall back to the next driver. - NotHandled bool - - // Compiler and Arch are the arguments pass of types.SizesFor - // to get a types.Sizes to use when type checking. - Compiler string - Arch string - - // Roots is the set of package IDs that make up the root packages. - // We have to encode this separately because when we encode a single package - // we cannot know if it is one of the roots as that requires knowledge of the - // graph it is part of. - Roots []string `json:",omitempty"` - - // Packages is the full set of packages in the graph. - // The packages are not connected into a graph. - // The Imports if populated will be stubs that only have their ID set. 
- // Imports will be connected and then type and syntax information added in a - // later pass (see refine). - Packages []*Package - - // GoVersion is the minor version number used by the driver - // (e.g. the go command on the PATH) when selecting .go files. - // Zero means unknown. - GoVersion int -} - -// driver is the type for functions that query the build system for the -// packages named by the patterns. -type driver func(cfg *Config, patterns ...string) (*DriverResponse, error) - -// findExternalDriver returns the file path of a tool that supplies -// the build system package structure, or "" if not found." -// If GOPACKAGESDRIVER is set in the environment findExternalTool returns its -// value, otherwise it searches for a binary named gopackagesdriver on the PATH. -func findExternalDriver(cfg *Config) driver { - const toolPrefix = "GOPACKAGESDRIVER=" - tool := "" - for _, env := range cfg.Env { - if val := strings.TrimPrefix(env, toolPrefix); val != env { - tool = val - } - } - if tool != "" && tool == "off" { - return nil - } - if tool == "" { - var err error - tool, err = exec.LookPath("gopackagesdriver") - if err != nil { - return nil - } - } - return func(cfg *Config, words ...string) (*DriverResponse, error) { - req, err := json.Marshal(DriverRequest{ - Mode: cfg.Mode, - Env: cfg.Env, - BuildFlags: cfg.BuildFlags, - Tests: cfg.Tests, - Overlay: cfg.Overlay, - }) - if err != nil { - return nil, fmt.Errorf("failed to encode message to driver tool: %v", err) - } - - buf := new(bytes.Buffer) - stderr := new(bytes.Buffer) - cmd := exec.CommandContext(cfg.Context, tool, words...) - cmd.Dir = cfg.Dir - cmd.Env = cfg.Env - cmd.Stdin = bytes.NewReader(req) - cmd.Stdout = buf - cmd.Stderr = stderr - - if err := cmd.Run(); err != nil { - return nil, fmt.Errorf("%v: %v: %s", tool, err, cmd.Stderr) - } - if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTDRIVERERRORS") != "" { - fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(cmd), stderr) - } - - var response DriverResponse - if err := json.Unmarshal(buf.Bytes(), &response); err != nil { - return nil, err - } - return &response, nil - } -} diff --git a/go/extractor/vendor/golang.org/x/tools/go/packages/golist.go b/go/extractor/vendor/golang.org/x/tools/go/packages/golist.go deleted file mode 100644 index 22305d9c90a1..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/go/packages/golist.go +++ /dev/null @@ -1,1106 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package packages - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "log" - "os" - "os/exec" - "path" - "path/filepath" - "reflect" - "sort" - "strconv" - "strings" - "sync" - "unicode" - - "golang.org/x/tools/go/internal/packagesdriver" - "golang.org/x/tools/internal/gocommand" - "golang.org/x/tools/internal/packagesinternal" -) - -// debug controls verbose logging. -var debug, _ = strconv.ParseBool(os.Getenv("GOPACKAGESDEBUG")) - -// A goTooOldError reports that the go command -// found by exec.LookPath is too old to use the new go list behavior. -type goTooOldError struct { - error -} - -// responseDeduper wraps a DriverResponse, deduplicating its contents. 
-type responseDeduper struct { - seenRoots map[string]bool - seenPackages map[string]*Package - dr *DriverResponse -} - -func newDeduper() *responseDeduper { - return &responseDeduper{ - dr: &DriverResponse{}, - seenRoots: map[string]bool{}, - seenPackages: map[string]*Package{}, - } -} - -// addAll fills in r with a DriverResponse. -func (r *responseDeduper) addAll(dr *DriverResponse) { - for _, pkg := range dr.Packages { - r.addPackage(pkg) - } - for _, root := range dr.Roots { - r.addRoot(root) - } - r.dr.GoVersion = dr.GoVersion -} - -func (r *responseDeduper) addPackage(p *Package) { - if r.seenPackages[p.ID] != nil { - return - } - r.seenPackages[p.ID] = p - r.dr.Packages = append(r.dr.Packages, p) -} - -func (r *responseDeduper) addRoot(id string) { - if r.seenRoots[id] { - return - } - r.seenRoots[id] = true - r.dr.Roots = append(r.dr.Roots, id) -} - -type golistState struct { - cfg *Config - ctx context.Context - - envOnce sync.Once - goEnvError error - goEnv map[string]string - - rootsOnce sync.Once - rootDirsError error - rootDirs map[string]string - - goVersionOnce sync.Once - goVersionError error - goVersion int // The X in Go 1.X. - - // vendorDirs caches the (non)existence of vendor directories. - vendorDirs map[string]bool -} - -// getEnv returns Go environment variables. Only specific variables are -// populated -- computing all of them is slow. -func (state *golistState) getEnv() (map[string]string, error) { - state.envOnce.Do(func() { - var b *bytes.Buffer - b, state.goEnvError = state.invokeGo("env", "-json", "GOMOD", "GOPATH") - if state.goEnvError != nil { - return - } - - state.goEnv = make(map[string]string) - decoder := json.NewDecoder(b) - if state.goEnvError = decoder.Decode(&state.goEnv); state.goEnvError != nil { - return - } - }) - return state.goEnv, state.goEnvError -} - -// mustGetEnv is a convenience function that can be used if getEnv has already succeeded. -func (state *golistState) mustGetEnv() map[string]string { - env, err := state.getEnv() - if err != nil { - panic(fmt.Sprintf("mustGetEnv: %v", err)) - } - return env -} - -// goListDriver uses the go list command to interpret the patterns and produce -// the build system package structure. -// See driver for more details. -func goListDriver(cfg *Config, patterns ...string) (_ *DriverResponse, err error) { - // Make sure that any asynchronous go commands are killed when we return. - parentCtx := cfg.Context - if parentCtx == nil { - parentCtx = context.Background() - } - ctx, cancel := context.WithCancel(parentCtx) - defer cancel() - - response := newDeduper() - - state := &golistState{ - cfg: cfg, - ctx: ctx, - vendorDirs: map[string]bool{}, - } - - // Fill in response.Sizes asynchronously if necessary. - if cfg.Mode&NeedTypesSizes != 0 || cfg.Mode&NeedTypes != 0 { - errCh := make(chan error) - go func() { - compiler, arch, err := packagesdriver.GetSizesForArgsGolist(ctx, state.cfgInvocation(), cfg.gocmdRunner) - response.dr.Compiler = compiler - response.dr.Arch = arch - errCh <- err - }() - defer func() { - if sizesErr := <-errCh; sizesErr != nil { - err = sizesErr - } - }() - } - - // Determine files requested in contains patterns - var containFiles []string - restPatterns := make([]string, 0, len(patterns)) - // Extract file= and other [querytype]= patterns. Report an error if querytype - // doesn't exist. 
-extractQueries: - for _, pattern := range patterns { - eqidx := strings.Index(pattern, "=") - if eqidx < 0 { - restPatterns = append(restPatterns, pattern) - } else { - query, value := pattern[:eqidx], pattern[eqidx+len("="):] - switch query { - case "file": - containFiles = append(containFiles, value) - case "pattern": - restPatterns = append(restPatterns, value) - case "": // not a reserved query - restPatterns = append(restPatterns, pattern) - default: - for _, rune := range query { - if rune < 'a' || rune > 'z' { // not a reserved query - restPatterns = append(restPatterns, pattern) - continue extractQueries - } - } - // Reject all other patterns containing "=" - return nil, fmt.Errorf("invalid query type %q in query pattern %q", query, pattern) - } - } - } - - // See if we have any patterns to pass through to go list. Zero initial - // patterns also requires a go list call, since it's the equivalent of - // ".". - if len(restPatterns) > 0 || len(patterns) == 0 { - dr, err := state.createDriverResponse(restPatterns...) - if err != nil { - return nil, err - } - response.addAll(dr) - } - - if len(containFiles) != 0 { - if err := state.runContainsQueries(response, containFiles); err != nil { - return nil, err - } - } - - // (We may yet return an error due to defer.) - return response.dr, nil -} - -func (state *golistState) runContainsQueries(response *responseDeduper, queries []string) error { - for _, query := range queries { - // TODO(matloob): Do only one query per directory. - fdir := filepath.Dir(query) - // Pass absolute path of directory to go list so that it knows to treat it as a directory, - // not a package path. - pattern, err := filepath.Abs(fdir) - if err != nil { - return fmt.Errorf("could not determine absolute path of file= query path %q: %v", query, err) - } - dirResponse, err := state.createDriverResponse(pattern) - - // If there was an error loading the package, or no packages are returned, - // or the package is returned with errors, try to load the file as an - // ad-hoc package. - // Usually the error will appear in a returned package, but may not if we're - // in module mode and the ad-hoc is located outside a module. - if err != nil || len(dirResponse.Packages) == 0 || len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].GoFiles) == 0 && - len(dirResponse.Packages[0].Errors) == 1 { - var queryErr error - if dirResponse, queryErr = state.adhocPackage(pattern, query); queryErr != nil { - return err // return the original error - } - } - isRoot := make(map[string]bool, len(dirResponse.Roots)) - for _, root := range dirResponse.Roots { - isRoot[root] = true - } - for _, pkg := range dirResponse.Packages { - // Add any new packages to the main set - // We don't bother to filter packages that will be dropped by the changes of roots, - // that will happen anyway during graph construction outside this function. - // Over-reporting packages is not a problem. - response.addPackage(pkg) - // if the package was not a root one, it cannot have the file - if !isRoot[pkg.ID] { - continue - } - for _, pkgFile := range pkg.GoFiles { - if filepath.Base(query) == filepath.Base(pkgFile) { - response.addRoot(pkg.ID) - break - } - } - } - } - return nil -} - -// adhocPackage attempts to load or construct an ad-hoc package for a given -// query, if the original call to the driver produced inadequate results. 
-func (state *golistState) adhocPackage(pattern, query string) (*DriverResponse, error) { - response, err := state.createDriverResponse(query) - if err != nil { - return nil, err - } - // If we get nothing back from `go list`, - // try to make this file into its own ad-hoc package. - // TODO(rstambler): Should this check against the original response? - if len(response.Packages) == 0 { - response.Packages = append(response.Packages, &Package{ - ID: "command-line-arguments", - PkgPath: query, - GoFiles: []string{query}, - CompiledGoFiles: []string{query}, - Imports: make(map[string]*Package), - }) - response.Roots = append(response.Roots, "command-line-arguments") - } - // Handle special cases. - if len(response.Packages) == 1 { - // golang/go#33482: If this is a file= query for ad-hoc packages where - // the file only exists on an overlay, and exists outside of a module, - // add the file to the package and remove the errors. - if response.Packages[0].ID == "command-line-arguments" || - filepath.ToSlash(response.Packages[0].PkgPath) == filepath.ToSlash(query) { - if len(response.Packages[0].GoFiles) == 0 { - filename := filepath.Join(pattern, filepath.Base(query)) // avoid recomputing abspath - // TODO(matloob): check if the file is outside of a root dir? - for path := range state.cfg.Overlay { - if path == filename { - response.Packages[0].Errors = nil - response.Packages[0].GoFiles = []string{path} - response.Packages[0].CompiledGoFiles = []string{path} - } - } - } - } - } - return response, nil -} - -// Fields must match go list; -// see $GOROOT/src/cmd/go/internal/load/pkg.go. -type jsonPackage struct { - ImportPath string - Dir string - Name string - Export string - GoFiles []string - CompiledGoFiles []string - IgnoredGoFiles []string - IgnoredOtherFiles []string - EmbedPatterns []string - EmbedFiles []string - CFiles []string - CgoFiles []string - CXXFiles []string - MFiles []string - HFiles []string - FFiles []string - SFiles []string - SwigFiles []string - SwigCXXFiles []string - SysoFiles []string - Imports []string - ImportMap map[string]string - Deps []string - Module *Module - TestGoFiles []string - TestImports []string - XTestGoFiles []string - XTestImports []string - ForTest string // q in a "p [q.test]" package, else "" - DepOnly bool - - Error *packagesinternal.PackageError - DepsErrors []*packagesinternal.PackageError -} - -type jsonPackageError struct { - ImportStack []string - Pos string - Err string -} - -func otherFiles(p *jsonPackage) [][]string { - return [][]string{p.CFiles, p.CXXFiles, p.MFiles, p.HFiles, p.FFiles, p.SFiles, p.SwigFiles, p.SwigCXXFiles, p.SysoFiles} -} - -// createDriverResponse uses the "go list" command to expand the pattern -// words and return a response for the specified packages. -func (state *golistState) createDriverResponse(words ...string) (*DriverResponse, error) { - // go list uses the following identifiers in ImportPath and Imports: - // - // "p" -- importable package or main (command) - // "q.test" -- q's test executable - // "p [q.test]" -- variant of p as built for q's test executable - // "q_test [q.test]" -- q's external test package - // - // The packages p that are built differently for a test q.test - // are q itself, plus any helpers used by the external test q_test, - // typically including "testing" and all its dependencies. - - // Run "go list" for complete - // information on the specified packages. 
- goVersion, err := state.getGoVersion() - if err != nil { - return nil, err - } - buf, err := state.invokeGo("list", golistargs(state.cfg, words, goVersion)...) - if err != nil { - return nil, err - } - - seen := make(map[string]*jsonPackage) - pkgs := make(map[string]*Package) - additionalErrors := make(map[string][]Error) - // Decode the JSON and convert it to Package form. - response := &DriverResponse{ - GoVersion: goVersion, - } - for dec := json.NewDecoder(buf); dec.More(); { - p := new(jsonPackage) - if err := dec.Decode(p); err != nil { - return nil, fmt.Errorf("JSON decoding failed: %v", err) - } - - if p.ImportPath == "" { - // The documentation for go list says that β€œ[e]rroneous packages will have - // a non-empty ImportPath”. If for some reason it comes back empty, we - // prefer to error out rather than silently discarding data or handing - // back a package without any way to refer to it. - if p.Error != nil { - return nil, Error{ - Pos: p.Error.Pos, - Msg: p.Error.Err, - } - } - return nil, fmt.Errorf("package missing import path: %+v", p) - } - - // Work around https://golang.org/issue/33157: - // go list -e, when given an absolute path, will find the package contained at - // that directory. But when no package exists there, it will return a fake package - // with an error and the ImportPath set to the absolute path provided to go list. - // Try to convert that absolute path to what its package path would be if it's - // contained in a known module or GOPATH entry. This will allow the package to be - // properly "reclaimed" when overlays are processed. - if filepath.IsAbs(p.ImportPath) && p.Error != nil { - pkgPath, ok, err := state.getPkgPath(p.ImportPath) - if err != nil { - return nil, err - } - if ok { - p.ImportPath = pkgPath - } - } - - if old, found := seen[p.ImportPath]; found { - // If one version of the package has an error, and the other doesn't, assume - // that this is a case where go list is reporting a fake dependency variant - // of the imported package: When a package tries to invalidly import another - // package, go list emits a variant of the imported package (with the same - // import path, but with an error on it, and the package will have a - // DepError set on it). An example of when this can happen is for imports of - // main packages: main packages can not be imported, but they may be - // separately matched and listed by another pattern. - // See golang.org/issue/36188 for more details. - - // The plan is that eventually, hopefully in Go 1.15, the error will be - // reported on the importing package rather than the duplicate "fake" - // version of the imported package. Once all supported versions of Go - // have the new behavior this logic can be deleted. - // TODO(matloob): delete the workaround logic once all supported versions of - // Go return the errors on the proper package. - - // There should be exactly one version of a package that doesn't have an - // error. - if old.Error == nil && p.Error == nil { - if !reflect.DeepEqual(p, old) { - return nil, fmt.Errorf("internal error: go list gives conflicting information for package %v", p.ImportPath) - } - continue - } - - // Determine if this package's error needs to be bubbled up. - // This is a hack, and we expect for go list to eventually set the error - // on the package. 
- if old.Error != nil { - var errkind string - if strings.Contains(old.Error.Err, "not an importable package") { - errkind = "not an importable package" - } else if strings.Contains(old.Error.Err, "use of internal package") && strings.Contains(old.Error.Err, "not allowed") { - errkind = "use of internal package not allowed" - } - if errkind != "" { - if len(old.Error.ImportStack) < 1 { - return nil, fmt.Errorf(`internal error: go list gave a %q error with empty import stack`, errkind) - } - importingPkg := old.Error.ImportStack[len(old.Error.ImportStack)-1] - if importingPkg == old.ImportPath { - // Using an older version of Go which put this package itself on top of import - // stack, instead of the importer. Look for importer in second from top - // position. - if len(old.Error.ImportStack) < 2 { - return nil, fmt.Errorf(`internal error: go list gave a %q error with an import stack without importing package`, errkind) - } - importingPkg = old.Error.ImportStack[len(old.Error.ImportStack)-2] - } - additionalErrors[importingPkg] = append(additionalErrors[importingPkg], Error{ - Pos: old.Error.Pos, - Msg: old.Error.Err, - Kind: ListError, - }) - } - } - - // Make sure that if there's a version of the package without an error, - // that's the one reported to the user. - if old.Error == nil { - continue - } - - // This package will replace the old one at the end of the loop. - } - seen[p.ImportPath] = p - - pkg := &Package{ - Name: p.Name, - ID: p.ImportPath, - GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles), - CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles), - OtherFiles: absJoin(p.Dir, otherFiles(p)...), - EmbedFiles: absJoin(p.Dir, p.EmbedFiles), - EmbedPatterns: absJoin(p.Dir, p.EmbedPatterns), - IgnoredFiles: absJoin(p.Dir, p.IgnoredGoFiles, p.IgnoredOtherFiles), - forTest: p.ForTest, - depsErrors: p.DepsErrors, - Module: p.Module, - } - - if (state.cfg.Mode&typecheckCgo) != 0 && len(p.CgoFiles) != 0 { - if len(p.CompiledGoFiles) > len(p.GoFiles) { - // We need the cgo definitions, which are in the first - // CompiledGoFile after the non-cgo ones. This is a hack but there - // isn't currently a better way to find it. We also need the pure - // Go files and unprocessed cgo files, all of which are already - // in pkg.GoFiles. - cgoTypes := p.CompiledGoFiles[len(p.GoFiles)] - pkg.CompiledGoFiles = append([]string{cgoTypes}, pkg.GoFiles...) - } else { - // golang/go#38990: go list silently fails to do cgo processing - pkg.CompiledGoFiles = nil - pkg.Errors = append(pkg.Errors, Error{ - Msg: "go list failed to return CompiledGoFiles. This may indicate failure to perform cgo processing; try building at the command line. See https://golang.org/issue/38990.", - Kind: ListError, - }) - } - } - - // Work around https://golang.org/issue/28749: - // cmd/go puts assembly, C, and C++ files in CompiledGoFiles. - // Remove files from CompiledGoFiles that are non-go files - // (or are not files that look like they are from the cache). - if len(pkg.CompiledGoFiles) > 0 { - out := pkg.CompiledGoFiles[:0] - for _, f := range pkg.CompiledGoFiles { - if ext := filepath.Ext(f); ext != ".go" && ext != "" { // ext == "" means the file is from the cache, so probably cgo-processed file - continue - } - out = append(out, f) - } - pkg.CompiledGoFiles = out - } - - // Extract the PkgPath from the package's ID. 
- if i := strings.IndexByte(pkg.ID, ' '); i >= 0 { - pkg.PkgPath = pkg.ID[:i] - } else { - pkg.PkgPath = pkg.ID - } - - if pkg.PkgPath == "unsafe" { - pkg.CompiledGoFiles = nil // ignore fake unsafe.go file (#59929) - } else if len(pkg.CompiledGoFiles) == 0 { - // Work around for pre-go.1.11 versions of go list. - // TODO(matloob): they should be handled by the fallback. - // Can we delete this? - pkg.CompiledGoFiles = pkg.GoFiles - } - - // Assume go list emits only absolute paths for Dir. - if p.Dir != "" && !filepath.IsAbs(p.Dir) { - log.Fatalf("internal error: go list returned non-absolute Package.Dir: %s", p.Dir) - } - - if p.Export != "" && !filepath.IsAbs(p.Export) { - pkg.ExportFile = filepath.Join(p.Dir, p.Export) - } else { - pkg.ExportFile = p.Export - } - - // imports - // - // Imports contains the IDs of all imported packages. - // ImportsMap records (path, ID) only where they differ. - ids := make(map[string]bool) - for _, id := range p.Imports { - ids[id] = true - } - pkg.Imports = make(map[string]*Package) - for path, id := range p.ImportMap { - pkg.Imports[path] = &Package{ID: id} // non-identity import - delete(ids, id) - } - for id := range ids { - if id == "C" { - continue - } - - pkg.Imports[id] = &Package{ID: id} // identity import - } - if !p.DepOnly { - response.Roots = append(response.Roots, pkg.ID) - } - - // Temporary work-around for golang/go#39986. Parse filenames out of - // error messages. This happens if there are unrecoverable syntax - // errors in the source, so we can't match on a specific error message. - // - // TODO(rfindley): remove this heuristic, in favor of considering - // InvalidGoFiles from the list driver. - if err := p.Error; err != nil && state.shouldAddFilenameFromError(p) { - addFilenameFromPos := func(pos string) bool { - split := strings.Split(pos, ":") - if len(split) < 1 { - return false - } - filename := strings.TrimSpace(split[0]) - if filename == "" { - return false - } - if !filepath.IsAbs(filename) { - filename = filepath.Join(state.cfg.Dir, filename) - } - info, _ := os.Stat(filename) - if info == nil { - return false - } - pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, filename) - pkg.GoFiles = append(pkg.GoFiles, filename) - return true - } - found := addFilenameFromPos(err.Pos) - // In some cases, go list only reports the error position in the - // error text, not the error position. One such case is when the - // file's package name is a keyword (see golang.org/issue/39763). - if !found { - addFilenameFromPos(err.Err) - } - } - - if p.Error != nil { - msg := strings.TrimSpace(p.Error.Err) // Trim to work around golang.org/issue/32363. - // Address golang.org/issue/35964 by appending import stack to error message. - if msg == "import cycle not allowed" && len(p.Error.ImportStack) != 0 { - msg += fmt.Sprintf(": import stack: %v", p.Error.ImportStack) - } - pkg.Errors = append(pkg.Errors, Error{ - Pos: p.Error.Pos, - Msg: msg, - Kind: ListError, - }) - } - - pkgs[pkg.ID] = pkg - } - - for id, errs := range additionalErrors { - if p, ok := pkgs[id]; ok { - p.Errors = append(p.Errors, errs...) 
- } - } - for _, pkg := range pkgs { - response.Packages = append(response.Packages, pkg) - } - sort.Slice(response.Packages, func(i, j int) bool { return response.Packages[i].ID < response.Packages[j].ID }) - - return response, nil -} - -func (state *golistState) shouldAddFilenameFromError(p *jsonPackage) bool { - if len(p.GoFiles) > 0 || len(p.CompiledGoFiles) > 0 { - return false - } - - goV, err := state.getGoVersion() - if err != nil { - return false - } - - // On Go 1.14 and earlier, only add filenames from errors if the import stack is empty. - // The import stack behaves differently for these versions than newer Go versions. - if goV < 15 { - return len(p.Error.ImportStack) == 0 - } - - // On Go 1.15 and later, only parse filenames out of error if there's no import stack, - // or the current package is at the top of the import stack. This is not guaranteed - // to work perfectly, but should avoid some cases where files in errors don't belong to this - // package. - return len(p.Error.ImportStack) == 0 || p.Error.ImportStack[len(p.Error.ImportStack)-1] == p.ImportPath -} - -// getGoVersion returns the effective minor version of the go command. -func (state *golistState) getGoVersion() (int, error) { - state.goVersionOnce.Do(func() { - state.goVersion, state.goVersionError = gocommand.GoVersion(state.ctx, state.cfgInvocation(), state.cfg.gocmdRunner) - }) - return state.goVersion, state.goVersionError -} - -// getPkgPath finds the package path of a directory if it's relative to a root -// directory. -func (state *golistState) getPkgPath(dir string) (string, bool, error) { - absDir, err := filepath.Abs(dir) - if err != nil { - return "", false, err - } - roots, err := state.determineRootDirs() - if err != nil { - return "", false, err - } - - for rdir, rpath := range roots { - // Make sure that the directory is in the module, - // to avoid creating a path relative to another module. - if !strings.HasPrefix(absDir, rdir) { - continue - } - // TODO(matloob): This doesn't properly handle symlinks. - r, err := filepath.Rel(rdir, dir) - if err != nil { - continue - } - if rpath != "" { - // We choose only one root even though the directory even it can belong in multiple modules - // or GOPATH entries. This is okay because we only need to work with absolute dirs when a - // file is missing from disk, for instance when gopls calls go/packages in an overlay. - // Once the file is saved, gopls, or the next invocation of the tool will get the correct - // result straight from golist. - // TODO(matloob): Implement module tiebreaking? - return path.Join(rpath, filepath.ToSlash(r)), true, nil - } - return filepath.ToSlash(r), true, nil - } - return "", false, nil -} - -// absJoin absolutizes and flattens the lists of files. 
-func absJoin(dir string, fileses ...[]string) (res []string) { - for _, files := range fileses { - for _, file := range files { - if !filepath.IsAbs(file) { - file = filepath.Join(dir, file) - } - res = append(res, file) - } - } - return res -} - -func jsonFlag(cfg *Config, goVersion int) string { - if goVersion < 19 { - return "-json" - } - var fields []string - added := make(map[string]bool) - addFields := func(fs ...string) { - for _, f := range fs { - if !added[f] { - added[f] = true - fields = append(fields, f) - } - } - } - addFields("Name", "ImportPath", "Error") // These fields are always needed - if cfg.Mode&NeedFiles != 0 || cfg.Mode&NeedTypes != 0 { - addFields("Dir", "GoFiles", "IgnoredGoFiles", "IgnoredOtherFiles", "CFiles", - "CgoFiles", "CXXFiles", "MFiles", "HFiles", "FFiles", "SFiles", - "SwigFiles", "SwigCXXFiles", "SysoFiles") - if cfg.Tests { - addFields("TestGoFiles", "XTestGoFiles") - } - } - if cfg.Mode&NeedTypes != 0 { - // CompiledGoFiles seems to be required for the test case TestCgoNoSyntax, - // even when -compiled isn't passed in. - // TODO(#52435): Should we make the test ask for -compiled, or automatically - // request CompiledGoFiles in certain circumstances? - addFields("Dir", "CompiledGoFiles") - } - if cfg.Mode&NeedCompiledGoFiles != 0 { - addFields("Dir", "CompiledGoFiles", "Export") - } - if cfg.Mode&NeedImports != 0 { - // When imports are requested, DepOnly is used to distinguish between packages - // explicitly requested and transitive imports of those packages. - addFields("DepOnly", "Imports", "ImportMap") - if cfg.Tests { - addFields("TestImports", "XTestImports") - } - } - if cfg.Mode&NeedDeps != 0 { - addFields("DepOnly") - } - if usesExportData(cfg) { - // Request Dir in the unlikely case Export is not absolute. - addFields("Dir", "Export") - } - if cfg.Mode&needInternalForTest != 0 { - addFields("ForTest") - } - if cfg.Mode&needInternalDepsErrors != 0 { - addFields("DepsErrors") - } - if cfg.Mode&NeedModule != 0 { - addFields("Module") - } - if cfg.Mode&NeedEmbedFiles != 0 { - addFields("EmbedFiles") - } - if cfg.Mode&NeedEmbedPatterns != 0 { - addFields("EmbedPatterns") - } - return "-json=" + strings.Join(fields, ",") -} - -func golistargs(cfg *Config, words []string, goVersion int) []string { - const findFlags = NeedImports | NeedTypes | NeedSyntax | NeedTypesInfo - fullargs := []string{ - "-e", jsonFlag(cfg, goVersion), - fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypes|NeedTypesInfo|NeedTypesSizes) != 0), - fmt.Sprintf("-test=%t", cfg.Tests), - fmt.Sprintf("-export=%t", usesExportData(cfg)), - fmt.Sprintf("-deps=%t", cfg.Mode&NeedImports != 0), - // go list doesn't let you pass -test and -find together, - // probably because you'd just get the TestMain. - fmt.Sprintf("-find=%t", !cfg.Tests && cfg.Mode&findFlags == 0 && !usesExportData(cfg)), - } - - // golang/go#60456: with go1.21 and later, go list serves pgo variants, which - // can be costly to compute and may result in redundant processing for the - // caller. Disable these variants. If someone wants to add e.g. a NeedPGO - // mode flag, that should be a separate proposal. - if goVersion >= 21 { - fullargs = append(fullargs, "-pgo=off") - } - - fullargs = append(fullargs, cfg.BuildFlags...) - fullargs = append(fullargs, "--") - fullargs = append(fullargs, words...) - return fullargs -} - -// cfgInvocation returns an Invocation that reflects cfg's settings. 
-func (state *golistState) cfgInvocation() gocommand.Invocation { - cfg := state.cfg - return gocommand.Invocation{ - BuildFlags: cfg.BuildFlags, - ModFile: cfg.modFile, - ModFlag: cfg.modFlag, - CleanEnv: cfg.Env != nil, - Env: cfg.Env, - Logf: cfg.Logf, - WorkingDir: cfg.Dir, - } -} - -// invokeGo returns the stdout of a go command invocation. -func (state *golistState) invokeGo(verb string, args ...string) (*bytes.Buffer, error) { - cfg := state.cfg - - inv := state.cfgInvocation() - - // For Go versions 1.16 and above, `go list` accepts overlays directly via - // the -overlay flag. Set it, if it's available. - // - // The check for "list" is not necessarily required, but we should avoid - // getting the go version if possible. - if verb == "list" { - goVersion, err := state.getGoVersion() - if err != nil { - return nil, err - } - if goVersion >= 16 { - filename, cleanup, err := state.writeOverlays() - if err != nil { - return nil, err - } - defer cleanup() - inv.Overlay = filename - } - } - inv.Verb = verb - inv.Args = args - gocmdRunner := cfg.gocmdRunner - if gocmdRunner == nil { - gocmdRunner = &gocommand.Runner{} - } - stdout, stderr, friendlyErr, err := gocmdRunner.RunRaw(cfg.Context, inv) - if err != nil { - // Check for 'go' executable not being found. - if ee, ok := err.(*exec.Error); ok && ee.Err == exec.ErrNotFound { - return nil, fmt.Errorf("'go list' driver requires 'go', but %s", exec.ErrNotFound) - } - - exitErr, ok := err.(*exec.ExitError) - if !ok { - // Catastrophic error: - // - context cancellation - return nil, fmt.Errorf("couldn't run 'go': %w", err) - } - - // Old go version? - if strings.Contains(stderr.String(), "flag provided but not defined") { - return nil, goTooOldError{fmt.Errorf("unsupported version of go: %s: %s", exitErr, stderr)} - } - - // Related to #24854 - if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "unexpected directory layout") { - return nil, friendlyErr - } - - // Is there an error running the C compiler in cgo? This will be reported in the "Error" field - // and should be suppressed by go list -e. - // - // This condition is not perfect yet because the error message can include other error messages than runtime/cgo. - isPkgPathRune := func(r rune) bool { - // From https://golang.org/ref/spec#Import_declarations: - // Implementation restriction: A compiler may restrict ImportPaths to non-empty strings - // using only characters belonging to Unicode's L, M, N, P, and S general categories - // (the Graphic characters without spaces) and may also exclude the - // characters !"#$%&'()*,:;<=>?[\]^`{|} and the Unicode replacement character U+FFFD. - return unicode.IsOneOf([]*unicode.RangeTable{unicode.L, unicode.M, unicode.N, unicode.P, unicode.S}, r) && - !strings.ContainsRune("!\"#$%&'()*,:;<=>?[\\]^`{|}\uFFFD", r) - } - // golang/go#36770: Handle case where cmd/go prints module download messages before the error. - msg := stderr.String() - for strings.HasPrefix(msg, "go: downloading") { - msg = msg[strings.IndexRune(msg, '\n')+1:] - } - if len(stderr.String()) > 0 && strings.HasPrefix(stderr.String(), "# ") { - msg := msg[len("# "):] - if strings.HasPrefix(strings.TrimLeftFunc(msg, isPkgPathRune), "\n") { - return stdout, nil - } - // Treat pkg-config errors as a special case (golang.org/issue/36770). - if strings.HasPrefix(msg, "pkg-config") { - return stdout, nil - } - } - - // This error only appears in stderr. 
See golang.org/cl/166398 for a fix in go list to show - // the error in the Err section of stdout in case -e option is provided. - // This fix is provided for backwards compatibility. - if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "named files must be .go files") { - output := fmt.Sprintf(`{"ImportPath": "command-line-arguments","Incomplete": true,"Error": {"Pos": "","Err": %q}}`, - strings.Trim(stderr.String(), "\n")) - return bytes.NewBufferString(output), nil - } - - // Similar to the previous error, but currently lacks a fix in Go. - if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "named files must all be in one directory") { - output := fmt.Sprintf(`{"ImportPath": "command-line-arguments","Incomplete": true,"Error": {"Pos": "","Err": %q}}`, - strings.Trim(stderr.String(), "\n")) - return bytes.NewBufferString(output), nil - } - - // Backwards compatibility for Go 1.11 because 1.12 and 1.13 put the directory in the ImportPath. - // If the package doesn't exist, put the absolute path of the directory into the error message, - // as Go 1.13 list does. - const noSuchDirectory = "no such directory" - if len(stderr.String()) > 0 && strings.Contains(stderr.String(), noSuchDirectory) { - errstr := stderr.String() - abspath := strings.TrimSpace(errstr[strings.Index(errstr, noSuchDirectory)+len(noSuchDirectory):]) - output := fmt.Sprintf(`{"ImportPath": %q,"Incomplete": true,"Error": {"Pos": "","Err": %q}}`, - abspath, strings.Trim(stderr.String(), "\n")) - return bytes.NewBufferString(output), nil - } - - // Workaround for #29280: go list -e has incorrect behavior when an ad-hoc package doesn't exist. - // Note that the error message we look for in this case is different that the one looked for above. - if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "no such file or directory") { - output := fmt.Sprintf(`{"ImportPath": "command-line-arguments","Incomplete": true,"Error": {"Pos": "","Err": %q}}`, - strings.Trim(stderr.String(), "\n")) - return bytes.NewBufferString(output), nil - } - - // Workaround for #34273. go list -e with GO111MODULE=on has incorrect behavior when listing a - // directory outside any module. - if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "outside available modules") { - output := fmt.Sprintf(`{"ImportPath": %q,"Incomplete": true,"Error": {"Pos": "","Err": %q}}`, - // TODO(matloob): command-line-arguments isn't correct here. - "command-line-arguments", strings.Trim(stderr.String(), "\n")) - return bytes.NewBufferString(output), nil - } - - // Another variation of the previous error - if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "outside module root") { - output := fmt.Sprintf(`{"ImportPath": %q,"Incomplete": true,"Error": {"Pos": "","Err": %q}}`, - // TODO(matloob): command-line-arguments isn't correct here. - "command-line-arguments", strings.Trim(stderr.String(), "\n")) - return bytes.NewBufferString(output), nil - } - - // Workaround for an instance of golang.org/issue/26755: go list -e will return a non-zero exit - // status if there's a dependency on a package that doesn't exist. But it should return - // a zero exit status and set an error on that package. - if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "no Go files in") { - // Don't clobber stdout if `go list` actually returned something. 
- if len(stdout.String()) > 0 { - return stdout, nil - } - // try to extract package name from string - stderrStr := stderr.String() - var importPath string - colon := strings.Index(stderrStr, ":") - if colon > 0 && strings.HasPrefix(stderrStr, "go build ") { - importPath = stderrStr[len("go build "):colon] - } - output := fmt.Sprintf(`{"ImportPath": %q,"Incomplete": true,"Error": {"Pos": "","Err": %q}}`, - importPath, strings.Trim(stderrStr, "\n")) - return bytes.NewBufferString(output), nil - } - - // Export mode entails a build. - // If that build fails, errors appear on stderr - // (despite the -e flag) and the Export field is blank. - // Do not fail in that case. - // The same is true if an ad-hoc package given to go list doesn't exist. - // TODO(matloob): Remove these once we can depend on go list to exit with a zero status with -e even when - // packages don't exist or a build fails. - if !usesExportData(cfg) && !containsGoFile(args) { - return nil, friendlyErr - } - } - return stdout, nil -} - -// OverlayJSON is the format overlay files are expected to be in. -// The Replace map maps from overlaid paths to replacement paths: -// the Go command will forward all reads trying to open -// each overlaid path to its replacement path, or consider the overlaid -// path not to exist if the replacement path is empty. -// -// From golang/go#39958. -type OverlayJSON struct { - Replace map[string]string `json:"replace,omitempty"` -} - -// writeOverlays writes out files for go list's -overlay flag, as described -// above. -func (state *golistState) writeOverlays() (filename string, cleanup func(), err error) { - // Do nothing if there are no overlays in the config. - if len(state.cfg.Overlay) == 0 { - return "", func() {}, nil - } - dir, err := os.MkdirTemp("", "gopackages-*") - if err != nil { - return "", nil, err - } - // The caller must clean up this directory, unless this function returns an - // error. - cleanup = func() { - os.RemoveAll(dir) - } - defer func() { - if err != nil { - cleanup() - } - }() - overlays := map[string]string{} - for k, v := range state.cfg.Overlay { - // Create a unique filename for the overlaid files, to avoid - // creating nested directories. - noSeparator := strings.Join(strings.Split(filepath.ToSlash(k), "/"), "") - f, err := os.CreateTemp(dir, fmt.Sprintf("*-%s", noSeparator)) - if err != nil { - return "", func() {}, err - } - if _, err := f.Write(v); err != nil { - return "", func() {}, err - } - if err := f.Close(); err != nil { - return "", func() {}, err - } - overlays[k] = f.Name() - } - b, err := json.Marshal(OverlayJSON{Replace: overlays}) - if err != nil { - return "", func() {}, err - } - // Write out the overlay file that contains the filepath mappings. 
- filename = filepath.Join(dir, "overlay.json") - if err := os.WriteFile(filename, b, 0665); err != nil { - return "", func() {}, err - } - return filename, cleanup, nil -} - -func containsGoFile(s []string) bool { - for _, f := range s { - if strings.HasSuffix(f, ".go") { - return true - } - } - return false -} - -func cmdDebugStr(cmd *exec.Cmd) string { - env := make(map[string]string) - for _, kv := range cmd.Env { - split := strings.SplitN(kv, "=", 2) - k, v := split[0], split[1] - env[k] = v - } - - var args []string - for _, arg := range cmd.Args { - quoted := strconv.Quote(arg) - if quoted[1:len(quoted)-1] != arg || strings.Contains(arg, " ") { - args = append(args, quoted) - } else { - args = append(args, arg) - } - } - return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v GOPROXY=%v PWD=%v %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["GOPROXY"], env["PWD"], strings.Join(args, " ")) -} diff --git a/go/extractor/vendor/golang.org/x/tools/go/packages/golist_overlay.go b/go/extractor/vendor/golang.org/x/tools/go/packages/golist_overlay.go deleted file mode 100644 index d823c474ad3a..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/go/packages/golist_overlay.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package packages - -import ( - "encoding/json" - "path/filepath" - - "golang.org/x/tools/internal/gocommand" -) - -// determineRootDirs returns a mapping from absolute directories that could -// contain code to their corresponding import path prefixes. -func (state *golistState) determineRootDirs() (map[string]string, error) { - env, err := state.getEnv() - if err != nil { - return nil, err - } - if env["GOMOD"] != "" { - state.rootsOnce.Do(func() { - state.rootDirs, state.rootDirsError = state.determineRootDirsModules() - }) - } else { - state.rootsOnce.Do(func() { - state.rootDirs, state.rootDirsError = state.determineRootDirsGOPATH() - }) - } - return state.rootDirs, state.rootDirsError -} - -func (state *golistState) determineRootDirsModules() (map[string]string, error) { - // List all of the modules--the first will be the directory for the main - // module. Any replaced modules will also need to be treated as roots. - // Editing files in the module cache isn't a great idea, so we don't - // plan to ever support that. - out, err := state.invokeGo("list", "-m", "-json", "all") - if err != nil { - // 'go list all' will fail if we're outside of a module and - // GO111MODULE=on. Try falling back without 'all'. - var innerErr error - out, innerErr = state.invokeGo("list", "-m", "-json") - if innerErr != nil { - return nil, err - } - } - roots := map[string]string{} - modules := map[string]string{} - var i int - for dec := json.NewDecoder(out); dec.More(); { - mod := new(gocommand.ModuleJSON) - if err := dec.Decode(mod); err != nil { - return nil, err - } - if mod.Dir != "" && mod.Path != "" { - // This is a valid module; add it to the map. - absDir, err := filepath.Abs(mod.Dir) - if err != nil { - return nil, err - } - modules[absDir] = mod.Path - // The first result is the main module. 
- if i == 0 || mod.Replace != nil && mod.Replace.Path != "" { - roots[absDir] = mod.Path - } - } - i++ - } - return roots, nil -} - -func (state *golistState) determineRootDirsGOPATH() (map[string]string, error) { - m := map[string]string{} - for _, dir := range filepath.SplitList(state.mustGetEnv()["GOPATH"]) { - absDir, err := filepath.Abs(dir) - if err != nil { - return nil, err - } - m[filepath.Join(absDir, "src")] = "" - } - return m, nil -} diff --git a/go/extractor/vendor/golang.org/x/tools/go/packages/loadmode_string.go b/go/extractor/vendor/golang.org/x/tools/go/packages/loadmode_string.go deleted file mode 100644 index 5c080d21b54a..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/go/packages/loadmode_string.go +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package packages - -import ( - "fmt" - "strings" -) - -var allModes = []LoadMode{ - NeedName, - NeedFiles, - NeedCompiledGoFiles, - NeedImports, - NeedDeps, - NeedExportFile, - NeedTypes, - NeedSyntax, - NeedTypesInfo, - NeedTypesSizes, -} - -var modeStrings = []string{ - "NeedName", - "NeedFiles", - "NeedCompiledGoFiles", - "NeedImports", - "NeedDeps", - "NeedExportFile", - "NeedTypes", - "NeedSyntax", - "NeedTypesInfo", - "NeedTypesSizes", -} - -func (mod LoadMode) String() string { - m := mod - if m == 0 { - return "LoadMode(0)" - } - var out []string - for i, x := range allModes { - if x > m { - break - } - if (m & x) != 0 { - out = append(out, modeStrings[i]) - m = m ^ x - } - } - if m != 0 { - out = append(out, "Unknown") - } - return fmt.Sprintf("LoadMode(%s)", strings.Join(out, "|")) -} diff --git a/go/extractor/vendor/golang.org/x/tools/go/packages/packages.go b/go/extractor/vendor/golang.org/x/tools/go/packages/packages.go deleted file mode 100644 index f33b0afc22cf..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/go/packages/packages.go +++ /dev/null @@ -1,1313 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package packages - -// See doc.go for package documentation and implementation notes. - -import ( - "context" - "encoding/json" - "fmt" - "go/ast" - "go/parser" - "go/scanner" - "go/token" - "go/types" - "io" - "log" - "os" - "path/filepath" - "runtime" - "strings" - "sync" - "time" - - "golang.org/x/tools/go/gcexportdata" - "golang.org/x/tools/internal/gocommand" - "golang.org/x/tools/internal/packagesinternal" - "golang.org/x/tools/internal/typesinternal" - "golang.org/x/tools/internal/versions" -) - -// A LoadMode controls the amount of detail to return when loading. -// The bits below can be combined to specify which fields should be -// filled in the result packages. -// The zero value is a special case, equivalent to combining -// the NeedName, NeedFiles, and NeedCompiledGoFiles bits. -// ID and Errors (if present) will always be filled. -// Load may return more information than requested. -type LoadMode int - -const ( - // NeedName adds Name and PkgPath. - NeedName LoadMode = 1 << iota - - // NeedFiles adds GoFiles and OtherFiles. - NeedFiles - - // NeedCompiledGoFiles adds CompiledGoFiles. - NeedCompiledGoFiles - - // NeedImports adds Imports. If NeedDeps is not set, the Imports field will contain - // "placeholder" Packages with only the ID set. 
- NeedImports - - // NeedDeps adds the fields requested by the LoadMode in the packages in Imports. - NeedDeps - - // NeedExportFile adds ExportFile. - NeedExportFile - - // NeedTypes adds Types, Fset, and IllTyped. - NeedTypes - - // NeedSyntax adds Syntax. - NeedSyntax - - // NeedTypesInfo adds TypesInfo. - NeedTypesInfo - - // NeedTypesSizes adds TypesSizes. - NeedTypesSizes - - // needInternalDepsErrors adds the internal deps errors field for use by gopls. - needInternalDepsErrors - - // needInternalForTest adds the internal forTest field. - // Tests must also be set on the context for this field to be populated. - needInternalForTest - - // typecheckCgo enables full support for type checking cgo. Requires Go 1.15+. - // Modifies CompiledGoFiles and Types, and has no effect on its own. - typecheckCgo - - // NeedModule adds Module. - NeedModule - - // NeedEmbedFiles adds EmbedFiles. - NeedEmbedFiles - - // NeedEmbedPatterns adds EmbedPatterns. - NeedEmbedPatterns -) - -const ( - // Deprecated: LoadFiles exists for historical compatibility - // and should not be used. Please directly specify the needed fields using the Need values. - LoadFiles = NeedName | NeedFiles | NeedCompiledGoFiles - - // Deprecated: LoadImports exists for historical compatibility - // and should not be used. Please directly specify the needed fields using the Need values. - LoadImports = LoadFiles | NeedImports - - // Deprecated: LoadTypes exists for historical compatibility - // and should not be used. Please directly specify the needed fields using the Need values. - LoadTypes = LoadImports | NeedTypes | NeedTypesSizes - - // Deprecated: LoadSyntax exists for historical compatibility - // and should not be used. Please directly specify the needed fields using the Need values. - LoadSyntax = LoadTypes | NeedSyntax | NeedTypesInfo - - // Deprecated: LoadAllSyntax exists for historical compatibility - // and should not be used. Please directly specify the needed fields using the Need values. - LoadAllSyntax = LoadSyntax | NeedDeps - - // Deprecated: NeedExportsFile is a historical misspelling of NeedExportFile. - NeedExportsFile = NeedExportFile -) - -// A Config specifies details about how packages should be loaded. -// The zero value is a valid configuration. -// Calls to Load do not modify this struct. -type Config struct { - // Mode controls the level of information returned for each package. - Mode LoadMode - - // Context specifies the context for the load operation. - // If the context is cancelled, the loader may stop early - // and return an ErrCancelled error. - // If Context is nil, the load cannot be cancelled. - Context context.Context - - // Logf is the logger for the config. - // If the user provides a logger, debug logging is enabled. - // If the GOPACKAGESDEBUG environment variable is set to true, - // but the logger is nil, default to log.Printf. - Logf func(format string, args ...interface{}) - - // Dir is the directory in which to run the build system's query tool - // that provides information about the packages. - // If Dir is empty, the tool is run in the current directory. - Dir string - - // Env is the environment to use when invoking the build system's query tool. - // If Env is nil, the current environment is used. - // As in os/exec's Cmd, only the last value in the slice for - // each environment key is used. 
To specify the setting of only - // a few variables, append to the current environment, as in: - // - // opt.Env = append(os.Environ(), "GOOS=plan9", "GOARCH=386") - // - Env []string - - // gocmdRunner guards go command calls from concurrency errors. - gocmdRunner *gocommand.Runner - - // BuildFlags is a list of command-line flags to be passed through to - // the build system's query tool. - BuildFlags []string - - // modFile will be used for -modfile in go command invocations. - modFile string - - // modFlag will be used for -modfile in go command invocations. - modFlag string - - // Fset provides source position information for syntax trees and types. - // If Fset is nil, Load will use a new fileset, but preserve Fset's value. - Fset *token.FileSet - - // ParseFile is called to read and parse each file - // when preparing a package's type-checked syntax tree. - // It must be safe to call ParseFile simultaneously from multiple goroutines. - // If ParseFile is nil, the loader will uses parser.ParseFile. - // - // ParseFile should parse the source from src and use filename only for - // recording position information. - // - // An application may supply a custom implementation of ParseFile - // to change the effective file contents or the behavior of the parser, - // or to modify the syntax tree. For example, selectively eliminating - // unwanted function bodies can significantly accelerate type checking. - ParseFile func(fset *token.FileSet, filename string, src []byte) (*ast.File, error) - - // If Tests is set, the loader includes not just the packages - // matching a particular pattern but also any related test packages, - // including test-only variants of the package and the test executable. - // - // For example, when using the go command, loading "fmt" with Tests=true - // returns four packages, with IDs "fmt" (the standard package), - // "fmt [fmt.test]" (the package as compiled for the test), - // "fmt_test" (the test functions from source files in package fmt_test), - // and "fmt.test" (the test binary). - // - // In build systems with explicit names for tests, - // setting Tests may have no effect. - Tests bool - - // Overlay provides a mapping of absolute file paths to file contents. - // If the file with the given path already exists, the parser will use the - // alternative file contents provided by the map. - // - // Overlays provide incomplete support for when a given file doesn't - // already exist on disk. See the package doc above for more details. - Overlay map[string][]byte -} - -// Load loads and returns the Go packages named by the given patterns. -// -// Config specifies loading options; -// nil behaves the same as an empty Config. -// -// Load returns an error if any of the patterns was invalid -// as defined by the underlying build system. -// It may return an empty list of packages without an error, -// for instance for an empty expansion of a valid wildcard. -// Errors associated with a particular package are recorded in the -// corresponding Package's Errors list, and do not cause Load to -// return an error. Clients may need to handle such errors before -// proceeding with further analysis. The PrintErrors function is -// provided for convenient display of all errors. -func Load(cfg *Config, patterns ...string) ([]*Package, error) { - ld := newLoader(cfg) - response, external, err := defaultDriver(&ld.Config, patterns...) 
- if err != nil { - return nil, err - } - - ld.sizes = types.SizesFor(response.Compiler, response.Arch) - if ld.sizes == nil && ld.Config.Mode&(NeedTypes|NeedTypesSizes|NeedTypesInfo) != 0 { - // Type size information is needed but unavailable. - if external { - // An external driver may fail to populate the Compiler/GOARCH fields, - // especially since they are relatively new (see #63700). - // Provide a sensible fallback in this case. - ld.sizes = types.SizesFor("gc", runtime.GOARCH) - if ld.sizes == nil { // gccgo-only arch - ld.sizes = types.SizesFor("gc", "amd64") - } - } else { - // Go list should never fail to deliver accurate size information. - // Reject the whole Load since the error is the same for every package. - return nil, fmt.Errorf("can't determine type sizes for compiler %q on GOARCH %q", - response.Compiler, response.Arch) - } - } - - return ld.refine(response) -} - -// defaultDriver is a driver that implements go/packages' fallback behavior. -// It will try to request to an external driver, if one exists. If there's -// no external driver, or the driver returns a response with NotHandled set, -// defaultDriver will fall back to the go list driver. -// The boolean result indicates that an external driver handled the request. -func defaultDriver(cfg *Config, patterns ...string) (*DriverResponse, bool, error) { - if driver := findExternalDriver(cfg); driver != nil { - response, err := driver(cfg, patterns...) - if err != nil { - return nil, false, err - } else if !response.NotHandled { - return response, true, nil - } - // (fall through) - } - - response, err := goListDriver(cfg, patterns...) - if err != nil { - return nil, false, err - } - return response, false, nil -} - -// A Package describes a loaded Go package. -type Package struct { - // ID is a unique identifier for a package, - // in a syntax provided by the underlying build system. - // - // Because the syntax varies based on the build system, - // clients should treat IDs as opaque and not attempt to - // interpret them. - ID string - - // Name is the package name as it appears in the package source code. - Name string - - // PkgPath is the package path as used by the go/types package. - PkgPath string - - // Errors contains any errors encountered querying the metadata - // of the package, or while parsing or type-checking its files. - Errors []Error - - // TypeErrors contains the subset of errors produced during type checking. - TypeErrors []types.Error - - // GoFiles lists the absolute file paths of the package's Go source files. - // It may include files that should not be compiled, for example because - // they contain non-matching build tags, are documentary pseudo-files such as - // unsafe/unsafe.go or builtin/builtin.go, or are subject to cgo preprocessing. - GoFiles []string - - // CompiledGoFiles lists the absolute file paths of the package's source - // files that are suitable for type checking. - // This may differ from GoFiles if files are processed before compilation. - CompiledGoFiles []string - - // OtherFiles lists the absolute file paths of the package's non-Go source files, - // including assembly, C, C++, Fortran, Objective-C, SWIG, and so on. - OtherFiles []string - - // EmbedFiles lists the absolute file paths of the package's files - // embedded with go:embed. - EmbedFiles []string - - // EmbedPatterns lists the absolute file patterns of the package's - // files embedded with go:embed. 
- EmbedPatterns []string - - // IgnoredFiles lists source files that are not part of the package - // using the current build configuration but that might be part of - // the package using other build configurations. - IgnoredFiles []string - - // ExportFile is the absolute path to a file containing type - // information for the package as provided by the build system. - ExportFile string - - // Imports maps import paths appearing in the package's Go source files - // to corresponding loaded Packages. - Imports map[string]*Package - - // Types provides type information for the package. - // The NeedTypes LoadMode bit sets this field for packages matching the - // patterns; type information for dependencies may be missing or incomplete, - // unless NeedDeps and NeedImports are also set. - Types *types.Package - - // Fset provides position information for Types, TypesInfo, and Syntax. - // It is set only when Types is set. - Fset *token.FileSet - - // IllTyped indicates whether the package or any dependency contains errors. - // It is set only when Types is set. - IllTyped bool - - // Syntax is the package's syntax trees, for the files listed in CompiledGoFiles. - // - // The NeedSyntax LoadMode bit populates this field for packages matching the patterns. - // If NeedDeps and NeedImports are also set, this field will also be populated - // for dependencies. - // - // Syntax is kept in the same order as CompiledGoFiles, with the caveat that nils are - // removed. If parsing returned nil, Syntax may be shorter than CompiledGoFiles. - Syntax []*ast.File - - // TypesInfo provides type information about the package's syntax trees. - // It is set only when Syntax is set. - TypesInfo *types.Info - - // TypesSizes provides the effective size function for types in TypesInfo. - TypesSizes types.Sizes - - // forTest is the package under test, if any. - forTest string - - // depsErrors is the DepsErrors field from the go list response, if any. - depsErrors []*packagesinternal.PackageError - - // module is the module information for the package if it exists. - Module *Module -} - -// Module provides module information for a package. -type Module struct { - Path string // module path - Version string // module version - Replace *Module // replaced by this module - Time *time.Time // time version was created - Main bool // is this the main module? - Indirect bool // is this module only an indirect dependency of main module? - Dir string // directory holding files for this module, if any - GoMod string // path to go.mod file used when loading this module, if any - GoVersion string // go version used in module - Error *ModuleError // error loading module -} - -// ModuleError holds errors loading a module. -type ModuleError struct { - Err string // the error itself -} - -func init() { - packagesinternal.GetForTest = func(p interface{}) string { - return p.(*Package).forTest - } - packagesinternal.GetDepsErrors = func(p interface{}) []*packagesinternal.PackageError { - return p.(*Package).depsErrors - } - packagesinternal.SetModFile = func(config interface{}, value string) { - config.(*Config).modFile = value - } - packagesinternal.SetModFlag = func(config interface{}, value string) { - config.(*Config).modFlag = value - } - packagesinternal.TypecheckCgo = int(typecheckCgo) - packagesinternal.DepsErrors = int(needInternalDepsErrors) - packagesinternal.ForTest = int(needInternalForTest) -} - -// An Error describes a problem with a package's metadata, syntax, or types. 
-type Error struct { - Pos string // "file:line:col" or "file:line" or "" or "-" - Msg string - Kind ErrorKind -} - -// ErrorKind describes the source of the error, allowing the user to -// differentiate between errors generated by the driver, the parser, or the -// type-checker. -type ErrorKind int - -const ( - UnknownError ErrorKind = iota - ListError - ParseError - TypeError -) - -func (err Error) Error() string { - pos := err.Pos - if pos == "" { - pos = "-" // like token.Position{}.String() - } - return pos + ": " + err.Msg -} - -// flatPackage is the JSON form of Package -// It drops all the type and syntax fields, and transforms the Imports -// -// TODO(adonovan): identify this struct with Package, effectively -// publishing the JSON protocol. -type flatPackage struct { - ID string - Name string `json:",omitempty"` - PkgPath string `json:",omitempty"` - Errors []Error `json:",omitempty"` - GoFiles []string `json:",omitempty"` - CompiledGoFiles []string `json:",omitempty"` - OtherFiles []string `json:",omitempty"` - EmbedFiles []string `json:",omitempty"` - EmbedPatterns []string `json:",omitempty"` - IgnoredFiles []string `json:",omitempty"` - ExportFile string `json:",omitempty"` - Imports map[string]string `json:",omitempty"` -} - -// MarshalJSON returns the Package in its JSON form. -// For the most part, the structure fields are written out unmodified, and -// the type and syntax fields are skipped. -// The imports are written out as just a map of path to package id. -// The errors are written using a custom type that tries to preserve the -// structure of error types we know about. -// -// This method exists to enable support for additional build systems. It is -// not intended for use by clients of the API and we may change the format. -func (p *Package) MarshalJSON() ([]byte, error) { - flat := &flatPackage{ - ID: p.ID, - Name: p.Name, - PkgPath: p.PkgPath, - Errors: p.Errors, - GoFiles: p.GoFiles, - CompiledGoFiles: p.CompiledGoFiles, - OtherFiles: p.OtherFiles, - EmbedFiles: p.EmbedFiles, - EmbedPatterns: p.EmbedPatterns, - IgnoredFiles: p.IgnoredFiles, - ExportFile: p.ExportFile, - } - if len(p.Imports) > 0 { - flat.Imports = make(map[string]string, len(p.Imports)) - for path, ipkg := range p.Imports { - flat.Imports[path] = ipkg.ID - } - } - return json.Marshal(flat) -} - -// UnmarshalJSON reads in a Package from its JSON format. -// See MarshalJSON for details about the format accepted. 
-func (p *Package) UnmarshalJSON(b []byte) error { - flat := &flatPackage{} - if err := json.Unmarshal(b, &flat); err != nil { - return err - } - *p = Package{ - ID: flat.ID, - Name: flat.Name, - PkgPath: flat.PkgPath, - Errors: flat.Errors, - GoFiles: flat.GoFiles, - CompiledGoFiles: flat.CompiledGoFiles, - OtherFiles: flat.OtherFiles, - EmbedFiles: flat.EmbedFiles, - EmbedPatterns: flat.EmbedPatterns, - ExportFile: flat.ExportFile, - } - if len(flat.Imports) > 0 { - p.Imports = make(map[string]*Package, len(flat.Imports)) - for path, id := range flat.Imports { - p.Imports[path] = &Package{ID: id} - } - } - return nil -} - -func (p *Package) String() string { return p.ID } - -// loaderPackage augments Package with state used during the loading phase -type loaderPackage struct { - *Package - importErrors map[string]error // maps each bad import to its error - loadOnce sync.Once - color uint8 // for cycle detection - needsrc bool // load from source (Mode >= LoadTypes) - needtypes bool // type information is either requested or depended on - initial bool // package was matched by a pattern - goVersion int // minor version number of go command on PATH -} - -// loader holds the working state of a single call to load. -type loader struct { - pkgs map[string]*loaderPackage - Config - sizes types.Sizes // non-nil if needed by mode - parseCache map[string]*parseValue - parseCacheMu sync.Mutex - exportMu sync.Mutex // enforces mutual exclusion of exportdata operations - - // Config.Mode contains the implied mode (see impliedLoadMode). - // Implied mode contains all the fields we need the data for. - // In requestedMode there are the actually requested fields. - // We'll zero them out before returning packages to the user. - // This makes it easier for us to get the conditions where - // we need certain modes right. - requestedMode LoadMode -} - -type parseValue struct { - f *ast.File - err error - ready chan struct{} -} - -func newLoader(cfg *Config) *loader { - ld := &loader{ - parseCache: map[string]*parseValue{}, - } - if cfg != nil { - ld.Config = *cfg - // If the user has provided a logger, use it. - ld.Config.Logf = cfg.Logf - } - if ld.Config.Logf == nil { - // If the GOPACKAGESDEBUG environment variable is set to true, - // but the user has not provided a logger, default to log.Printf. - if debug { - ld.Config.Logf = log.Printf - } else { - ld.Config.Logf = func(format string, args ...interface{}) {} - } - } - if ld.Config.Mode == 0 { - ld.Config.Mode = NeedName | NeedFiles | NeedCompiledGoFiles // Preserve zero behavior of Mode for backwards compatibility. - } - if ld.Config.Env == nil { - ld.Config.Env = os.Environ() - } - if ld.Config.gocmdRunner == nil { - ld.Config.gocmdRunner = &gocommand.Runner{} - } - if ld.Context == nil { - ld.Context = context.Background() - } - if ld.Dir == "" { - if dir, err := os.Getwd(); err == nil { - ld.Dir = dir - } - } - - // Save the actually requested fields. We'll zero them out before returning packages to the user. - ld.requestedMode = ld.Mode - ld.Mode = impliedLoadMode(ld.Mode) - - if ld.Mode&NeedTypes != 0 || ld.Mode&NeedSyntax != 0 { - if ld.Fset == nil { - ld.Fset = token.NewFileSet() - } - - // ParseFile is required even in LoadTypes mode - // because we load source if export data is missing. 
- if ld.ParseFile == nil { - ld.ParseFile = func(fset *token.FileSet, filename string, src []byte) (*ast.File, error) { - const mode = parser.AllErrors | parser.ParseComments - return parser.ParseFile(fset, filename, src, mode) - } - } - } - - return ld -} - -// refine connects the supplied packages into a graph and then adds type -// and syntax information as requested by the LoadMode. -func (ld *loader) refine(response *DriverResponse) ([]*Package, error) { - roots := response.Roots - rootMap := make(map[string]int, len(roots)) - for i, root := range roots { - rootMap[root] = i - } - ld.pkgs = make(map[string]*loaderPackage) - // first pass, fixup and build the map and roots - var initial = make([]*loaderPackage, len(roots)) - for _, pkg := range response.Packages { - rootIndex := -1 - if i, found := rootMap[pkg.ID]; found { - rootIndex = i - } - - // Overlays can invalidate export data. - // TODO(matloob): make this check fine-grained based on dependencies on overlaid files - exportDataInvalid := len(ld.Overlay) > 0 || pkg.ExportFile == "" && pkg.PkgPath != "unsafe" - // This package needs type information if the caller requested types and the package is - // either a root, or it's a non-root and the user requested dependencies ... - needtypes := (ld.Mode&NeedTypes|NeedTypesInfo != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) - // This package needs source if the call requested source (or types info, which implies source) - // and the package is either a root, or itas a non- root and the user requested dependencies... - needsrc := ((ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) || - // ... or if we need types and the exportData is invalid. We fall back to (incompletely) - // typechecking packages from source if they fail to compile. - (ld.Mode&(NeedTypes|NeedTypesInfo) != 0 && exportDataInvalid)) && pkg.PkgPath != "unsafe" - lpkg := &loaderPackage{ - Package: pkg, - needtypes: needtypes, - needsrc: needsrc, - goVersion: response.GoVersion, - } - ld.pkgs[lpkg.ID] = lpkg - if rootIndex >= 0 { - initial[rootIndex] = lpkg - lpkg.initial = true - } - } - for i, root := range roots { - if initial[i] == nil { - return nil, fmt.Errorf("root package %v is missing", root) - } - } - - if ld.Mode&NeedImports != 0 { - // Materialize the import graph. - - const ( - white = 0 // new - grey = 1 // in progress - black = 2 // complete - ) - - // visit traverses the import graph, depth-first, - // and materializes the graph as Packages.Imports. - // - // Valid imports are saved in the Packages.Import map. - // Invalid imports (cycles and missing nodes) are saved in the importErrors map. - // Thus, even in the presence of both kinds of errors, - // the Import graph remains a DAG. - // - // visit returns whether the package needs src or has a transitive - // dependency on a package that does. These are the only packages - // for which we load source code. 
- var stack []*loaderPackage - var visit func(lpkg *loaderPackage) bool - visit = func(lpkg *loaderPackage) bool { - switch lpkg.color { - case black: - return lpkg.needsrc - case grey: - panic("internal error: grey node") - } - lpkg.color = grey - stack = append(stack, lpkg) // push - stubs := lpkg.Imports // the structure form has only stubs with the ID in the Imports - lpkg.Imports = make(map[string]*Package, len(stubs)) - for importPath, ipkg := range stubs { - var importErr error - imp := ld.pkgs[ipkg.ID] - if imp == nil { - // (includes package "C" when DisableCgo) - importErr = fmt.Errorf("missing package: %q", ipkg.ID) - } else if imp.color == grey { - importErr = fmt.Errorf("import cycle: %s", stack) - } - if importErr != nil { - if lpkg.importErrors == nil { - lpkg.importErrors = make(map[string]error) - } - lpkg.importErrors[importPath] = importErr - continue - } - - if visit(imp) { - lpkg.needsrc = true - } - lpkg.Imports[importPath] = imp.Package - } - - // Complete type information is required for the - // immediate dependencies of each source package. - if lpkg.needsrc && ld.Mode&NeedTypes != 0 { - for _, ipkg := range lpkg.Imports { - ld.pkgs[ipkg.ID].needtypes = true - } - } - - // NeedTypeSizes causes TypeSizes to be set even - // on packages for which types aren't needed. - if ld.Mode&NeedTypesSizes != 0 { - lpkg.TypesSizes = ld.sizes - } - stack = stack[:len(stack)-1] // pop - lpkg.color = black - - return lpkg.needsrc - } - - // For each initial package, create its import DAG. - for _, lpkg := range initial { - visit(lpkg) - } - - } else { - // !NeedImports: drop the stub (ID-only) import packages - // that we are not even going to try to resolve. - for _, lpkg := range initial { - lpkg.Imports = nil - } - } - - // Load type data and syntax if needed, starting at - // the initial packages (roots of the import DAG). - if ld.Mode&NeedTypes != 0 || ld.Mode&NeedSyntax != 0 { - var wg sync.WaitGroup - for _, lpkg := range initial { - wg.Add(1) - go func(lpkg *loaderPackage) { - ld.loadRecursive(lpkg) - wg.Done() - }(lpkg) - } - wg.Wait() - } - - result := make([]*Package, len(initial)) - for i, lpkg := range initial { - result[i] = lpkg.Package - } - for i := range ld.pkgs { - // Clear all unrequested fields, - // to catch programs that use more than they request. - if ld.requestedMode&NeedName == 0 { - ld.pkgs[i].Name = "" - ld.pkgs[i].PkgPath = "" - } - if ld.requestedMode&NeedFiles == 0 { - ld.pkgs[i].GoFiles = nil - ld.pkgs[i].OtherFiles = nil - ld.pkgs[i].IgnoredFiles = nil - } - if ld.requestedMode&NeedEmbedFiles == 0 { - ld.pkgs[i].EmbedFiles = nil - } - if ld.requestedMode&NeedEmbedPatterns == 0 { - ld.pkgs[i].EmbedPatterns = nil - } - if ld.requestedMode&NeedCompiledGoFiles == 0 { - ld.pkgs[i].CompiledGoFiles = nil - } - if ld.requestedMode&NeedImports == 0 { - ld.pkgs[i].Imports = nil - } - if ld.requestedMode&NeedExportFile == 0 { - ld.pkgs[i].ExportFile = "" - } - if ld.requestedMode&NeedTypes == 0 { - ld.pkgs[i].Types = nil - ld.pkgs[i].Fset = nil - ld.pkgs[i].IllTyped = false - } - if ld.requestedMode&NeedSyntax == 0 { - ld.pkgs[i].Syntax = nil - } - if ld.requestedMode&NeedTypesInfo == 0 { - ld.pkgs[i].TypesInfo = nil - } - if ld.requestedMode&NeedTypesSizes == 0 { - ld.pkgs[i].TypesSizes = nil - } - if ld.requestedMode&NeedModule == 0 { - ld.pkgs[i].Module = nil - } - } - - return result, nil -} - -// loadRecursive loads the specified package and its dependencies, -// recursively, in parallel, in topological order. -// It is atomic and idempotent. 
-// Precondition: ld.Mode&NeedTypes. -func (ld *loader) loadRecursive(lpkg *loaderPackage) { - lpkg.loadOnce.Do(func() { - // Load the direct dependencies, in parallel. - var wg sync.WaitGroup - for _, ipkg := range lpkg.Imports { - imp := ld.pkgs[ipkg.ID] - wg.Add(1) - go func(imp *loaderPackage) { - ld.loadRecursive(imp) - wg.Done() - }(imp) - } - wg.Wait() - ld.loadPackage(lpkg) - }) -} - -// loadPackage loads the specified package. -// It must be called only once per Package, -// after immediate dependencies are loaded. -// Precondition: ld.Mode & NeedTypes. -func (ld *loader) loadPackage(lpkg *loaderPackage) { - if lpkg.PkgPath == "unsafe" { - // Fill in the blanks to avoid surprises. - lpkg.Types = types.Unsafe - lpkg.Fset = ld.Fset - lpkg.Syntax = []*ast.File{} - lpkg.TypesInfo = new(types.Info) - lpkg.TypesSizes = ld.sizes - return - } - - // Call NewPackage directly with explicit name. - // This avoids skew between golist and go/types when the files' - // package declarations are inconsistent. - lpkg.Types = types.NewPackage(lpkg.PkgPath, lpkg.Name) - lpkg.Fset = ld.Fset - - // Subtle: we populate all Types fields with an empty Package - // before loading export data so that export data processing - // never has to create a types.Package for an indirect dependency, - // which would then require that such created packages be explicitly - // inserted back into the Import graph as a final step after export data loading. - // (Hence this return is after the Types assignment.) - // The Diamond test exercises this case. - if !lpkg.needtypes && !lpkg.needsrc { - return - } - if !lpkg.needsrc { - if err := ld.loadFromExportData(lpkg); err != nil { - lpkg.Errors = append(lpkg.Errors, Error{ - Pos: "-", - Msg: err.Error(), - Kind: UnknownError, // e.g. can't find/open/parse export data - }) - } - return // not a source package, don't get syntax trees - } - - appendError := func(err error) { - // Convert various error types into the one true Error. - var errs []Error - switch err := err.(type) { - case Error: - // from driver - errs = append(errs, err) - - case *os.PathError: - // from parser - errs = append(errs, Error{ - Pos: err.Path + ":1", - Msg: err.Err.Error(), - Kind: ParseError, - }) - - case scanner.ErrorList: - // from parser - for _, err := range err { - errs = append(errs, Error{ - Pos: err.Pos.String(), - Msg: err.Msg, - Kind: ParseError, - }) - } - - case types.Error: - // from type checker - lpkg.TypeErrors = append(lpkg.TypeErrors, err) - errs = append(errs, Error{ - Pos: err.Fset.Position(err.Pos).String(), - Msg: err.Msg, - Kind: TypeError, - }) - - default: - // unexpected impoverished error from parser? - errs = append(errs, Error{ - Pos: "-", - Msg: err.Error(), - Kind: UnknownError, - }) - - // If you see this error message, please file a bug. - log.Printf("internal error: error %q (%T) without position", err, err) - } - - lpkg.Errors = append(lpkg.Errors, errs...) - } - - // If the go command on the PATH is newer than the runtime, - // then the go/{scanner,ast,parser,types} packages from the - // standard library may be unable to process the files - // selected by go list. - // - // There is currently no way to downgrade the effective - // version of the go command (see issue 52078), so we proceed - // with the newer go command but, in case of parse or type - // errors, we emit an additional diagnostic. 
- // - // See: - // - golang.org/issue/52078 (flag to set release tags) - // - golang.org/issue/50825 (gopls legacy version support) - // - golang.org/issue/55883 (go/packages confusing error) - // - // Should we assert a hard minimum of (currently) go1.16 here? - var runtimeVersion int - if _, err := fmt.Sscanf(runtime.Version(), "go1.%d", &runtimeVersion); err == nil && runtimeVersion < lpkg.goVersion { - defer func() { - if len(lpkg.Errors) > 0 { - appendError(Error{ - Pos: "-", - Msg: fmt.Sprintf("This application uses version go1.%d of the source-processing packages but runs version go1.%d of 'go list'. It may fail to process source files that rely on newer language features. If so, rebuild the application using a newer version of Go.", runtimeVersion, lpkg.goVersion), - Kind: UnknownError, - }) - } - }() - } - - if ld.Config.Mode&NeedTypes != 0 && len(lpkg.CompiledGoFiles) == 0 && lpkg.ExportFile != "" { - // The config requested loading sources and types, but sources are missing. - // Add an error to the package and fall back to loading from export data. - appendError(Error{"-", fmt.Sprintf("sources missing for package %s", lpkg.ID), ParseError}) - _ = ld.loadFromExportData(lpkg) // ignore any secondary errors - - return // can't get syntax trees for this package - } - - files, errs := ld.parseFiles(lpkg.CompiledGoFiles) - for _, err := range errs { - appendError(err) - } - - lpkg.Syntax = files - if ld.Config.Mode&NeedTypes == 0 { - return - } - - lpkg.TypesInfo = &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Instances: make(map[*ast.Ident]types.Instance), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - } - versions.InitFileVersions(lpkg.TypesInfo) - lpkg.TypesSizes = ld.sizes - - importer := importerFunc(func(path string) (*types.Package, error) { - if path == "unsafe" { - return types.Unsafe, nil - } - - // The imports map is keyed by import path. - ipkg := lpkg.Imports[path] - if ipkg == nil { - if err := lpkg.importErrors[path]; err != nil { - return nil, err - } - // There was skew between the metadata and the - // import declarations, likely due to an edit - // race, or because the ParseFile feature was - // used to supply alternative file contents. - return nil, fmt.Errorf("no metadata for %s", path) - } - - if ipkg.Types != nil && ipkg.Types.Complete() { - return ipkg.Types, nil - } - log.Fatalf("internal error: package %q without types was imported from %q", path, lpkg) - panic("unreachable") - }) - - // type-check - tc := &types.Config{ - Importer: importer, - - // Type-check bodies of functions only in initial packages. - // Example: for import graph A->B->C and initial packages {A,C}, - // we can ignore function bodies in B. 
- IgnoreFuncBodies: ld.Mode&NeedDeps == 0 && !lpkg.initial, - - Error: appendError, - Sizes: ld.sizes, // may be nil - } - if lpkg.Module != nil && lpkg.Module.GoVersion != "" { - typesinternal.SetGoVersion(tc, "go"+lpkg.Module.GoVersion) - } - if (ld.Mode & typecheckCgo) != 0 { - if !typesinternal.SetUsesCgo(tc) { - appendError(Error{ - Msg: "typecheckCgo requires Go 1.15+", - Kind: ListError, - }) - return - } - } - types.NewChecker(tc, ld.Fset, lpkg.Types, lpkg.TypesInfo).Files(lpkg.Syntax) - - lpkg.importErrors = nil // no longer needed - - // If !Cgo, the type-checker uses FakeImportC mode, so - // it doesn't invoke the importer for import "C", - // nor report an error for the import, - // or for any undefined C.f reference. - // We must detect this explicitly and correctly - // mark the package as IllTyped (by reporting an error). - // TODO(adonovan): if these errors are annoying, - // we could just set IllTyped quietly. - if tc.FakeImportC { - outer: - for _, f := range lpkg.Syntax { - for _, imp := range f.Imports { - if imp.Path.Value == `"C"` { - err := types.Error{Fset: ld.Fset, Pos: imp.Pos(), Msg: `import "C" ignored`} - appendError(err) - break outer - } - } - } - } - - // Record accumulated errors. - illTyped := len(lpkg.Errors) > 0 - if !illTyped { - for _, imp := range lpkg.Imports { - if imp.IllTyped { - illTyped = true - break - } - } - } - lpkg.IllTyped = illTyped -} - -// An importFunc is an implementation of the single-method -// types.Importer interface based on a function value. -type importerFunc func(path string) (*types.Package, error) - -func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } - -// We use a counting semaphore to limit -// the number of parallel I/O calls per process. -var ioLimit = make(chan bool, 20) - -func (ld *loader) parseFile(filename string) (*ast.File, error) { - ld.parseCacheMu.Lock() - v, ok := ld.parseCache[filename] - if ok { - // cache hit - ld.parseCacheMu.Unlock() - <-v.ready - } else { - // cache miss - v = &parseValue{ready: make(chan struct{})} - ld.parseCache[filename] = v - ld.parseCacheMu.Unlock() - - var src []byte - for f, contents := range ld.Config.Overlay { - if sameFile(f, filename) { - src = contents - } - } - var err error - if src == nil { - ioLimit <- true // wait - src, err = os.ReadFile(filename) - <-ioLimit // signal - } - if err != nil { - v.err = err - } else { - v.f, v.err = ld.ParseFile(ld.Fset, filename, src) - } - - close(v.ready) - } - return v.f, v.err -} - -// parseFiles reads and parses the Go source files and returns the ASTs -// of the ones that could be at least partially parsed, along with a -// list of I/O and parse errors encountered. -// -// Because files are scanned in parallel, the token.Pos -// positions of the resulting ast.Files are not ordered. -func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) { - var wg sync.WaitGroup - n := len(filenames) - parsed := make([]*ast.File, n) - errors := make([]error, n) - for i, file := range filenames { - if ld.Config.Context.Err() != nil { - parsed[i] = nil - errors[i] = ld.Config.Context.Err() - continue - } - wg.Add(1) - go func(i int, filename string) { - parsed[i], errors[i] = ld.parseFile(filename) - wg.Done() - }(i, file) - } - wg.Wait() - - // Eliminate nils, preserving order. 
- var o int - for _, f := range parsed { - if f != nil { - parsed[o] = f - o++ - } - } - parsed = parsed[:o] - - o = 0 - for _, err := range errors { - if err != nil { - errors[o] = err - o++ - } - } - errors = errors[:o] - - return parsed, errors -} - -// sameFile returns true if x and y have the same basename and denote -// the same file. -func sameFile(x, y string) bool { - if x == y { - // It could be the case that y doesn't exist. - // For instance, it may be an overlay file that - // hasn't been written to disk. To handle that case - // let x == y through. (We added the exact absolute path - // string to the CompiledGoFiles list, so the unwritten - // overlay case implies x==y.) - return true - } - if strings.EqualFold(filepath.Base(x), filepath.Base(y)) { // (optimisation) - if xi, err := os.Stat(x); err == nil { - if yi, err := os.Stat(y); err == nil { - return os.SameFile(xi, yi) - } - } - } - return false -} - -// loadFromExportData ensures that type information is present for the specified -// package, loading it from an export data file on the first request. -// On success it sets lpkg.Types to a new Package. -func (ld *loader) loadFromExportData(lpkg *loaderPackage) error { - if lpkg.PkgPath == "" { - log.Fatalf("internal error: Package %s has no PkgPath", lpkg) - } - - // Because gcexportdata.Read has the potential to create or - // modify the types.Package for each node in the transitive - // closure of dependencies of lpkg, all exportdata operations - // must be sequential. (Finer-grained locking would require - // changes to the gcexportdata API.) - // - // The exportMu lock guards the lpkg.Types field and the - // types.Package it points to, for each loaderPackage in the graph. - // - // Not all accesses to Package.Pkg need to be protected by exportMu: - // graph ordering ensures that direct dependencies of source - // packages are fully loaded before the importer reads their Pkg field. - ld.exportMu.Lock() - defer ld.exportMu.Unlock() - - if tpkg := lpkg.Types; tpkg != nil && tpkg.Complete() { - return nil // cache hit - } - - lpkg.IllTyped = true // fail safe - - if lpkg.ExportFile == "" { - // Errors while building export data will have been printed to stderr. - return fmt.Errorf("no export data file") - } - f, err := os.Open(lpkg.ExportFile) - if err != nil { - return err - } - defer f.Close() - - // Read gc export data. - // - // We don't currently support gccgo export data because all - // underlying workspaces use the gc toolchain. (Even build - // systems that support gccgo don't use it for workspace - // queries.) - r, err := gcexportdata.NewReader(f) - if err != nil { - return fmt.Errorf("reading %s: %v", lpkg.ExportFile, err) - } - - // Build the view. - // - // The gcexportdata machinery has no concept of package ID. - // It identifies packages by their PkgPath, which although not - // globally unique is unique within the scope of one invocation - // of the linker, type-checker, or gcexportdata. - // - // So, we must build a PkgPath-keyed view of the global - // (conceptually ID-keyed) cache of packages and pass it to - // gcexportdata. The view must contain every existing - // package that might possibly be mentioned by the - // current package---its transitive closure. - // - // In loadPackage, we unconditionally create a types.Package for - // each dependency so that export data loading does not - // create new ones. 
- // - // TODO(adonovan): it would be simpler and more efficient - // if the export data machinery invoked a callback to - // get-or-create a package instead of a map. - // - view := make(map[string]*types.Package) // view seen by gcexportdata - seen := make(map[*loaderPackage]bool) // all visited packages - var visit func(pkgs map[string]*Package) - visit = func(pkgs map[string]*Package) { - for _, p := range pkgs { - lpkg := ld.pkgs[p.ID] - if !seen[lpkg] { - seen[lpkg] = true - view[lpkg.PkgPath] = lpkg.Types - visit(lpkg.Imports) - } - } - } - visit(lpkg.Imports) - - viewLen := len(view) + 1 // adding the self package - // Parse the export data. - // (May modify incomplete packages in view but not create new ones.) - tpkg, err := gcexportdata.Read(r, ld.Fset, view, lpkg.PkgPath) - if err != nil { - return fmt.Errorf("reading %s: %v", lpkg.ExportFile, err) - } - if _, ok := view["go.shape"]; ok { - // Account for the pseudopackage "go.shape" that gets - // created by generic code. - viewLen++ - } - if viewLen != len(view) { - log.Panicf("golang.org/x/tools/go/packages: unexpected new packages during load of %s", lpkg.PkgPath) - } - - lpkg.Types = tpkg - lpkg.IllTyped = false - return nil -} - -// impliedLoadMode returns loadMode with its dependencies. -func impliedLoadMode(loadMode LoadMode) LoadMode { - if loadMode&(NeedDeps|NeedTypes|NeedTypesInfo) != 0 { - // All these things require knowing the import graph. - loadMode |= NeedImports - } - - return loadMode -} - -func usesExportData(cfg *Config) bool { - return cfg.Mode&NeedExportFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0 -} - -var _ interface{} = io.Discard // assert build toolchain is go1.16 or later diff --git a/go/extractor/vendor/golang.org/x/tools/go/packages/visit.go b/go/extractor/vendor/golang.org/x/tools/go/packages/visit.go deleted file mode 100644 index a1dcc40b7270..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/go/packages/visit.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package packages - -import ( - "fmt" - "os" - "sort" -) - -// Visit visits all the packages in the import graph whose roots are -// pkgs, calling the optional pre function the first time each package -// is encountered (preorder), and the optional post function after a -// package's dependencies have been visited (postorder). -// The boolean result of pre(pkg) determines whether -// the imports of package pkg are visited. -func Visit(pkgs []*Package, pre func(*Package) bool, post func(*Package)) { - seen := make(map[*Package]bool) - var visit func(*Package) - visit = func(pkg *Package) { - if !seen[pkg] { - seen[pkg] = true - - if pre == nil || pre(pkg) { - paths := make([]string, 0, len(pkg.Imports)) - for path := range pkg.Imports { - paths = append(paths, path) - } - sort.Strings(paths) // Imports is a map, this makes visit stable - for _, path := range paths { - visit(pkg.Imports[path]) - } - } - - if post != nil { - post(pkg) - } - } - } - for _, pkg := range pkgs { - visit(pkg) - } -} - -// PrintErrors prints to os.Stderr the accumulated errors of all -// packages in the import graph rooted at pkgs, dependencies first. -// PrintErrors returns the number of errors printed. 
-func PrintErrors(pkgs []*Package) int { - var n int - Visit(pkgs, nil, func(pkg *Package) { - for _, err := range pkg.Errors { - fmt.Fprintln(os.Stderr, err) - n++ - } - }) - return n -} diff --git a/go/extractor/vendor/golang.org/x/tools/go/types/objectpath/BUILD.bazel b/go/extractor/vendor/golang.org/x/tools/go/types/objectpath/BUILD.bazel deleted file mode 100644 index 374c5c601bc8..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/go/types/objectpath/BUILD.bazel +++ /dev/null @@ -1,12 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "objectpath", - srcs = ["objectpath.go"], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/tools/go/types/objectpath", - importpath = "golang.org/x/tools/go/types/objectpath", - visibility = ["//visibility:public"], - deps = ["//go/extractor/vendor/golang.org/x/tools/internal/typeparams"], -) diff --git a/go/extractor/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go b/go/extractor/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go deleted file mode 100644 index 11d5c8c3adf1..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go +++ /dev/null @@ -1,752 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package objectpath defines a naming scheme for types.Objects -// (that is, named entities in Go programs) relative to their enclosing -// package. -// -// Type-checker objects are canonical, so they are usually identified by -// their address in memory (a pointer), but a pointer has meaning only -// within one address space. By contrast, objectpath names allow the -// identity of an object to be sent from one program to another, -// establishing a correspondence between types.Object variables that are -// distinct but logically equivalent. -// -// A single object may have multiple paths. In this example, -// -// type A struct{ X int } -// type B A -// -// the field X has two paths due to its membership of both A and B. -// The For(obj) function always returns one of these paths, arbitrarily -// but consistently. -package objectpath - -import ( - "fmt" - "go/types" - "strconv" - "strings" - - "golang.org/x/tools/internal/typeparams" -) - -// A Path is an opaque name that identifies a types.Object -// relative to its package. Conceptually, the name consists of a -// sequence of destructuring operations applied to the package scope -// to obtain the original object. -// The name does not include the package itself. -type Path string - -// Encoding -// -// An object path is a textual and (with training) human-readable encoding -// of a sequence of destructuring operators, starting from a types.Package. -// The sequences represent a path through the package/object/type graph. -// We classify these operators by their type: -// -// PO package->object Package.Scope.Lookup -// OT object->type Object.Type -// TT type->type Type.{Elem,Key,Params,Results,Underlying} [EKPRU] -// TO type->object Type.{At,Field,Method,Obj} [AFMO] -// -// All valid paths start with a package and end at an object -// and thus may be defined by the regular language: -// -// objectpath = PO (OT TT* TO)* -// -// The concrete encoding follows directly: -// - The only PO operator is Package.Scope.Lookup, which requires an identifier. 
-// - The only OT operator is Object.Type, -// which we encode as '.' because dot cannot appear in an identifier. -// - The TT operators are encoded as [EKPRUTC]; -// one of these (TypeParam) requires an integer operand, -// which is encoded as a string of decimal digits. -// - The TO operators are encoded as [AFMO]; -// three of these (At,Field,Method) require an integer operand, -// which is encoded as a string of decimal digits. -// These indices are stable across different representations -// of the same package, even source and export data. -// The indices used are implementation specific and may not correspond to -// the argument to the go/types function. -// -// In the example below, -// -// package p -// -// type T interface { -// f() (a string, b struct{ X int }) -// } -// -// field X has the path "T.UM0.RA1.F0", -// representing the following sequence of operations: -// -// p.Lookup("T") T -// .Type().Underlying().Method(0). f -// .Type().Results().At(1) b -// .Type().Field(0) X -// -// The encoding is not maximally compact---every R or P is -// followed by an A, for example---but this simplifies the -// encoder and decoder. -const ( - // object->type operators - opType = '.' // .Type() (Object) - - // type->type operators - opElem = 'E' // .Elem() (Pointer, Slice, Array, Chan, Map) - opKey = 'K' // .Key() (Map) - opParams = 'P' // .Params() (Signature) - opResults = 'R' // .Results() (Signature) - opUnderlying = 'U' // .Underlying() (Named) - opTypeParam = 'T' // .TypeParams.At(i) (Named, Signature) - opConstraint = 'C' // .Constraint() (TypeParam) - - // type->object operators - opAt = 'A' // .At(i) (Tuple) - opField = 'F' // .Field(i) (Struct) - opMethod = 'M' // .Method(i) (Named or Interface; not Struct: "promoted" names are ignored) - opObj = 'O' // .Obj() (Named, TypeParam) -) - -// For is equivalent to new(Encoder).For(obj). -// -// It may be more efficient to reuse a single Encoder across several calls. -func For(obj types.Object) (Path, error) { - return new(Encoder).For(obj) -} - -// An Encoder amortizes the cost of encoding the paths of multiple objects. -// The zero value of an Encoder is ready to use. -type Encoder struct { - scopeMemo map[*types.Scope][]types.Object // memoization of scopeObjects -} - -// For returns the path to an object relative to its package, -// or an error if the object is not accessible from the package's Scope. -// -// The For function guarantees to return a path only for the following objects: -// - package-level types -// - exported package-level non-types -// - methods -// - parameter and result variables -// - struct fields -// These objects are sufficient to define the API of their package. -// The objects described by a package's export data are drawn from this set. -// -// The set of objects accessible from a package's Scope depends on -// whether the package was produced by type-checking syntax, or -// reading export data; the latter may have a smaller Scope since -// export data trims objects that are not reachable from an exported -// declaration. For example, the For function will return a path for -// an exported method of an unexported type that is not reachable -// from any public declaration; this path will cause the Object -// function to fail if called on a package loaded from export data. -// TODO(adonovan): is this a bug or feature? Should this package -// compute accessibility in the same way? 
-// -// For does not return a path for predeclared names, imported package -// names, local names, and unexported package-level names (except -// types). -// -// Example: given this definition, -// -// package p -// -// type T interface { -// f() (a string, b struct{ X int }) -// } -// -// For(X) would return a path that denotes the following sequence of operations: -// -// p.Scope().Lookup("T") (TypeName T) -// .Type().Underlying().Method(0). (method Func f) -// .Type().Results().At(1) (field Var b) -// .Type().Field(0) (field Var X) -// -// where p is the package (*types.Package) to which X belongs. -func (enc *Encoder) For(obj types.Object) (Path, error) { - pkg := obj.Pkg() - - // This table lists the cases of interest. - // - // Object Action - // ------ ------ - // nil reject - // builtin reject - // pkgname reject - // label reject - // var - // package-level accept - // func param/result accept - // local reject - // struct field accept - // const - // package-level accept - // local reject - // func - // package-level accept - // init functions reject - // concrete method accept - // interface method accept - // type - // package-level accept - // local reject - // - // The only accessible package-level objects are members of pkg itself. - // - // The cases are handled in four steps: - // - // 1. reject nil and builtin - // 2. accept package-level objects - // 3. reject obviously invalid objects - // 4. search the API for the path to the param/result/field/method. - - // 1. reference to nil or builtin? - if pkg == nil { - return "", fmt.Errorf("predeclared %s has no path", obj) - } - scope := pkg.Scope() - - // 2. package-level object? - if scope.Lookup(obj.Name()) == obj { - // Only exported objects (and non-exported types) have a path. - // Non-exported types may be referenced by other objects. - if _, ok := obj.(*types.TypeName); !ok && !obj.Exported() { - return "", fmt.Errorf("no path for non-exported %v", obj) - } - return Path(obj.Name()), nil - } - - // 3. Not a package-level object. - // Reject obviously non-viable cases. - switch obj := obj.(type) { - case *types.TypeName: - if _, ok := obj.Type().(*types.TypeParam); !ok { - // With the exception of type parameters, only package-level type names - // have a path. - return "", fmt.Errorf("no path for %v", obj) - } - case *types.Const, // Only package-level constants have a path. - *types.Label, // Labels are function-local. - *types.PkgName: // PkgNames are file-local. - return "", fmt.Errorf("no path for %v", obj) - - case *types.Var: - // Could be: - // - a field (obj.IsField()) - // - a func parameter or result - // - a local var. - // Sadly there is no way to distinguish - // a param/result from a local - // so we must proceed to the find. - - case *types.Func: - // A func, if not package-level, must be a method. - if recv := obj.Type().(*types.Signature).Recv(); recv == nil { - return "", fmt.Errorf("func is not a method: %v", obj) - } - - if path, ok := enc.concreteMethod(obj); ok { - // Fast path for concrete methods that avoids looping over scope. - return path, nil - } - - default: - panic(obj) - } - - // 4. Search the API for the path to the var (field/param/result) or method. - - // First inspect package-level named types. - // In the presence of path aliases, these give - // the best paths because non-types may - // refer to types, but not the reverse. 
- empty := make([]byte, 0, 48) // initial space - objs := enc.scopeObjects(scope) - for _, o := range objs { - tname, ok := o.(*types.TypeName) - if !ok { - continue // handle non-types in second pass - } - - path := append(empty, o.Name()...) - path = append(path, opType) - - T := o.Type() - - if tname.IsAlias() { - // type alias - if r := find(obj, T, path, nil); r != nil { - return Path(r), nil - } - } else { - if named, _ := T.(*types.Named); named != nil { - if r := findTypeParam(obj, named.TypeParams(), path, nil); r != nil { - // generic named type - return Path(r), nil - } - } - // defined (named) type - if r := find(obj, T.Underlying(), append(path, opUnderlying), nil); r != nil { - return Path(r), nil - } - } - } - - // Then inspect everything else: - // non-types, and declared methods of defined types. - for _, o := range objs { - path := append(empty, o.Name()...) - if _, ok := o.(*types.TypeName); !ok { - if o.Exported() { - // exported non-type (const, var, func) - if r := find(obj, o.Type(), append(path, opType), nil); r != nil { - return Path(r), nil - } - } - continue - } - - // Inspect declared methods of defined types. - if T, ok := o.Type().(*types.Named); ok { - path = append(path, opType) - // The method index here is always with respect - // to the underlying go/types data structures, - // which ultimately derives from source order - // and must be preserved by export data. - for i := 0; i < T.NumMethods(); i++ { - m := T.Method(i) - path2 := appendOpArg(path, opMethod, i) - if m == obj { - return Path(path2), nil // found declared method - } - if r := find(obj, m.Type(), append(path2, opType), nil); r != nil { - return Path(r), nil - } - } - } - } - - return "", fmt.Errorf("can't find path for %v in %s", obj, pkg.Path()) -} - -func appendOpArg(path []byte, op byte, arg int) []byte { - path = append(path, op) - path = strconv.AppendInt(path, int64(arg), 10) - return path -} - -// concreteMethod returns the path for meth, which must have a non-nil receiver. -// The second return value indicates success and may be false if the method is -// an interface method or if it is an instantiated method. -// -// This function is just an optimization that avoids the general scope walking -// approach. You are expected to fall back to the general approach if this -// function fails. -func (enc *Encoder) concreteMethod(meth *types.Func) (Path, bool) { - // Concrete methods can only be declared on package-scoped named types. For - // that reason we can skip the expensive walk over the package scope: the - // path will always be package -> named type -> method. We can trivially get - // the type name from the receiver, and only have to look over the type's - // methods to find the method index. - // - // Methods on generic types require special consideration, however. Consider - // the following package: - // - // L1: type S[T any] struct{} - // L2: func (recv S[A]) Foo() { recv.Bar() } - // L3: func (recv S[B]) Bar() { } - // L4: type Alias = S[int] - // L5: func _[T any]() { var s S[int]; s.Foo() } - // - // The receivers of methods on generic types are instantiations. L2 and L3 - // instantiate S with the type-parameters A and B, which are scoped to the - // respective methods. L4 and L5 each instantiate S with int. Each of these - // instantiations has its own method set, full of methods (and thus objects) - // with receivers whose types are the respective instantiations. 
In other - // words, we have - // - // S[A].Foo, S[A].Bar - // S[B].Foo, S[B].Bar - // S[int].Foo, S[int].Bar - // - // We may thus be trying to produce object paths for any of these objects. - // - // S[A].Foo and S[B].Bar are the origin methods, and their paths are S.Foo - // and S.Bar, which are the paths that this function naturally produces. - // - // S[A].Bar, S[B].Foo, and both methods on S[int] are instantiations that - // don't correspond to the origin methods. For S[int], this is significant. - // The most precise object path for S[int].Foo, for example, is Alias.Foo, - // not S.Foo. Our function, however, would produce S.Foo, which would - // resolve to a different object. - // - // For S[A].Bar and S[B].Foo it could be argued that S.Bar and S.Foo are - // still the correct paths, since only the origin methods have meaningful - // paths. But this is likely only true for trivial cases and has edge cases. - // Since this function is only an optimization, we err on the side of giving - // up, deferring to the slower but definitely correct algorithm. Most users - // of objectpath will only be giving us origin methods, anyway, as referring - // to instantiated methods is usually not useful. - - if typeparams.OriginMethod(meth) != meth { - return "", false - } - - recvT := meth.Type().(*types.Signature).Recv().Type() - if ptr, ok := recvT.(*types.Pointer); ok { - recvT = ptr.Elem() - } - - named, ok := recvT.(*types.Named) - if !ok { - return "", false - } - - if types.IsInterface(named) { - // Named interfaces don't have to be package-scoped - // - // TODO(dominikh): opt: if scope.Lookup(name) == named, then we can apply this optimization to interface - // methods, too, I think. - return "", false - } - - // Preallocate space for the name, opType, opMethod, and some digits. - name := named.Obj().Name() - path := make([]byte, 0, len(name)+8) - path = append(path, name...) - path = append(path, opType) - - // Method indices are w.r.t. the go/types data structures, - // ultimately deriving from source order, - // which is preserved by export data. - for i := 0; i < named.NumMethods(); i++ { - if named.Method(i) == meth { - path = appendOpArg(path, opMethod, i) - return Path(path), true - } - } - - // Due to golang/go#59944, go/types fails to associate the receiver with - // certain methods on cgo types. - // - // TODO(rfindley): replace this panic once golang/go#59944 is fixed in all Go - // versions gopls supports. - return "", false - // panic(fmt.Sprintf("couldn't find method %s on type %s; methods: %#v", meth, named, enc.namedMethods(named))) -} - -// find finds obj within type T, returning the path to it, or nil if not found. -// -// The seen map is used to short circuit cycles through type parameters. If -// nil, it will be allocated as necessary. -func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName]bool) []byte { - switch T := T.(type) { - case *types.Basic, *types.Named: - // Named types belonging to pkg were handled already, - // so T must belong to another package. No path. 
- return nil - case *types.Pointer: - return find(obj, T.Elem(), append(path, opElem), seen) - case *types.Slice: - return find(obj, T.Elem(), append(path, opElem), seen) - case *types.Array: - return find(obj, T.Elem(), append(path, opElem), seen) - case *types.Chan: - return find(obj, T.Elem(), append(path, opElem), seen) - case *types.Map: - if r := find(obj, T.Key(), append(path, opKey), seen); r != nil { - return r - } - return find(obj, T.Elem(), append(path, opElem), seen) - case *types.Signature: - if r := findTypeParam(obj, T.TypeParams(), path, seen); r != nil { - return r - } - if r := find(obj, T.Params(), append(path, opParams), seen); r != nil { - return r - } - return find(obj, T.Results(), append(path, opResults), seen) - case *types.Struct: - for i := 0; i < T.NumFields(); i++ { - fld := T.Field(i) - path2 := appendOpArg(path, opField, i) - if fld == obj { - return path2 // found field var - } - if r := find(obj, fld.Type(), append(path2, opType), seen); r != nil { - return r - } - } - return nil - case *types.Tuple: - for i := 0; i < T.Len(); i++ { - v := T.At(i) - path2 := appendOpArg(path, opAt, i) - if v == obj { - return path2 // found param/result var - } - if r := find(obj, v.Type(), append(path2, opType), seen); r != nil { - return r - } - } - return nil - case *types.Interface: - for i := 0; i < T.NumMethods(); i++ { - m := T.Method(i) - path2 := appendOpArg(path, opMethod, i) - if m == obj { - return path2 // found interface method - } - if r := find(obj, m.Type(), append(path2, opType), seen); r != nil { - return r - } - } - return nil - case *types.TypeParam: - name := T.Obj() - if name == obj { - return append(path, opObj) - } - if seen[name] { - return nil - } - if seen == nil { - seen = make(map[*types.TypeName]bool) - } - seen[name] = true - if r := find(obj, T.Constraint(), append(path, opConstraint), seen); r != nil { - return r - } - return nil - } - panic(T) -} - -func findTypeParam(obj types.Object, list *types.TypeParamList, path []byte, seen map[*types.TypeName]bool) []byte { - for i := 0; i < list.Len(); i++ { - tparam := list.At(i) - path2 := appendOpArg(path, opTypeParam, i) - if r := find(obj, tparam, path2, seen); r != nil { - return r - } - } - return nil -} - -// Object returns the object denoted by path p within the package pkg. -func Object(pkg *types.Package, p Path) (types.Object, error) { - pathstr := string(p) - if pathstr == "" { - return nil, fmt.Errorf("empty path") - } - - var pkgobj, suffix string - if dot := strings.IndexByte(pathstr, opType); dot < 0 { - pkgobj = pathstr - } else { - pkgobj = pathstr[:dot] - suffix = pathstr[dot:] // suffix starts with "." - } - - obj := pkg.Scope().Lookup(pkgobj) - if obj == nil { - return nil, fmt.Errorf("package %s does not contain %q", pkg.Path(), pkgobj) - } - - // abstraction of *types.{Pointer,Slice,Array,Chan,Map} - type hasElem interface { - Elem() types.Type - } - // abstraction of *types.{Named,Signature} - type hasTypeParams interface { - TypeParams() *types.TypeParamList - } - // abstraction of *types.{Named,TypeParam} - type hasObj interface { - Obj() *types.TypeName - } - - // The loop state is the pair (t, obj), - // exactly one of which is non-nil, initially obj. - // All suffixes start with '.' (the only object->type operation), - // followed by optional type->type operations, - // then a type->object operation. - // The cycle then repeats. - var t types.Type - for suffix != "" { - code := suffix[0] - suffix = suffix[1:] - - // Codes [AFM] have an integer operand. 
- var index int - switch code { - case opAt, opField, opMethod, opTypeParam: - rest := strings.TrimLeft(suffix, "0123456789") - numerals := suffix[:len(suffix)-len(rest)] - suffix = rest - i, err := strconv.Atoi(numerals) - if err != nil { - return nil, fmt.Errorf("invalid path: bad numeric operand %q for code %q", numerals, code) - } - index = int(i) - case opObj: - // no operand - default: - // The suffix must end with a type->object operation. - if suffix == "" { - return nil, fmt.Errorf("invalid path: ends with %q, want [AFMO]", code) - } - } - - if code == opType { - if t != nil { - return nil, fmt.Errorf("invalid path: unexpected %q in type context", opType) - } - t = obj.Type() - obj = nil - continue - } - - if t == nil { - return nil, fmt.Errorf("invalid path: code %q in object context", code) - } - - // Inv: t != nil, obj == nil - - switch code { - case opElem: - hasElem, ok := t.(hasElem) // Pointer, Slice, Array, Chan, Map - if !ok { - return nil, fmt.Errorf("cannot apply %q to %s (got %T, want pointer, slice, array, chan or map)", code, t, t) - } - t = hasElem.Elem() - - case opKey: - mapType, ok := t.(*types.Map) - if !ok { - return nil, fmt.Errorf("cannot apply %q to %s (got %T, want map)", code, t, t) - } - t = mapType.Key() - - case opParams: - sig, ok := t.(*types.Signature) - if !ok { - return nil, fmt.Errorf("cannot apply %q to %s (got %T, want signature)", code, t, t) - } - t = sig.Params() - - case opResults: - sig, ok := t.(*types.Signature) - if !ok { - return nil, fmt.Errorf("cannot apply %q to %s (got %T, want signature)", code, t, t) - } - t = sig.Results() - - case opUnderlying: - named, ok := t.(*types.Named) - if !ok { - return nil, fmt.Errorf("cannot apply %q to %s (got %T, want named)", code, t, t) - } - t = named.Underlying() - - case opTypeParam: - hasTypeParams, ok := t.(hasTypeParams) // Named, Signature - if !ok { - return nil, fmt.Errorf("cannot apply %q to %s (got %T, want named or signature)", code, t, t) - } - tparams := hasTypeParams.TypeParams() - if n := tparams.Len(); index >= n { - return nil, fmt.Errorf("tuple index %d out of range [0-%d)", index, n) - } - t = tparams.At(index) - - case opConstraint: - tparam, ok := t.(*types.TypeParam) - if !ok { - return nil, fmt.Errorf("cannot apply %q to %s (got %T, want type parameter)", code, t, t) - } - t = tparam.Constraint() - - case opAt: - tuple, ok := t.(*types.Tuple) - if !ok { - return nil, fmt.Errorf("cannot apply %q to %s (got %T, want tuple)", code, t, t) - } - if n := tuple.Len(); index >= n { - return nil, fmt.Errorf("tuple index %d out of range [0-%d)", index, n) - } - obj = tuple.At(index) - t = nil - - case opField: - structType, ok := t.(*types.Struct) - if !ok { - return nil, fmt.Errorf("cannot apply %q to %s (got %T, want struct)", code, t, t) - } - if n := structType.NumFields(); index >= n { - return nil, fmt.Errorf("field index %d out of range [0-%d)", index, n) - } - obj = structType.Field(index) - t = nil - - case opMethod: - switch t := t.(type) { - case *types.Interface: - if index >= t.NumMethods() { - return nil, fmt.Errorf("method index %d out of range [0-%d)", index, t.NumMethods()) - } - obj = t.Method(index) // Id-ordered - - case *types.Named: - if index >= t.NumMethods() { - return nil, fmt.Errorf("method index %d out of range [0-%d)", index, t.NumMethods()) - } - obj = t.Method(index) - - default: - return nil, fmt.Errorf("cannot apply %q to %s (got %T, want interface or named)", code, t, t) - } - t = nil - - case opObj: - hasObj, ok := t.(hasObj) - if !ok { - return 
nil, fmt.Errorf("cannot apply %q to %s (got %T, want named or type param)", code, t, t) - } - obj = hasObj.Obj() - t = nil - - default: - return nil, fmt.Errorf("invalid path: unknown code %q", code) - } - } - - if obj.Pkg() != pkg { - return nil, fmt.Errorf("path denotes %s, which belongs to a different package", obj) - } - - return obj, nil // success -} - -// scopeObjects is a memoization of scope objects. -// Callers must not modify the result. -func (enc *Encoder) scopeObjects(scope *types.Scope) []types.Object { - m := enc.scopeMemo - if m == nil { - m = make(map[*types.Scope][]types.Object) - enc.scopeMemo = m - } - objs, ok := m[scope] - if !ok { - names := scope.Names() // allocates and sorts - objs = make([]types.Object, len(names)) - for i, name := range names { - objs[i] = scope.Lookup(name) - } - m[scope] = objs - } - return objs -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/event/BUILD.bazel b/go/extractor/vendor/golang.org/x/tools/internal/event/BUILD.bazel deleted file mode 100644 index 200e436fcd4b..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/event/BUILD.bazel +++ /dev/null @@ -1,19 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "event", - srcs = [ - "doc.go", - "event.go", - ], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/tools/internal/event", - importpath = "golang.org/x/tools/internal/event", - visibility = ["//go/extractor/vendor/golang.org/x/tools:__subpackages__"], - deps = [ - "//go/extractor/vendor/golang.org/x/tools/internal/event/core", - "//go/extractor/vendor/golang.org/x/tools/internal/event/keys", - "//go/extractor/vendor/golang.org/x/tools/internal/event/label", - ], -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/event/core/BUILD.bazel b/go/extractor/vendor/golang.org/x/tools/internal/event/core/BUILD.bazel deleted file mode 100644 index a16713f536ce..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/event/core/BUILD.bazel +++ /dev/null @@ -1,19 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "core", - srcs = [ - "event.go", - "export.go", - "fast.go", - ], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/tools/internal/event/core", - importpath = "golang.org/x/tools/internal/event/core", - visibility = ["//go/extractor/vendor/golang.org/x/tools:__subpackages__"], - deps = [ - "//go/extractor/vendor/golang.org/x/tools/internal/event/keys", - "//go/extractor/vendor/golang.org/x/tools/internal/event/label", - ], -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/event/core/event.go b/go/extractor/vendor/golang.org/x/tools/internal/event/core/event.go deleted file mode 100644 index a6cf0e64a4b1..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/event/core/event.go +++ /dev/null @@ -1,85 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package core provides support for event based telemetry. -package core - -import ( - "fmt" - "time" - - "golang.org/x/tools/internal/event/label" -) - -// Event holds the information about an event of note that occurred. 
-type Event struct { - at time.Time - - // As events are often on the stack, storing the first few labels directly - // in the event can avoid an allocation at all for the very common cases of - // simple events. - // The length needs to be large enough to cope with the majority of events - // but no so large as to cause undue stack pressure. - // A log message with two values will use 3 labels (one for each value and - // one for the message itself). - - static [3]label.Label // inline storage for the first few labels - dynamic []label.Label // dynamically sized storage for remaining labels -} - -// eventLabelMap implements label.Map for a the labels of an Event. -type eventLabelMap struct { - event Event -} - -func (ev Event) At() time.Time { return ev.at } - -func (ev Event) Format(f fmt.State, r rune) { - if !ev.at.IsZero() { - fmt.Fprint(f, ev.at.Format("2006/01/02 15:04:05 ")) - } - for index := 0; ev.Valid(index); index++ { - if l := ev.Label(index); l.Valid() { - fmt.Fprintf(f, "\n\t%v", l) - } - } -} - -func (ev Event) Valid(index int) bool { - return index >= 0 && index < len(ev.static)+len(ev.dynamic) -} - -func (ev Event) Label(index int) label.Label { - if index < len(ev.static) { - return ev.static[index] - } - return ev.dynamic[index-len(ev.static)] -} - -func (ev Event) Find(key label.Key) label.Label { - for _, l := range ev.static { - if l.Key() == key { - return l - } - } - for _, l := range ev.dynamic { - if l.Key() == key { - return l - } - } - return label.Label{} -} - -func MakeEvent(static [3]label.Label, labels []label.Label) Event { - return Event{ - static: static, - dynamic: labels, - } -} - -// CloneEvent event returns a copy of the event with the time adjusted to at. -func CloneEvent(ev Event, at time.Time) Event { - ev.at = at - return ev -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/event/core/export.go b/go/extractor/vendor/golang.org/x/tools/internal/event/core/export.go deleted file mode 100644 index 05f3a9a5791a..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/event/core/export.go +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package core - -import ( - "context" - "sync/atomic" - "time" - "unsafe" - - "golang.org/x/tools/internal/event/label" -) - -// Exporter is a function that handles events. -// It may return a modified context and event. -type Exporter func(context.Context, Event, label.Map) context.Context - -var ( - exporter unsafe.Pointer -) - -// SetExporter sets the global exporter function that handles all events. -// The exporter is called synchronously from the event call site, so it should -// return quickly so as not to hold up user code. -func SetExporter(e Exporter) { - p := unsafe.Pointer(&e) - if e == nil { - // &e is always valid, and so p is always valid, but for the early abort - // of ProcessEvent to be efficient it needs to make the nil check on the - // pointer without having to dereference it, so we make the nil function - // also a nil pointer - p = nil - } - atomic.StorePointer(&exporter, p) -} - -// deliver is called to deliver an event to the supplied exporter. -// it will fill in the time. 
-func deliver(ctx context.Context, exporter Exporter, ev Event) context.Context { - // add the current time to the event - ev.at = time.Now() - // hand the event off to the current exporter - return exporter(ctx, ev, ev) -} - -// Export is called to deliver an event to the global exporter if set. -func Export(ctx context.Context, ev Event) context.Context { - // get the global exporter and abort early if there is not one - exporterPtr := (*Exporter)(atomic.LoadPointer(&exporter)) - if exporterPtr == nil { - return ctx - } - return deliver(ctx, *exporterPtr, ev) -} - -// ExportPair is called to deliver a start event to the supplied exporter. -// It also returns a function that will deliver the end event to the same -// exporter. -// It will fill in the time. -func ExportPair(ctx context.Context, begin, end Event) (context.Context, func()) { - // get the global exporter and abort early if there is not one - exporterPtr := (*Exporter)(atomic.LoadPointer(&exporter)) - if exporterPtr == nil { - return ctx, func() {} - } - ctx = deliver(ctx, *exporterPtr, begin) - return ctx, func() { deliver(ctx, *exporterPtr, end) } -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/event/core/fast.go b/go/extractor/vendor/golang.org/x/tools/internal/event/core/fast.go deleted file mode 100644 index 06c1d4615e6b..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/event/core/fast.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package core - -import ( - "context" - - "golang.org/x/tools/internal/event/keys" - "golang.org/x/tools/internal/event/label" -) - -// Log1 takes a message and one label delivers a log event to the exporter. -// It is a customized version of Print that is faster and does no allocation. -func Log1(ctx context.Context, message string, t1 label.Label) { - Export(ctx, MakeEvent([3]label.Label{ - keys.Msg.Of(message), - t1, - }, nil)) -} - -// Log2 takes a message and two labels and delivers a log event to the exporter. -// It is a customized version of Print that is faster and does no allocation. -func Log2(ctx context.Context, message string, t1 label.Label, t2 label.Label) { - Export(ctx, MakeEvent([3]label.Label{ - keys.Msg.Of(message), - t1, - t2, - }, nil)) -} - -// Metric1 sends a label event to the exporter with the supplied labels. -func Metric1(ctx context.Context, t1 label.Label) context.Context { - return Export(ctx, MakeEvent([3]label.Label{ - keys.Metric.New(), - t1, - }, nil)) -} - -// Metric2 sends a label event to the exporter with the supplied labels. -func Metric2(ctx context.Context, t1, t2 label.Label) context.Context { - return Export(ctx, MakeEvent([3]label.Label{ - keys.Metric.New(), - t1, - t2, - }, nil)) -} - -// Start1 sends a span start event with the supplied label list to the exporter. -// It also returns a function that will end the span, which should normally be -// deferred. -func Start1(ctx context.Context, name string, t1 label.Label) (context.Context, func()) { - return ExportPair(ctx, - MakeEvent([3]label.Label{ - keys.Start.Of(name), - t1, - }, nil), - MakeEvent([3]label.Label{ - keys.End.New(), - }, nil)) -} - -// Start2 sends a span start event with the supplied label list to the exporter. -// It also returns a function that will end the span, which should normally be -// deferred. 
-func Start2(ctx context.Context, name string, t1, t2 label.Label) (context.Context, func()) { - return ExportPair(ctx, - MakeEvent([3]label.Label{ - keys.Start.Of(name), - t1, - t2, - }, nil), - MakeEvent([3]label.Label{ - keys.End.New(), - }, nil)) -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/event/doc.go b/go/extractor/vendor/golang.org/x/tools/internal/event/doc.go deleted file mode 100644 index 5dc6e6babedd..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/event/doc.go +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package event provides a set of packages that cover the main -// concepts of telemetry in an implementation agnostic way. -package event diff --git a/go/extractor/vendor/golang.org/x/tools/internal/event/event.go b/go/extractor/vendor/golang.org/x/tools/internal/event/event.go deleted file mode 100644 index 4d55e577d1a8..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/event/event.go +++ /dev/null @@ -1,127 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package event - -import ( - "context" - - "golang.org/x/tools/internal/event/core" - "golang.org/x/tools/internal/event/keys" - "golang.org/x/tools/internal/event/label" -) - -// Exporter is a function that handles events. -// It may return a modified context and event. -type Exporter func(context.Context, core.Event, label.Map) context.Context - -// SetExporter sets the global exporter function that handles all events. -// The exporter is called synchronously from the event call site, so it should -// return quickly so as not to hold up user code. -func SetExporter(e Exporter) { - core.SetExporter(core.Exporter(e)) -} - -// Log takes a message and a label list and combines them into a single event -// before delivering them to the exporter. -func Log(ctx context.Context, message string, labels ...label.Label) { - core.Export(ctx, core.MakeEvent([3]label.Label{ - keys.Msg.Of(message), - }, labels)) -} - -// IsLog returns true if the event was built by the Log function. -// It is intended to be used in exporters to identify the semantics of the -// event when deciding what to do with it. -func IsLog(ev core.Event) bool { - return ev.Label(0).Key() == keys.Msg -} - -// Error takes a message and a label list and combines them into a single event -// before delivering them to the exporter. It captures the error in the -// delivered event. -func Error(ctx context.Context, message string, err error, labels ...label.Label) { - core.Export(ctx, core.MakeEvent([3]label.Label{ - keys.Msg.Of(message), - keys.Err.Of(err), - }, labels)) -} - -// IsError returns true if the event was built by the Error function. -// It is intended to be used in exporters to identify the semantics of the -// event when deciding what to do with it. -func IsError(ev core.Event) bool { - return ev.Label(0).Key() == keys.Msg && - ev.Label(1).Key() == keys.Err -} - -// Metric sends a label event to the exporter with the supplied labels. -func Metric(ctx context.Context, labels ...label.Label) { - core.Export(ctx, core.MakeEvent([3]label.Label{ - keys.Metric.New(), - }, labels)) -} - -// IsMetric returns true if the event was built by the Metric function. 
-// It is intended to be used in exporters to identify the semantics of the -// event when deciding what to do with it. -func IsMetric(ev core.Event) bool { - return ev.Label(0).Key() == keys.Metric -} - -// Label sends a label event to the exporter with the supplied labels. -func Label(ctx context.Context, labels ...label.Label) context.Context { - return core.Export(ctx, core.MakeEvent([3]label.Label{ - keys.Label.New(), - }, labels)) -} - -// IsLabel returns true if the event was built by the Label function. -// It is intended to be used in exporters to identify the semantics of the -// event when deciding what to do with it. -func IsLabel(ev core.Event) bool { - return ev.Label(0).Key() == keys.Label -} - -// Start sends a span start event with the supplied label list to the exporter. -// It also returns a function that will end the span, which should normally be -// deferred. -func Start(ctx context.Context, name string, labels ...label.Label) (context.Context, func()) { - return core.ExportPair(ctx, - core.MakeEvent([3]label.Label{ - keys.Start.Of(name), - }, labels), - core.MakeEvent([3]label.Label{ - keys.End.New(), - }, nil)) -} - -// IsStart returns true if the event was built by the Start function. -// It is intended to be used in exporters to identify the semantics of the -// event when deciding what to do with it. -func IsStart(ev core.Event) bool { - return ev.Label(0).Key() == keys.Start -} - -// IsEnd returns true if the event was built by the End function. -// It is intended to be used in exporters to identify the semantics of the -// event when deciding what to do with it. -func IsEnd(ev core.Event) bool { - return ev.Label(0).Key() == keys.End -} - -// Detach returns a context without an associated span. -// This allows the creation of spans that are not children of the current span. -func Detach(ctx context.Context) context.Context { - return core.Export(ctx, core.MakeEvent([3]label.Label{ - keys.Detach.New(), - }, nil)) -} - -// IsDetach returns true if the event was built by the Detach function. -// It is intended to be used in exporters to identify the semantics of the -// event when deciding what to do with it. -func IsDetach(ev core.Event) bool { - return ev.Label(0).Key() == keys.Detach -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/event/keys/BUILD.bazel b/go/extractor/vendor/golang.org/x/tools/internal/event/keys/BUILD.bazel deleted file mode 100644 index 1feefdf1a834..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/event/keys/BUILD.bazel +++ /dev/null @@ -1,16 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "keys", - srcs = [ - "keys.go", - "standard.go", - "util.go", - ], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/tools/internal/event/keys", - importpath = "golang.org/x/tools/internal/event/keys", - visibility = ["//go/extractor/vendor/golang.org/x/tools:__subpackages__"], - deps = ["//go/extractor/vendor/golang.org/x/tools/internal/event/label"], -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/event/keys/keys.go b/go/extractor/vendor/golang.org/x/tools/internal/event/keys/keys.go deleted file mode 100644 index a02206e30150..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/event/keys/keys.go +++ /dev/null @@ -1,564 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package keys - -import ( - "fmt" - "io" - "math" - "strconv" - - "golang.org/x/tools/internal/event/label" -) - -// Value represents a key for untyped values. -type Value struct { - name string - description string -} - -// New creates a new Key for untyped values. -func New(name, description string) *Value { - return &Value{name: name, description: description} -} - -func (k *Value) Name() string { return k.name } -func (k *Value) Description() string { return k.description } - -func (k *Value) Format(w io.Writer, buf []byte, l label.Label) { - fmt.Fprint(w, k.From(l)) -} - -// Get can be used to get a label for the key from a label.Map. -func (k *Value) Get(lm label.Map) interface{} { - if t := lm.Find(k); t.Valid() { - return k.From(t) - } - return nil -} - -// From can be used to get a value from a Label. -func (k *Value) From(t label.Label) interface{} { return t.UnpackValue() } - -// Of creates a new Label with this key and the supplied value. -func (k *Value) Of(value interface{}) label.Label { return label.OfValue(k, value) } - -// Tag represents a key for tagging labels that have no value. -// These are used when the existence of the label is the entire information it -// carries, such as marking events to be of a specific kind, or from a specific -// package. -type Tag struct { - name string - description string -} - -// NewTag creates a new Key for tagging labels. -func NewTag(name, description string) *Tag { - return &Tag{name: name, description: description} -} - -func (k *Tag) Name() string { return k.name } -func (k *Tag) Description() string { return k.description } - -func (k *Tag) Format(w io.Writer, buf []byte, l label.Label) {} - -// New creates a new Label with this key. -func (k *Tag) New() label.Label { return label.OfValue(k, nil) } - -// Int represents a key -type Int struct { - name string - description string -} - -// NewInt creates a new Key for int values. -func NewInt(name, description string) *Int { - return &Int{name: name, description: description} -} - -func (k *Int) Name() string { return k.name } -func (k *Int) Description() string { return k.description } - -func (k *Int) Format(w io.Writer, buf []byte, l label.Label) { - w.Write(strconv.AppendInt(buf, int64(k.From(l)), 10)) -} - -// Of creates a new Label with this key and the supplied value. -func (k *Int) Of(v int) label.Label { return label.Of64(k, uint64(v)) } - -// Get can be used to get a label for the key from a label.Map. -func (k *Int) Get(lm label.Map) int { - if t := lm.Find(k); t.Valid() { - return k.From(t) - } - return 0 -} - -// From can be used to get a value from a Label. -func (k *Int) From(t label.Label) int { return int(t.Unpack64()) } - -// Int8 represents a key -type Int8 struct { - name string - description string -} - -// NewInt8 creates a new Key for int8 values. -func NewInt8(name, description string) *Int8 { - return &Int8{name: name, description: description} -} - -func (k *Int8) Name() string { return k.name } -func (k *Int8) Description() string { return k.description } - -func (k *Int8) Format(w io.Writer, buf []byte, l label.Label) { - w.Write(strconv.AppendInt(buf, int64(k.From(l)), 10)) -} - -// Of creates a new Label with this key and the supplied value. -func (k *Int8) Of(v int8) label.Label { return label.Of64(k, uint64(v)) } - -// Get can be used to get a label for the key from a label.Map. 
-func (k *Int8) Get(lm label.Map) int8 { - if t := lm.Find(k); t.Valid() { - return k.From(t) - } - return 0 -} - -// From can be used to get a value from a Label. -func (k *Int8) From(t label.Label) int8 { return int8(t.Unpack64()) } - -// Int16 represents a key -type Int16 struct { - name string - description string -} - -// NewInt16 creates a new Key for int16 values. -func NewInt16(name, description string) *Int16 { - return &Int16{name: name, description: description} -} - -func (k *Int16) Name() string { return k.name } -func (k *Int16) Description() string { return k.description } - -func (k *Int16) Format(w io.Writer, buf []byte, l label.Label) { - w.Write(strconv.AppendInt(buf, int64(k.From(l)), 10)) -} - -// Of creates a new Label with this key and the supplied value. -func (k *Int16) Of(v int16) label.Label { return label.Of64(k, uint64(v)) } - -// Get can be used to get a label for the key from a label.Map. -func (k *Int16) Get(lm label.Map) int16 { - if t := lm.Find(k); t.Valid() { - return k.From(t) - } - return 0 -} - -// From can be used to get a value from a Label. -func (k *Int16) From(t label.Label) int16 { return int16(t.Unpack64()) } - -// Int32 represents a key -type Int32 struct { - name string - description string -} - -// NewInt32 creates a new Key for int32 values. -func NewInt32(name, description string) *Int32 { - return &Int32{name: name, description: description} -} - -func (k *Int32) Name() string { return k.name } -func (k *Int32) Description() string { return k.description } - -func (k *Int32) Format(w io.Writer, buf []byte, l label.Label) { - w.Write(strconv.AppendInt(buf, int64(k.From(l)), 10)) -} - -// Of creates a new Label with this key and the supplied value. -func (k *Int32) Of(v int32) label.Label { return label.Of64(k, uint64(v)) } - -// Get can be used to get a label for the key from a label.Map. -func (k *Int32) Get(lm label.Map) int32 { - if t := lm.Find(k); t.Valid() { - return k.From(t) - } - return 0 -} - -// From can be used to get a value from a Label. -func (k *Int32) From(t label.Label) int32 { return int32(t.Unpack64()) } - -// Int64 represents a key -type Int64 struct { - name string - description string -} - -// NewInt64 creates a new Key for int64 values. -func NewInt64(name, description string) *Int64 { - return &Int64{name: name, description: description} -} - -func (k *Int64) Name() string { return k.name } -func (k *Int64) Description() string { return k.description } - -func (k *Int64) Format(w io.Writer, buf []byte, l label.Label) { - w.Write(strconv.AppendInt(buf, k.From(l), 10)) -} - -// Of creates a new Label with this key and the supplied value. -func (k *Int64) Of(v int64) label.Label { return label.Of64(k, uint64(v)) } - -// Get can be used to get a label for the key from a label.Map. -func (k *Int64) Get(lm label.Map) int64 { - if t := lm.Find(k); t.Valid() { - return k.From(t) - } - return 0 -} - -// From can be used to get a value from a Label. -func (k *Int64) From(t label.Label) int64 { return int64(t.Unpack64()) } - -// UInt represents a key -type UInt struct { - name string - description string -} - -// NewUInt creates a new Key for uint values. 
-func NewUInt(name, description string) *UInt { - return &UInt{name: name, description: description} -} - -func (k *UInt) Name() string { return k.name } -func (k *UInt) Description() string { return k.description } - -func (k *UInt) Format(w io.Writer, buf []byte, l label.Label) { - w.Write(strconv.AppendUint(buf, uint64(k.From(l)), 10)) -} - -// Of creates a new Label with this key and the supplied value. -func (k *UInt) Of(v uint) label.Label { return label.Of64(k, uint64(v)) } - -// Get can be used to get a label for the key from a label.Map. -func (k *UInt) Get(lm label.Map) uint { - if t := lm.Find(k); t.Valid() { - return k.From(t) - } - return 0 -} - -// From can be used to get a value from a Label. -func (k *UInt) From(t label.Label) uint { return uint(t.Unpack64()) } - -// UInt8 represents a key -type UInt8 struct { - name string - description string -} - -// NewUInt8 creates a new Key for uint8 values. -func NewUInt8(name, description string) *UInt8 { - return &UInt8{name: name, description: description} -} - -func (k *UInt8) Name() string { return k.name } -func (k *UInt8) Description() string { return k.description } - -func (k *UInt8) Format(w io.Writer, buf []byte, l label.Label) { - w.Write(strconv.AppendUint(buf, uint64(k.From(l)), 10)) -} - -// Of creates a new Label with this key and the supplied value. -func (k *UInt8) Of(v uint8) label.Label { return label.Of64(k, uint64(v)) } - -// Get can be used to get a label for the key from a label.Map. -func (k *UInt8) Get(lm label.Map) uint8 { - if t := lm.Find(k); t.Valid() { - return k.From(t) - } - return 0 -} - -// From can be used to get a value from a Label. -func (k *UInt8) From(t label.Label) uint8 { return uint8(t.Unpack64()) } - -// UInt16 represents a key -type UInt16 struct { - name string - description string -} - -// NewUInt16 creates a new Key for uint16 values. -func NewUInt16(name, description string) *UInt16 { - return &UInt16{name: name, description: description} -} - -func (k *UInt16) Name() string { return k.name } -func (k *UInt16) Description() string { return k.description } - -func (k *UInt16) Format(w io.Writer, buf []byte, l label.Label) { - w.Write(strconv.AppendUint(buf, uint64(k.From(l)), 10)) -} - -// Of creates a new Label with this key and the supplied value. -func (k *UInt16) Of(v uint16) label.Label { return label.Of64(k, uint64(v)) } - -// Get can be used to get a label for the key from a label.Map. -func (k *UInt16) Get(lm label.Map) uint16 { - if t := lm.Find(k); t.Valid() { - return k.From(t) - } - return 0 -} - -// From can be used to get a value from a Label. -func (k *UInt16) From(t label.Label) uint16 { return uint16(t.Unpack64()) } - -// UInt32 represents a key -type UInt32 struct { - name string - description string -} - -// NewUInt32 creates a new Key for uint32 values. -func NewUInt32(name, description string) *UInt32 { - return &UInt32{name: name, description: description} -} - -func (k *UInt32) Name() string { return k.name } -func (k *UInt32) Description() string { return k.description } - -func (k *UInt32) Format(w io.Writer, buf []byte, l label.Label) { - w.Write(strconv.AppendUint(buf, uint64(k.From(l)), 10)) -} - -// Of creates a new Label with this key and the supplied value. -func (k *UInt32) Of(v uint32) label.Label { return label.Of64(k, uint64(v)) } - -// Get can be used to get a label for the key from a label.Map. 
-func (k *UInt32) Get(lm label.Map) uint32 { - if t := lm.Find(k); t.Valid() { - return k.From(t) - } - return 0 -} - -// From can be used to get a value from a Label. -func (k *UInt32) From(t label.Label) uint32 { return uint32(t.Unpack64()) } - -// UInt64 represents a key -type UInt64 struct { - name string - description string -} - -// NewUInt64 creates a new Key for uint64 values. -func NewUInt64(name, description string) *UInt64 { - return &UInt64{name: name, description: description} -} - -func (k *UInt64) Name() string { return k.name } -func (k *UInt64) Description() string { return k.description } - -func (k *UInt64) Format(w io.Writer, buf []byte, l label.Label) { - w.Write(strconv.AppendUint(buf, k.From(l), 10)) -} - -// Of creates a new Label with this key and the supplied value. -func (k *UInt64) Of(v uint64) label.Label { return label.Of64(k, v) } - -// Get can be used to get a label for the key from a label.Map. -func (k *UInt64) Get(lm label.Map) uint64 { - if t := lm.Find(k); t.Valid() { - return k.From(t) - } - return 0 -} - -// From can be used to get a value from a Label. -func (k *UInt64) From(t label.Label) uint64 { return t.Unpack64() } - -// Float32 represents a key -type Float32 struct { - name string - description string -} - -// NewFloat32 creates a new Key for float32 values. -func NewFloat32(name, description string) *Float32 { - return &Float32{name: name, description: description} -} - -func (k *Float32) Name() string { return k.name } -func (k *Float32) Description() string { return k.description } - -func (k *Float32) Format(w io.Writer, buf []byte, l label.Label) { - w.Write(strconv.AppendFloat(buf, float64(k.From(l)), 'E', -1, 32)) -} - -// Of creates a new Label with this key and the supplied value. -func (k *Float32) Of(v float32) label.Label { - return label.Of64(k, uint64(math.Float32bits(v))) -} - -// Get can be used to get a label for the key from a label.Map. -func (k *Float32) Get(lm label.Map) float32 { - if t := lm.Find(k); t.Valid() { - return k.From(t) - } - return 0 -} - -// From can be used to get a value from a Label. -func (k *Float32) From(t label.Label) float32 { - return math.Float32frombits(uint32(t.Unpack64())) -} - -// Float64 represents a key -type Float64 struct { - name string - description string -} - -// NewFloat64 creates a new Key for int64 values. -func NewFloat64(name, description string) *Float64 { - return &Float64{name: name, description: description} -} - -func (k *Float64) Name() string { return k.name } -func (k *Float64) Description() string { return k.description } - -func (k *Float64) Format(w io.Writer, buf []byte, l label.Label) { - w.Write(strconv.AppendFloat(buf, k.From(l), 'E', -1, 64)) -} - -// Of creates a new Label with this key and the supplied value. -func (k *Float64) Of(v float64) label.Label { - return label.Of64(k, math.Float64bits(v)) -} - -// Get can be used to get a label for the key from a label.Map. -func (k *Float64) Get(lm label.Map) float64 { - if t := lm.Find(k); t.Valid() { - return k.From(t) - } - return 0 -} - -// From can be used to get a value from a Label. -func (k *Float64) From(t label.Label) float64 { - return math.Float64frombits(t.Unpack64()) -} - -// String represents a key -type String struct { - name string - description string -} - -// NewString creates a new Key for int64 values. 
-func NewString(name, description string) *String { - return &String{name: name, description: description} -} - -func (k *String) Name() string { return k.name } -func (k *String) Description() string { return k.description } - -func (k *String) Format(w io.Writer, buf []byte, l label.Label) { - w.Write(strconv.AppendQuote(buf, k.From(l))) -} - -// Of creates a new Label with this key and the supplied value. -func (k *String) Of(v string) label.Label { return label.OfString(k, v) } - -// Get can be used to get a label for the key from a label.Map. -func (k *String) Get(lm label.Map) string { - if t := lm.Find(k); t.Valid() { - return k.From(t) - } - return "" -} - -// From can be used to get a value from a Label. -func (k *String) From(t label.Label) string { return t.UnpackString() } - -// Boolean represents a key -type Boolean struct { - name string - description string -} - -// NewBoolean creates a new Key for bool values. -func NewBoolean(name, description string) *Boolean { - return &Boolean{name: name, description: description} -} - -func (k *Boolean) Name() string { return k.name } -func (k *Boolean) Description() string { return k.description } - -func (k *Boolean) Format(w io.Writer, buf []byte, l label.Label) { - w.Write(strconv.AppendBool(buf, k.From(l))) -} - -// Of creates a new Label with this key and the supplied value. -func (k *Boolean) Of(v bool) label.Label { - if v { - return label.Of64(k, 1) - } - return label.Of64(k, 0) -} - -// Get can be used to get a label for the key from a label.Map. -func (k *Boolean) Get(lm label.Map) bool { - if t := lm.Find(k); t.Valid() { - return k.From(t) - } - return false -} - -// From can be used to get a value from a Label. -func (k *Boolean) From(t label.Label) bool { return t.Unpack64() > 0 } - -// Error represents a key -type Error struct { - name string - description string -} - -// NewError creates a new Key for int64 values. -func NewError(name, description string) *Error { - return &Error{name: name, description: description} -} - -func (k *Error) Name() string { return k.name } -func (k *Error) Description() string { return k.description } - -func (k *Error) Format(w io.Writer, buf []byte, l label.Label) { - io.WriteString(w, k.From(l).Error()) -} - -// Of creates a new Label with this key and the supplied value. -func (k *Error) Of(v error) label.Label { return label.OfValue(k, v) } - -// Get can be used to get a label for the key from a label.Map. -func (k *Error) Get(lm label.Map) error { - if t := lm.Find(k); t.Valid() { - return k.From(t) - } - return nil -} - -// From can be used to get a value from a Label. -func (k *Error) From(t label.Label) error { - err, _ := t.UnpackValue().(error) - return err -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/event/keys/standard.go b/go/extractor/vendor/golang.org/x/tools/internal/event/keys/standard.go deleted file mode 100644 index 7e9586659213..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/event/keys/standard.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package keys - -var ( - // Msg is a key used to add message strings to label lists. - Msg = NewString("message", "a readable message") - // Label is a key used to indicate an event adds labels to the context. - Label = NewTag("label", "a label context marker") - // Start is used for things like traces that have a name. 
- Start = NewString("start", "span start") - // Metric is a key used to indicate an event records metrics. - End = NewTag("end", "a span end marker") - // Metric is a key used to indicate an event records metrics. - Detach = NewTag("detach", "a span detach marker") - // Err is a key used to add error values to label lists. - Err = NewError("error", "an error that occurred") - // Metric is a key used to indicate an event records metrics. - Metric = NewTag("metric", "a metric event marker") -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/event/keys/util.go b/go/extractor/vendor/golang.org/x/tools/internal/event/keys/util.go deleted file mode 100644 index c0e8e731c900..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/event/keys/util.go +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package keys - -import ( - "sort" - "strings" -) - -// Join returns a canonical join of the keys in S: -// a sorted comma-separated string list. -func Join[S ~[]T, T ~string](s S) string { - strs := make([]string, 0, len(s)) - for _, v := range s { - strs = append(strs, string(v)) - } - sort.Strings(strs) - return strings.Join(strs, ",") -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/event/label/BUILD.bazel b/go/extractor/vendor/golang.org/x/tools/internal/event/label/BUILD.bazel deleted file mode 100644 index a4430ba0a17c..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/event/label/BUILD.bazel +++ /dev/null @@ -1,11 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "label", - srcs = ["label.go"], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/tools/internal/event/label", - importpath = "golang.org/x/tools/internal/event/label", - visibility = ["//go/extractor/vendor/golang.org/x/tools:__subpackages__"], -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/event/label/label.go b/go/extractor/vendor/golang.org/x/tools/internal/event/label/label.go deleted file mode 100644 index 0f526e1f9ab4..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/event/label/label.go +++ /dev/null @@ -1,215 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package label - -import ( - "fmt" - "io" - "reflect" - "unsafe" -) - -// Key is used as the identity of a Label. -// Keys are intended to be compared by pointer only, the name should be unique -// for communicating with external systems, but it is not required or enforced. -type Key interface { - // Name returns the key name. - Name() string - // Description returns a string that can be used to describe the value. - Description() string - - // Format is used in formatting to append the value of the label to the - // supplied buffer. - // The formatter may use the supplied buf as a scratch area to avoid - // allocations. - Format(w io.Writer, buf []byte, l Label) -} - -// Label holds a key and value pair. -// It is normally used when passing around lists of labels. -type Label struct { - key Key - packed uint64 - untyped interface{} -} - -// Map is the interface to a collection of Labels indexed by key. -type Map interface { - // Find returns the label that matches the supplied key. 
- Find(key Key) Label -} - -// List is the interface to something that provides an iterable -// list of labels. -// Iteration should start from 0 and continue until Valid returns false. -type List interface { - // Valid returns true if the index is within range for the list. - // It does not imply the label at that index will itself be valid. - Valid(index int) bool - // Label returns the label at the given index. - Label(index int) Label -} - -// list implements LabelList for a list of Labels. -type list struct { - labels []Label -} - -// filter wraps a LabelList filtering out specific labels. -type filter struct { - keys []Key - underlying List -} - -// listMap implements LabelMap for a simple list of labels. -type listMap struct { - labels []Label -} - -// mapChain implements LabelMap for a list of underlying LabelMap. -type mapChain struct { - maps []Map -} - -// OfValue creates a new label from the key and value. -// This method is for implementing new key types, label creation should -// normally be done with the Of method of the key. -func OfValue(k Key, value interface{}) Label { return Label{key: k, untyped: value} } - -// UnpackValue assumes the label was built using LabelOfValue and returns the value -// that was passed to that constructor. -// This method is for implementing new key types, for type safety normal -// access should be done with the From method of the key. -func (t Label) UnpackValue() interface{} { return t.untyped } - -// Of64 creates a new label from a key and a uint64. This is often -// used for non uint64 values that can be packed into a uint64. -// This method is for implementing new key types, label creation should -// normally be done with the Of method of the key. -func Of64(k Key, v uint64) Label { return Label{key: k, packed: v} } - -// Unpack64 assumes the label was built using LabelOf64 and returns the value that -// was passed to that constructor. -// This method is for implementing new key types, for type safety normal -// access should be done with the From method of the key. -func (t Label) Unpack64() uint64 { return t.packed } - -type stringptr unsafe.Pointer - -// OfString creates a new label from a key and a string. -// This method is for implementing new key types, label creation should -// normally be done with the Of method of the key. -func OfString(k Key, v string) Label { - hdr := (*reflect.StringHeader)(unsafe.Pointer(&v)) - return Label{ - key: k, - packed: uint64(hdr.Len), - untyped: stringptr(hdr.Data), - } -} - -// UnpackString assumes the label was built using LabelOfString and returns the -// value that was passed to that constructor. -// This method is for implementing new key types, for type safety normal -// access should be done with the From method of the key. -func (t Label) UnpackString() string { - var v string - hdr := (*reflect.StringHeader)(unsafe.Pointer(&v)) - hdr.Data = uintptr(t.untyped.(stringptr)) - hdr.Len = int(t.packed) - return v -} - -// Valid returns true if the Label is a valid one (it has a key). -func (t Label) Valid() bool { return t.key != nil } - -// Key returns the key of this Label. -func (t Label) Key() Key { return t.key } - -// Format is used for debug printing of labels. 
-func (t Label) Format(f fmt.State, r rune) { - if !t.Valid() { - io.WriteString(f, `nil`) - return - } - io.WriteString(f, t.Key().Name()) - io.WriteString(f, "=") - var buf [128]byte - t.Key().Format(f, buf[:0], t) -} - -func (l *list) Valid(index int) bool { - return index >= 0 && index < len(l.labels) -} - -func (l *list) Label(index int) Label { - return l.labels[index] -} - -func (f *filter) Valid(index int) bool { - return f.underlying.Valid(index) -} - -func (f *filter) Label(index int) Label { - l := f.underlying.Label(index) - for _, f := range f.keys { - if l.Key() == f { - return Label{} - } - } - return l -} - -func (lm listMap) Find(key Key) Label { - for _, l := range lm.labels { - if l.Key() == key { - return l - } - } - return Label{} -} - -func (c mapChain) Find(key Key) Label { - for _, src := range c.maps { - l := src.Find(key) - if l.Valid() { - return l - } - } - return Label{} -} - -var emptyList = &list{} - -func NewList(labels ...Label) List { - if len(labels) == 0 { - return emptyList - } - return &list{labels: labels} -} - -func Filter(l List, keys ...Key) List { - if len(keys) == 0 { - return l - } - return &filter{keys: keys, underlying: l} -} - -func NewMap(labels ...Label) Map { - return listMap{labels: labels} -} - -func MergeMaps(srcs ...Map) Map { - var nonNil []Map - for _, src := range srcs { - if src != nil { - nonNil = append(nonNil, src) - } - } - if len(nonNil) == 1 { - return nonNil[0] - } - return mapChain{maps: nonNil} -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/event/tag/BUILD.bazel b/go/extractor/vendor/golang.org/x/tools/internal/event/tag/BUILD.bazel deleted file mode 100644 index d2c87f41a8ae..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/event/tag/BUILD.bazel +++ /dev/null @@ -1,12 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "tag", - srcs = ["tag.go"], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/tools/internal/event/tag", - importpath = "golang.org/x/tools/internal/event/tag", - visibility = ["//go/extractor/vendor/golang.org/x/tools:__subpackages__"], - deps = ["//go/extractor/vendor/golang.org/x/tools/internal/event/keys"], -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/event/tag/tag.go b/go/extractor/vendor/golang.org/x/tools/internal/event/tag/tag.go deleted file mode 100644 index 581b26c2041f..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/event/tag/tag.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package tag provides the labels used for telemetry throughout gopls. 
-package tag - -import ( - "golang.org/x/tools/internal/event/keys" -) - -var ( - // create the label keys we use - Method = keys.NewString("method", "") - StatusCode = keys.NewString("status.code", "") - StatusMessage = keys.NewString("status.message", "") - RPCID = keys.NewString("id", "") - RPCDirection = keys.NewString("direction", "") - File = keys.NewString("file", "") - Directory = keys.New("directory", "") - URI = keys.New("URI", "") - Package = keys.NewString("package", "") // sorted comma-separated list of Package IDs - PackagePath = keys.NewString("package_path", "") - Query = keys.New("query", "") - Snapshot = keys.NewUInt64("snapshot", "") - Operation = keys.NewString("operation", "") - - Position = keys.New("position", "") - Category = keys.NewString("category", "") - PackageCount = keys.NewInt("packages", "") - Files = keys.New("files", "") - Port = keys.NewInt("port", "") - Type = keys.New("type", "") - HoverKind = keys.NewString("hoverkind", "") - - NewServer = keys.NewString("new_server", "A new server was added") - EndServer = keys.NewString("end_server", "A server was shut down") - - ServerID = keys.NewString("server", "The server ID an event is related to") - Logfile = keys.NewString("logfile", "") - DebugAddress = keys.NewString("debug_address", "") - GoplsPath = keys.NewString("gopls_path", "") - ClientID = keys.NewString("client_id", "") - - Level = keys.NewInt("level", "The logging level") -) - -var ( - // create the stats we measure - Started = keys.NewInt64("started", "Count of started RPCs.") - ReceivedBytes = keys.NewInt64("received_bytes", "Bytes received.") //, unit.Bytes) - SentBytes = keys.NewInt64("sent_bytes", "Bytes sent.") //, unit.Bytes) - Latency = keys.NewFloat64("latency_ms", "Elapsed time in milliseconds") //, unit.Milliseconds) -) - -const ( - Inbound = "in" - Outbound = "out" -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/BUILD.bazel b/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/BUILD.bazel deleted file mode 100644 index 56da3b0130e2..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/BUILD.bazel +++ /dev/null @@ -1,29 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "gcimporter", - srcs = [ - "bimport.go", - "exportdata.go", - "gcimporter.go", - "iexport.go", - "iimport.go", - "newInterface10.go", - "newInterface11.go", - "support_go117.go", - "support_go118.go", - "unified_no.go", - "ureader_no.go", - "ureader_yes.go", - ], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/tools/internal/gcimporter", - importpath = "golang.org/x/tools/internal/gcimporter", - visibility = ["//go/extractor/vendor/golang.org/x/tools:__subpackages__"], - deps = [ - "//go/extractor/vendor/golang.org/x/tools/go/types/objectpath", - "//go/extractor/vendor/golang.org/x/tools/internal/pkgbits", - "//go/extractor/vendor/golang.org/x/tools/internal/tokeninternal", - ], -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/bimport.go b/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/bimport.go deleted file mode 100644 index d98b0db2a9a9..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/bimport.go +++ /dev/null @@ -1,150 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// This file contains the remaining vestiges of -// $GOROOT/src/go/internal/gcimporter/bimport.go. - -package gcimporter - -import ( - "fmt" - "go/token" - "go/types" - "sync" -) - -func errorf(format string, args ...interface{}) { - panic(fmt.Sprintf(format, args...)) -} - -const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go - -// Synthesize a token.Pos -type fakeFileSet struct { - fset *token.FileSet - files map[string]*fileInfo -} - -type fileInfo struct { - file *token.File - lastline int -} - -const maxlines = 64 * 1024 - -func (s *fakeFileSet) pos(file string, line, column int) token.Pos { - // TODO(mdempsky): Make use of column. - - // Since we don't know the set of needed file positions, we reserve maxlines - // positions per file. We delay calling token.File.SetLines until all - // positions have been calculated (by way of fakeFileSet.setLines), so that - // we can avoid setting unnecessary lines. See also golang/go#46586. - f := s.files[file] - if f == nil { - f = &fileInfo{file: s.fset.AddFile(file, -1, maxlines)} - s.files[file] = f - } - if line > maxlines { - line = 1 - } - if line > f.lastline { - f.lastline = line - } - - // Return a fake position assuming that f.file consists only of newlines. - return token.Pos(f.file.Base() + line - 1) -} - -func (s *fakeFileSet) setLines() { - fakeLinesOnce.Do(func() { - fakeLines = make([]int, maxlines) - for i := range fakeLines { - fakeLines[i] = i - } - }) - for _, f := range s.files { - f.file.SetLines(fakeLines[:f.lastline]) - } -} - -var ( - fakeLines []int - fakeLinesOnce sync.Once -) - -func chanDir(d int) types.ChanDir { - // tag values must match the constants in cmd/compile/internal/gc/go.go - switch d { - case 1 /* Crecv */ : - return types.RecvOnly - case 2 /* Csend */ : - return types.SendOnly - case 3 /* Cboth */ : - return types.SendRecv - default: - errorf("unexpected channel dir %d", d) - return 0 - } -} - -var predeclOnce sync.Once -var predecl []types.Type // initialized lazily - -func predeclared() []types.Type { - predeclOnce.Do(func() { - // initialize lazily to be sure that all - // elements have been initialized before - predecl = []types.Type{ // basic types - types.Typ[types.Bool], - types.Typ[types.Int], - types.Typ[types.Int8], - types.Typ[types.Int16], - types.Typ[types.Int32], - types.Typ[types.Int64], - types.Typ[types.Uint], - types.Typ[types.Uint8], - types.Typ[types.Uint16], - types.Typ[types.Uint32], - types.Typ[types.Uint64], - types.Typ[types.Uintptr], - types.Typ[types.Float32], - types.Typ[types.Float64], - types.Typ[types.Complex64], - types.Typ[types.Complex128], - types.Typ[types.String], - - // basic type aliases - types.Universe.Lookup("byte").Type(), - types.Universe.Lookup("rune").Type(), - - // error - types.Universe.Lookup("error").Type(), - - // untyped types - types.Typ[types.UntypedBool], - types.Typ[types.UntypedInt], - types.Typ[types.UntypedRune], - types.Typ[types.UntypedFloat], - types.Typ[types.UntypedComplex], - types.Typ[types.UntypedString], - types.Typ[types.UntypedNil], - - // package unsafe - types.Typ[types.UnsafePointer], - - // invalid type - types.Typ[types.Invalid], // only appears in packages with errors - - // used internally by gc; never used by this package or in .a files - anyType{}, - } - predecl = append(predecl, additionalPredeclared()...) 
- }) - return predecl -} - -type anyType struct{} - -func (t anyType) Underlying() types.Type { return t } -func (t anyType) String() string { return "any" } diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/exportdata.go b/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/exportdata.go deleted file mode 100644 index f6437feb1cfd..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/exportdata.go +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go. - -// This file implements FindExportData. - -package gcimporter - -import ( - "bufio" - "fmt" - "io" - "strconv" - "strings" -) - -func readGopackHeader(r *bufio.Reader) (name string, size int64, err error) { - // See $GOROOT/include/ar.h. - hdr := make([]byte, 16+12+6+6+8+10+2) - _, err = io.ReadFull(r, hdr) - if err != nil { - return - } - // leave for debugging - if false { - fmt.Printf("header: %s", hdr) - } - s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10])) - length, err := strconv.Atoi(s) - size = int64(length) - if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' { - err = fmt.Errorf("invalid archive header") - return - } - name = strings.TrimSpace(string(hdr[:16])) - return -} - -// FindExportData positions the reader r at the beginning of the -// export data section of an underlying GC-created object/archive -// file by reading from it. The reader must be positioned at the -// start of the file before calling this function. The hdr result -// is the string before the export data, either "$$" or "$$B". -// The size result is the length of the export data in bytes, or -1 if not known. -func FindExportData(r *bufio.Reader) (hdr string, size int64, err error) { - // Read first line to make sure this is an object file. - line, err := r.ReadSlice('\n') - if err != nil { - err = fmt.Errorf("can't find export data (%v)", err) - return - } - - if string(line) == "!\n" { - // Archive file. Scan to __.PKGDEF. - var name string - if name, size, err = readGopackHeader(r); err != nil { - return - } - - // First entry should be __.PKGDEF. - if name != "__.PKGDEF" { - err = fmt.Errorf("go archive is missing __.PKGDEF") - return - } - - // Read first line of __.PKGDEF data, so that line - // is once again the first line of the input. - if line, err = r.ReadSlice('\n'); err != nil { - err = fmt.Errorf("can't find export data (%v)", err) - return - } - size -= int64(len(line)) - } - - // Now at __.PKGDEF in archive or still at beginning of file. - // Either way, line should begin with "go object ". - if !strings.HasPrefix(string(line), "go object ") { - err = fmt.Errorf("not a Go object file") - return - } - - // Skip over object header to export data. - // Begins after first line starting with $$. 
- for line[0] != '$' { - if line, err = r.ReadSlice('\n'); err != nil { - err = fmt.Errorf("can't find export data (%v)", err) - return - } - size -= int64(len(line)) - } - hdr = string(line) - if size < 0 { - size = -1 - } - - return -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go b/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go deleted file mode 100644 index 2d078ccb19c5..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go +++ /dev/null @@ -1,273 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// This file is a reduced copy of $GOROOT/src/go/internal/gcimporter/gcimporter.go. - -// Package gcimporter provides various functions for reading -// gc-generated object files that can be used to implement the -// Importer interface defined by the Go 1.5 standard library package. -// -// The encoding is deterministic: if the encoder is applied twice to -// the same types.Package data structure, both encodings are equal. -// This property may be important to avoid spurious changes in -// applications such as build systems. -// -// However, the encoder is not necessarily idempotent. Importing an -// exported package may yield a types.Package that, while it -// represents the same set of Go types as the original, may differ in -// the details of its internal representation. Because of these -// differences, re-encoding the imported package may yield a -// different, but equally valid, encoding of the package. -package gcimporter // import "golang.org/x/tools/internal/gcimporter" - -import ( - "bufio" - "bytes" - "fmt" - "go/build" - "go/token" - "go/types" - "io" - "os" - "os/exec" - "path/filepath" - "strings" - "sync" -) - -const ( - // Enable debug during development: it adds some additional checks, and - // prevents errors from being recovered. - debug = false - - // If trace is set, debugging output is printed to std out. - trace = false -) - -var exportMap sync.Map // package dir β†’ func() (string, bool) - -// lookupGorootExport returns the location of the export data -// (normally found in the build cache, but located in GOROOT/pkg -// in prior Go releases) for the package located in pkgDir. -// -// (We use the package's directory instead of its import path -// mainly to simplify handling of the packages in src/vendor -// and cmd/vendor.) -func lookupGorootExport(pkgDir string) (string, bool) { - f, ok := exportMap.Load(pkgDir) - if !ok { - var ( - listOnce sync.Once - exportPath string - ) - f, _ = exportMap.LoadOrStore(pkgDir, func() (string, bool) { - listOnce.Do(func() { - cmd := exec.Command("go", "list", "-export", "-f", "{{.Export}}", pkgDir) - cmd.Dir = build.Default.GOROOT - var output []byte - output, err := cmd.Output() - if err != nil { - return - } - - exports := strings.Split(string(bytes.TrimSpace(output)), "\n") - if len(exports) != 1 { - return - } - - exportPath = exports[0] - }) - - return exportPath, exportPath != "" - }) - } - - return f.(func() (string, bool))() -} - -var pkgExts = [...]string{".a", ".o"} - -// FindPkg returns the filename and unique package id for an import -// path based on package information provided by build.Import (using -// the build.Default build.Context). A relative srcDir is interpreted -// relative to the current working directory. -// If no file was found, an empty filename is returned. 
-func FindPkg(path, srcDir string) (filename, id string) { - if path == "" { - return - } - - var noext string - switch { - default: - // "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x" - // Don't require the source files to be present. - if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282 - srcDir = abs - } - bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary) - if bp.PkgObj == "" { - var ok bool - if bp.Goroot && bp.Dir != "" { - filename, ok = lookupGorootExport(bp.Dir) - } - if !ok { - id = path // make sure we have an id to print in error message - return - } - } else { - noext = strings.TrimSuffix(bp.PkgObj, ".a") - id = bp.ImportPath - } - - case build.IsLocalImport(path): - // "./x" -> "/this/directory/x.ext", "/this/directory/x" - noext = filepath.Join(srcDir, path) - id = noext - - case filepath.IsAbs(path): - // for completeness only - go/build.Import - // does not support absolute imports - // "/x" -> "/x.ext", "/x" - noext = path - id = path - } - - if false { // for debugging - if path != id { - fmt.Printf("%s -> %s\n", path, id) - } - } - - if filename != "" { - if f, err := os.Stat(filename); err == nil && !f.IsDir() { - return - } - } - - // try extensions - for _, ext := range pkgExts { - filename = noext + ext - if f, err := os.Stat(filename); err == nil && !f.IsDir() { - return - } - } - - filename = "" // not found - return -} - -// Import imports a gc-generated package given its import path and srcDir, adds -// the corresponding package object to the packages map, and returns the object. -// The packages map must contain all packages already imported. -func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) { - var rc io.ReadCloser - var filename, id string - if lookup != nil { - // With custom lookup specified, assume that caller has - // converted path to a canonical import path for use in the map. - if path == "unsafe" { - return types.Unsafe, nil - } - id = path - - // No need to re-import if the package was imported completely before. - if pkg = packages[id]; pkg != nil && pkg.Complete() { - return - } - f, err := lookup(path) - if err != nil { - return nil, err - } - rc = f - } else { - filename, id = FindPkg(path, srcDir) - if filename == "" { - if path == "unsafe" { - return types.Unsafe, nil - } - return nil, fmt.Errorf("can't find import: %q", id) - } - - // no need to re-import if the package was imported completely before - if pkg = packages[id]; pkg != nil && pkg.Complete() { - return - } - - // open file - f, err := os.Open(filename) - if err != nil { - return nil, err - } - defer func() { - if err != nil { - // add file name to error - err = fmt.Errorf("%s: %v", filename, err) - } - }() - rc = f - } - defer rc.Close() - - var hdr string - var size int64 - buf := bufio.NewReader(rc) - if hdr, size, err = FindExportData(buf); err != nil { - return - } - - switch hdr { - case "$$B\n": - var data []byte - data, err = io.ReadAll(buf) - if err != nil { - break - } - - // TODO(gri): allow clients of go/importer to provide a FileSet. - // Or, define a new standard go/types/gcexportdata package. - fset := token.NewFileSet() - - // Select appropriate importer. 
- if len(data) > 0 { - switch data[0] { - case 'v', 'c', 'd': // binary, till go1.10 - return nil, fmt.Errorf("binary (%c) import format is no longer supported", data[0]) - - case 'i': // indexed, till go1.19 - _, pkg, err := IImportData(fset, packages, data[1:], id) - return pkg, err - - case 'u': // unified, from go1.20 - _, pkg, err := UImportData(fset, packages, data[1:size], id) - return pkg, err - - default: - l := len(data) - if l > 10 { - l = 10 - } - return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), id) - } - } - - default: - err = fmt.Errorf("unknown export data header: %q", hdr) - } - - return -} - -func deref(typ types.Type) types.Type { - if p, _ := typ.(*types.Pointer); p != nil { - return p.Elem() - } - return typ -} - -type byPath []*types.Package - -func (a byPath) Len() int { return len(a) } -func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] } -func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() } diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/iexport.go b/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/iexport.go deleted file mode 100644 index 2ee8c70164f8..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/iexport.go +++ /dev/null @@ -1,1321 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Indexed binary package export. -// This file was derived from $GOROOT/src/cmd/compile/internal/gc/iexport.go; -// see that file for specification of the format. - -package gcimporter - -import ( - "bytes" - "encoding/binary" - "fmt" - "go/constant" - "go/token" - "go/types" - "io" - "math/big" - "reflect" - "sort" - "strconv" - "strings" - - "golang.org/x/tools/go/types/objectpath" - "golang.org/x/tools/internal/tokeninternal" -) - -// IExportShallow encodes "shallow" export data for the specified package. -// -// No promises are made about the encoding other than that it can be decoded by -// the same version of IIExportShallow. If you plan to save export data in the -// file system, be sure to include a cryptographic digest of the executable in -// the key to avoid version skew. -// -// If the provided reportf func is non-nil, it will be used for reporting bugs -// encountered during export. -// TODO(rfindley): remove reportf when we are confident enough in the new -// objectpath encoding. -func IExportShallow(fset *token.FileSet, pkg *types.Package, reportf ReportFunc) ([]byte, error) { - // In principle this operation can only fail if out.Write fails, - // but that's impossible for bytes.Buffer---and as a matter of - // fact iexportCommon doesn't even check for I/O errors. - // TODO(adonovan): handle I/O errors properly. - // TODO(adonovan): use byte slices throughout, avoiding copying. - const bundle, shallow = false, true - var out bytes.Buffer - err := iexportCommon(&out, fset, bundle, shallow, iexportVersion, []*types.Package{pkg}) - return out.Bytes(), err -} - -// IImportShallow decodes "shallow" types.Package data encoded by -// IExportShallow in the same executable. This function cannot import data from -// cmd/compile or gcexportdata.Write. -// -// The importer calls getPackages to obtain package symbols for all -// packages mentioned in the export data, including the one being -// decoded. -// -// If the provided reportf func is non-nil, it will be used for reporting bugs -// encountered during import. 
-// TODO(rfindley): remove reportf when we are confident enough in the new -// objectpath encoding. -func IImportShallow(fset *token.FileSet, getPackages GetPackagesFunc, data []byte, path string, reportf ReportFunc) (*types.Package, error) { - const bundle = false - const shallow = true - pkgs, err := iimportCommon(fset, getPackages, data, bundle, path, shallow, reportf) - if err != nil { - return nil, err - } - return pkgs[0], nil -} - -// ReportFunc is the type of a function used to report formatted bugs. -type ReportFunc = func(string, ...interface{}) - -// Current bundled export format version. Increase with each format change. -// 0: initial implementation -const bundleVersion = 0 - -// IExportData writes indexed export data for pkg to out. -// -// If no file set is provided, position info will be missing. -// The package path of the top-level package will not be recorded, -// so that calls to IImportData can override with a provided package path. -func IExportData(out io.Writer, fset *token.FileSet, pkg *types.Package) error { - const bundle, shallow = false, false - return iexportCommon(out, fset, bundle, shallow, iexportVersion, []*types.Package{pkg}) -} - -// IExportBundle writes an indexed export bundle for pkgs to out. -func IExportBundle(out io.Writer, fset *token.FileSet, pkgs []*types.Package) error { - const bundle, shallow = true, false - return iexportCommon(out, fset, bundle, shallow, iexportVersion, pkgs) -} - -func iexportCommon(out io.Writer, fset *token.FileSet, bundle, shallow bool, version int, pkgs []*types.Package) (err error) { - if !debug { - defer func() { - if e := recover(); e != nil { - if ierr, ok := e.(internalError); ok { - err = ierr - return - } - // Not an internal error; panic again. - panic(e) - } - }() - } - - p := iexporter{ - fset: fset, - version: version, - shallow: shallow, - allPkgs: map[*types.Package]bool{}, - stringIndex: map[string]uint64{}, - declIndex: map[types.Object]uint64{}, - tparamNames: map[types.Object]string{}, - typIndex: map[types.Type]uint64{}, - } - if !bundle { - p.localpkg = pkgs[0] - } - - for i, pt := range predeclared() { - p.typIndex[pt] = uint64(i) - } - if len(p.typIndex) > predeclReserved { - panic(internalErrorf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved)) - } - - // Initialize work queue with exported declarations. - for _, pkg := range pkgs { - scope := pkg.Scope() - for _, name := range scope.Names() { - if token.IsExported(name) { - p.pushDecl(scope.Lookup(name)) - } - } - - if bundle { - // Ensure pkg and its imports are included in the index. - p.allPkgs[pkg] = true - for _, imp := range pkg.Imports() { - p.allPkgs[imp] = true - } - } - } - - // Loop until no more work. - for !p.declTodo.empty() { - p.doDecl(p.declTodo.popHead()) - } - - // Produce index of offset of each file record in files. - var files intWriter - var fileOffset []uint64 // fileOffset[i] is offset in files of file encoded as i - if p.shallow { - fileOffset = make([]uint64, len(p.fileInfos)) - for i, info := range p.fileInfos { - fileOffset[i] = uint64(files.Len()) - p.encodeFile(&files, info.file, info.needed) - } - } - - // Append indices to data0 section. - dataLen := uint64(p.data0.Len()) - w := p.newWriter() - w.writeIndex(p.declIndex) - - if bundle { - w.uint64(uint64(len(pkgs))) - for _, pkg := range pkgs { - w.pkg(pkg) - imps := pkg.Imports() - w.uint64(uint64(len(imps))) - for _, imp := range imps { - w.pkg(imp) - } - } - } - w.flush() - - // Assemble header. 
- var hdr intWriter - if bundle { - hdr.uint64(bundleVersion) - } - hdr.uint64(uint64(p.version)) - hdr.uint64(uint64(p.strings.Len())) - if p.shallow { - hdr.uint64(uint64(files.Len())) - hdr.uint64(uint64(len(fileOffset))) - for _, offset := range fileOffset { - hdr.uint64(offset) - } - } - hdr.uint64(dataLen) - - // Flush output. - io.Copy(out, &hdr) - io.Copy(out, &p.strings) - if p.shallow { - io.Copy(out, &files) - } - io.Copy(out, &p.data0) - - return nil -} - -// encodeFile writes to w a representation of the file sufficient to -// faithfully restore position information about all needed offsets. -// Mutates the needed array. -func (p *iexporter) encodeFile(w *intWriter, file *token.File, needed []uint64) { - _ = needed[0] // precondition: needed is non-empty - - w.uint64(p.stringOff(file.Name())) - - size := uint64(file.Size()) - w.uint64(size) - - // Sort the set of needed offsets. Duplicates are harmless. - sort.Slice(needed, func(i, j int) bool { return needed[i] < needed[j] }) - - lines := tokeninternal.GetLines(file) // byte offset of each line start - w.uint64(uint64(len(lines))) - - // Rather than record the entire array of line start offsets, - // we save only a sparse list of (index, offset) pairs for - // the start of each line that contains a needed position. - var sparse [][2]int // (index, offset) pairs -outer: - for i, lineStart := range lines { - lineEnd := size - if i < len(lines)-1 { - lineEnd = uint64(lines[i+1]) - } - // Does this line contains a needed offset? - if needed[0] < lineEnd { - sparse = append(sparse, [2]int{i, lineStart}) - for needed[0] < lineEnd { - needed = needed[1:] - if len(needed) == 0 { - break outer - } - } - } - } - - // Delta-encode the columns. - w.uint64(uint64(len(sparse))) - var prev [2]int - for _, pair := range sparse { - w.uint64(uint64(pair[0] - prev[0])) - w.uint64(uint64(pair[1] - prev[1])) - prev = pair - } -} - -// writeIndex writes out an object index. mainIndex indicates whether -// we're writing out the main index, which is also read by -// non-compiler tools and includes a complete package description -// (i.e., name and height). -func (w *exportWriter) writeIndex(index map[types.Object]uint64) { - type pkgObj struct { - obj types.Object - name string // qualified name; differs from obj.Name for type params - } - // Build a map from packages to objects from that package. - pkgObjs := map[*types.Package][]pkgObj{} - - // For the main index, make sure to include every package that - // we reference, even if we're not exporting (or reexporting) - // any symbols from it. - if w.p.localpkg != nil { - pkgObjs[w.p.localpkg] = nil - } - for pkg := range w.p.allPkgs { - pkgObjs[pkg] = nil - } - - for obj := range index { - name := w.p.exportName(obj) - pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], pkgObj{obj, name}) - } - - var pkgs []*types.Package - for pkg, objs := range pkgObjs { - pkgs = append(pkgs, pkg) - - sort.Slice(objs, func(i, j int) bool { - return objs[i].name < objs[j].name - }) - } - - sort.Slice(pkgs, func(i, j int) bool { - return w.exportPath(pkgs[i]) < w.exportPath(pkgs[j]) - }) - - w.uint64(uint64(len(pkgs))) - for _, pkg := range pkgs { - w.string(w.exportPath(pkg)) - w.string(pkg.Name()) - w.uint64(uint64(0)) // package height is not needed for go/types - - objs := pkgObjs[pkg] - w.uint64(uint64(len(objs))) - for _, obj := range objs { - w.string(obj.name) - w.uint64(index[obj.obj]) - } - } -} - -// exportName returns the 'exported' name of an object. 
It differs from -// obj.Name() only for type parameters (see tparamExportName for details). -func (p *iexporter) exportName(obj types.Object) (res string) { - if name := p.tparamNames[obj]; name != "" { - return name - } - return obj.Name() -} - -type iexporter struct { - fset *token.FileSet - out *bytes.Buffer - version int - - shallow bool // don't put types from other packages in the index - objEncoder *objectpath.Encoder // encodes objects from other packages in shallow mode; lazily allocated - localpkg *types.Package // (nil in bundle mode) - - // allPkgs tracks all packages that have been referenced by - // the export data, so we can ensure to include them in the - // main index. - allPkgs map[*types.Package]bool - - declTodo objQueue - - strings intWriter - stringIndex map[string]uint64 - - // In shallow mode, object positions are encoded as (file, offset). - // Each file is recorded as a line-number table. - // Only the lines of needed positions are saved faithfully. - fileInfo map[*token.File]uint64 // value is index in fileInfos - fileInfos []*filePositions - - data0 intWriter - declIndex map[types.Object]uint64 - tparamNames map[types.Object]string // typeparam->exported name - typIndex map[types.Type]uint64 - - indent int // for tracing support -} - -type filePositions struct { - file *token.File - needed []uint64 // unordered list of needed file offsets -} - -func (p *iexporter) trace(format string, args ...interface{}) { - if !trace { - // Call sites should also be guarded, but having this check here allows - // easily enabling/disabling debug trace statements. - return - } - fmt.Printf(strings.Repeat("..", p.indent)+format+"\n", args...) -} - -// objectpathEncoder returns the lazily allocated objectpath.Encoder to use -// when encoding objects in other packages during shallow export. -// -// Using a shared Encoder amortizes some of cost of objectpath search. -func (p *iexporter) objectpathEncoder() *objectpath.Encoder { - if p.objEncoder == nil { - p.objEncoder = new(objectpath.Encoder) - } - return p.objEncoder -} - -// stringOff returns the offset of s within the string section. -// If not already present, it's added to the end. -func (p *iexporter) stringOff(s string) uint64 { - off, ok := p.stringIndex[s] - if !ok { - off = uint64(p.strings.Len()) - p.stringIndex[s] = off - - p.strings.uint64(uint64(len(s))) - p.strings.WriteString(s) - } - return off -} - -// fileIndexAndOffset returns the index of the token.File and the byte offset of pos within it. -func (p *iexporter) fileIndexAndOffset(file *token.File, pos token.Pos) (uint64, uint64) { - index, ok := p.fileInfo[file] - if !ok { - index = uint64(len(p.fileInfo)) - p.fileInfos = append(p.fileInfos, &filePositions{file: file}) - if p.fileInfo == nil { - p.fileInfo = make(map[*token.File]uint64) - } - p.fileInfo[file] = index - } - // Record each needed offset. - info := p.fileInfos[index] - offset := uint64(file.Offset(pos)) - info.needed = append(info.needed, offset) - - return index, offset -} - -// pushDecl adds n to the declaration work queue, if not already present. -func (p *iexporter) pushDecl(obj types.Object) { - // Package unsafe is known to the compiler and predeclared. - // Caller should not ask us to do export it. - if obj.Pkg() == types.Unsafe { - panic("cannot export package unsafe") - } - - // Shallow export data: don't index decls from other packages. 
- if p.shallow && obj.Pkg() != p.localpkg { - return - } - - if _, ok := p.declIndex[obj]; ok { - return - } - - p.declIndex[obj] = ^uint64(0) // mark obj present in work queue - p.declTodo.pushTail(obj) -} - -// exportWriter handles writing out individual data section chunks. -type exportWriter struct { - p *iexporter - - data intWriter - prevFile string - prevLine int64 - prevColumn int64 -} - -func (w *exportWriter) exportPath(pkg *types.Package) string { - if pkg == w.p.localpkg { - return "" - } - return pkg.Path() -} - -func (p *iexporter) doDecl(obj types.Object) { - if trace { - p.trace("exporting decl %v (%T)", obj, obj) - p.indent++ - defer func() { - p.indent-- - p.trace("=> %s", obj) - }() - } - w := p.newWriter() - - switch obj := obj.(type) { - case *types.Var: - w.tag('V') - w.pos(obj.Pos()) - w.typ(obj.Type(), obj.Pkg()) - - case *types.Func: - sig, _ := obj.Type().(*types.Signature) - if sig.Recv() != nil { - // We shouldn't see methods in the package scope, - // but the type checker may repair "func () F() {}" - // to "func (Invalid) F()" and then treat it like "func F()", - // so allow that. See golang/go#57729. - if sig.Recv().Type() != types.Typ[types.Invalid] { - panic(internalErrorf("unexpected method: %v", sig)) - } - } - - // Function. - if sig.TypeParams().Len() == 0 { - w.tag('F') - } else { - w.tag('G') - } - w.pos(obj.Pos()) - // The tparam list of the function type is the declaration of the type - // params. So, write out the type params right now. Then those type params - // will be referenced via their type offset (via typOff) in all other - // places in the signature and function where they are used. - // - // While importing the type parameters, tparamList computes and records - // their export name, so that it can be later used when writing the index. - if tparams := sig.TypeParams(); tparams.Len() > 0 { - w.tparamList(obj.Name(), tparams, obj.Pkg()) - } - w.signature(sig) - - case *types.Const: - w.tag('C') - w.pos(obj.Pos()) - w.value(obj.Type(), obj.Val()) - - case *types.TypeName: - t := obj.Type() - - if tparam, ok := t.(*types.TypeParam); ok { - w.tag('P') - w.pos(obj.Pos()) - constraint := tparam.Constraint() - if p.version >= iexportVersionGo1_18 { - implicit := false - if iface, _ := constraint.(*types.Interface); iface != nil { - implicit = iface.IsImplicit() - } - w.bool(implicit) - } - w.typ(constraint, obj.Pkg()) - break - } - - if obj.IsAlias() { - w.tag('A') - w.pos(obj.Pos()) - w.typ(t, obj.Pkg()) - break - } - - // Defined type. - named, ok := t.(*types.Named) - if !ok { - panic(internalErrorf("%s is not a defined type", t)) - } - - if named.TypeParams().Len() == 0 { - w.tag('T') - } else { - w.tag('U') - } - w.pos(obj.Pos()) - - if named.TypeParams().Len() > 0 { - // While importing the type parameters, tparamList computes and records - // their export name, so that it can be later used when writing the index. - w.tparamList(obj.Name(), named.TypeParams(), obj.Pkg()) - } - - underlying := obj.Type().Underlying() - w.typ(underlying, obj.Pkg()) - - if types.IsInterface(t) { - break - } - - n := named.NumMethods() - w.uint64(uint64(n)) - for i := 0; i < n; i++ { - m := named.Method(i) - w.pos(m.Pos()) - w.string(m.Name()) - sig, _ := m.Type().(*types.Signature) - - // Receiver type parameters are type arguments of the receiver type, so - // their name must be qualified before exporting recv. - if rparams := sig.RecvTypeParams(); rparams.Len() > 0 { - prefix := obj.Name() + "." 
+ m.Name() - for i := 0; i < rparams.Len(); i++ { - rparam := rparams.At(i) - name := tparamExportName(prefix, rparam) - w.p.tparamNames[rparam.Obj()] = name - } - } - w.param(sig.Recv()) - w.signature(sig) - } - - default: - panic(internalErrorf("unexpected object: %v", obj)) - } - - p.declIndex[obj] = w.flush() -} - -func (w *exportWriter) tag(tag byte) { - w.data.WriteByte(tag) -} - -func (w *exportWriter) pos(pos token.Pos) { - if w.p.shallow { - w.posV2(pos) - } else if w.p.version >= iexportVersionPosCol { - w.posV1(pos) - } else { - w.posV0(pos) - } -} - -// posV2 encoding (used only in shallow mode) records positions as -// (file, offset), where file is the index in the token.File table -// (which records the file name and newline offsets) and offset is a -// byte offset. It effectively ignores //line directives. -func (w *exportWriter) posV2(pos token.Pos) { - if pos == token.NoPos { - w.uint64(0) - return - } - file := w.p.fset.File(pos) // fset must be non-nil - index, offset := w.p.fileIndexAndOffset(file, pos) - w.uint64(1 + index) - w.uint64(offset) -} - -func (w *exportWriter) posV1(pos token.Pos) { - if w.p.fset == nil { - w.int64(0) - return - } - - p := w.p.fset.Position(pos) - file := p.Filename - line := int64(p.Line) - column := int64(p.Column) - - deltaColumn := (column - w.prevColumn) << 1 - deltaLine := (line - w.prevLine) << 1 - - if file != w.prevFile { - deltaLine |= 1 - } - if deltaLine != 0 { - deltaColumn |= 1 - } - - w.int64(deltaColumn) - if deltaColumn&1 != 0 { - w.int64(deltaLine) - if deltaLine&1 != 0 { - w.string(file) - } - } - - w.prevFile = file - w.prevLine = line - w.prevColumn = column -} - -func (w *exportWriter) posV0(pos token.Pos) { - if w.p.fset == nil { - w.int64(0) - return - } - - p := w.p.fset.Position(pos) - file := p.Filename - line := int64(p.Line) - - // When file is the same as the last position (common case), - // we can save a few bytes by delta encoding just the line - // number. - // - // Note: Because data objects may be read out of order (or not - // at all), we can only apply delta encoding within a single - // object. This is handled implicitly by tracking prevFile and - // prevLine as fields of exportWriter. - - if file == w.prevFile { - delta := line - w.prevLine - w.int64(delta) - if delta == deltaNewFile { - w.int64(-1) - } - } else { - w.int64(deltaNewFile) - w.int64(line) // line >= 0 - w.string(file) - w.prevFile = file - } - w.prevLine = line -} - -func (w *exportWriter) pkg(pkg *types.Package) { - // Ensure any referenced packages are declared in the main index. - w.p.allPkgs[pkg] = true - - w.string(w.exportPath(pkg)) -} - -func (w *exportWriter) qualifiedType(obj *types.TypeName) { - name := w.p.exportName(obj) - - // Ensure any referenced declarations are written out too. - w.p.pushDecl(obj) - w.string(name) - w.pkg(obj.Pkg()) -} - -// TODO(rfindley): what does 'pkg' even mean here? It would be better to pass -// it in explicitly into signatures and structs that may use it for -// constructing fields. 
-func (w *exportWriter) typ(t types.Type, pkg *types.Package) { - w.data.uint64(w.p.typOff(t, pkg)) -} - -func (p *iexporter) newWriter() *exportWriter { - return &exportWriter{p: p} -} - -func (w *exportWriter) flush() uint64 { - off := uint64(w.p.data0.Len()) - io.Copy(&w.p.data0, &w.data) - return off -} - -func (p *iexporter) typOff(t types.Type, pkg *types.Package) uint64 { - off, ok := p.typIndex[t] - if !ok { - w := p.newWriter() - w.doTyp(t, pkg) - off = predeclReserved + w.flush() - p.typIndex[t] = off - } - return off -} - -func (w *exportWriter) startType(k itag) { - w.data.uint64(uint64(k)) -} - -func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) { - if trace { - w.p.trace("exporting type %s (%T)", t, t) - w.p.indent++ - defer func() { - w.p.indent-- - w.p.trace("=> %s", t) - }() - } - switch t := t.(type) { - case *types.Named: - if targs := t.TypeArgs(); targs.Len() > 0 { - w.startType(instanceType) - // TODO(rfindley): investigate if this position is correct, and if it - // matters. - w.pos(t.Obj().Pos()) - w.typeList(targs, pkg) - w.typ(t.Origin(), pkg) - return - } - w.startType(definedType) - w.qualifiedType(t.Obj()) - - case *types.TypeParam: - w.startType(typeParamType) - w.qualifiedType(t.Obj()) - - case *types.Pointer: - w.startType(pointerType) - w.typ(t.Elem(), pkg) - - case *types.Slice: - w.startType(sliceType) - w.typ(t.Elem(), pkg) - - case *types.Array: - w.startType(arrayType) - w.uint64(uint64(t.Len())) - w.typ(t.Elem(), pkg) - - case *types.Chan: - w.startType(chanType) - // 1 RecvOnly; 2 SendOnly; 3 SendRecv - var dir uint64 - switch t.Dir() { - case types.RecvOnly: - dir = 1 - case types.SendOnly: - dir = 2 - case types.SendRecv: - dir = 3 - } - w.uint64(dir) - w.typ(t.Elem(), pkg) - - case *types.Map: - w.startType(mapType) - w.typ(t.Key(), pkg) - w.typ(t.Elem(), pkg) - - case *types.Signature: - w.startType(signatureType) - w.pkg(pkg) - w.signature(t) - - case *types.Struct: - w.startType(structType) - n := t.NumFields() - // Even for struct{} we must emit some qualifying package, because that's - // what the compiler does, and thus that's what the importer expects. - fieldPkg := pkg - if n > 0 { - fieldPkg = t.Field(0).Pkg() - } - if fieldPkg == nil { - // TODO(rfindley): improve this very hacky logic. - // - // The importer expects a package to be set for all struct types, even - // those with no fields. A better encoding might be to set NumFields - // before pkg. setPkg panics with a nil package, which may be possible - // to reach with invalid packages (and perhaps valid packages, too?), so - // (arbitrarily) set the localpkg if available. - // - // Alternatively, we may be able to simply guarantee that pkg != nil, by - // reconsidering the encoding of constant values. 
- if w.p.shallow { - fieldPkg = w.p.localpkg - } else { - panic(internalErrorf("no package to set for empty struct")) - } - } - w.pkg(fieldPkg) - w.uint64(uint64(n)) - - for i := 0; i < n; i++ { - f := t.Field(i) - if w.p.shallow { - w.objectPath(f) - } - w.pos(f.Pos()) - w.string(f.Name()) // unexported fields implicitly qualified by prior setPkg - w.typ(f.Type(), fieldPkg) - w.bool(f.Anonymous()) - w.string(t.Tag(i)) // note (or tag) - } - - case *types.Interface: - w.startType(interfaceType) - w.pkg(pkg) - - n := t.NumEmbeddeds() - w.uint64(uint64(n)) - for i := 0; i < n; i++ { - ft := t.EmbeddedType(i) - tPkg := pkg - if named, _ := ft.(*types.Named); named != nil { - w.pos(named.Obj().Pos()) - } else { - w.pos(token.NoPos) - } - w.typ(ft, tPkg) - } - - // See comment for struct fields. In shallow mode we change the encoding - // for interface methods that are promoted from other packages. - - n = t.NumExplicitMethods() - w.uint64(uint64(n)) - for i := 0; i < n; i++ { - m := t.ExplicitMethod(i) - if w.p.shallow { - w.objectPath(m) - } - w.pos(m.Pos()) - w.string(m.Name()) - sig, _ := m.Type().(*types.Signature) - w.signature(sig) - } - - case *types.Union: - w.startType(unionType) - nt := t.Len() - w.uint64(uint64(nt)) - for i := 0; i < nt; i++ { - term := t.Term(i) - w.bool(term.Tilde()) - w.typ(term.Type(), pkg) - } - - default: - panic(internalErrorf("unexpected type: %v, %v", t, reflect.TypeOf(t))) - } -} - -// objectPath writes the package and objectPath to use to look up obj in a -// different package, when encoding in "shallow" mode. -// -// When doing a shallow import, the importer creates only the local package, -// and requests package symbols for dependencies from the client. -// However, certain types defined in the local package may hold objects defined -// (perhaps deeply) within another package. -// -// For example, consider the following: -// -// package a -// func F() chan * map[string] struct { X int } -// -// package b -// import "a" -// var B = a.F() -// -// In this example, the type of b.B holds fields defined in package a. -// In order to have the correct canonical objects for the field defined in the -// type of B, they are encoded as objectPaths and later looked up in the -// importer. The same problem applies to interface methods. -func (w *exportWriter) objectPath(obj types.Object) { - if obj.Pkg() == nil || obj.Pkg() == w.p.localpkg { - // obj.Pkg() may be nil for the builtin error.Error. - // In this case, or if obj is declared in the local package, no need to - // encode. - w.string("") - return - } - objectPath, err := w.p.objectpathEncoder().For(obj) - if err != nil { - // Fall back to the empty string, which will cause the importer to create a - // new object, which matches earlier behavior. Creating a new object is - // sufficient for many purposes (such as type checking), but causes certain - // references algorithms to fail (golang/go#60819). However, we didn't - // notice this problem during months of gopls@v0.12.0 testing. - // - // TODO(golang/go#61674): this workaround is insufficient, as in the case - // where the field forwarded from an instantiated type that may not appear - // in the export data of the original package: - // - // // package a - // type A[P any] struct{ F P } - // - // // package b - // type B a.A[int] - // - // We need to update references algorithms not to depend on this - // de-duplication, at which point we may want to simply remove the - // workaround here. 
- w.string("") - return - } - w.string(string(objectPath)) - w.pkg(obj.Pkg()) -} - -func (w *exportWriter) signature(sig *types.Signature) { - w.paramList(sig.Params()) - w.paramList(sig.Results()) - if sig.Params().Len() > 0 { - w.bool(sig.Variadic()) - } -} - -func (w *exportWriter) typeList(ts *types.TypeList, pkg *types.Package) { - w.uint64(uint64(ts.Len())) - for i := 0; i < ts.Len(); i++ { - w.typ(ts.At(i), pkg) - } -} - -func (w *exportWriter) tparamList(prefix string, list *types.TypeParamList, pkg *types.Package) { - ll := uint64(list.Len()) - w.uint64(ll) - for i := 0; i < list.Len(); i++ { - tparam := list.At(i) - // Set the type parameter exportName before exporting its type. - exportName := tparamExportName(prefix, tparam) - w.p.tparamNames[tparam.Obj()] = exportName - w.typ(list.At(i), pkg) - } -} - -const blankMarker = "$" - -// tparamExportName returns the 'exported' name of a type parameter, which -// differs from its actual object name: it is prefixed with a qualifier, and -// blank type parameter names are disambiguated by their index in the type -// parameter list. -func tparamExportName(prefix string, tparam *types.TypeParam) string { - assert(prefix != "") - name := tparam.Obj().Name() - if name == "_" { - name = blankMarker + strconv.Itoa(tparam.Index()) - } - return prefix + "." + name -} - -// tparamName returns the real name of a type parameter, after stripping its -// qualifying prefix and reverting blank-name encoding. See tparamExportName -// for details. -func tparamName(exportName string) string { - // Remove the "path" from the type param name that makes it unique. - ix := strings.LastIndex(exportName, ".") - if ix < 0 { - errorf("malformed type parameter export name %s: missing prefix", exportName) - } - name := exportName[ix+1:] - if strings.HasPrefix(name, blankMarker) { - return "_" - } - return name -} - -func (w *exportWriter) paramList(tup *types.Tuple) { - n := tup.Len() - w.uint64(uint64(n)) - for i := 0; i < n; i++ { - w.param(tup.At(i)) - } -} - -func (w *exportWriter) param(obj types.Object) { - w.pos(obj.Pos()) - w.localIdent(obj) - w.typ(obj.Type(), obj.Pkg()) -} - -func (w *exportWriter) value(typ types.Type, v constant.Value) { - w.typ(typ, nil) - if w.p.version >= iexportVersionGo1_18 { - w.int64(int64(v.Kind())) - } - - if v.Kind() == constant.Unknown { - // golang/go#60605: treat unknown constant values as if they have invalid type - // - // This loses some fidelity over the package type-checked from source, but that - // is acceptable. 
- // - // TODO(rfindley): we should switch on the recorded constant kind rather - // than the constant type - return - } - - switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType { - case types.IsBoolean: - w.bool(constant.BoolVal(v)) - case types.IsInteger: - var i big.Int - if i64, exact := constant.Int64Val(v); exact { - i.SetInt64(i64) - } else if ui64, exact := constant.Uint64Val(v); exact { - i.SetUint64(ui64) - } else { - i.SetString(v.ExactString(), 10) - } - w.mpint(&i, typ) - case types.IsFloat: - f := constantToFloat(v) - w.mpfloat(f, typ) - case types.IsComplex: - w.mpfloat(constantToFloat(constant.Real(v)), typ) - w.mpfloat(constantToFloat(constant.Imag(v)), typ) - case types.IsString: - w.string(constant.StringVal(v)) - default: - if b.Kind() == types.Invalid { - // package contains type errors - break - } - panic(internalErrorf("unexpected type %v (%v)", typ, typ.Underlying())) - } -} - -// constantToFloat converts a constant.Value with kind constant.Float to a -// big.Float. -func constantToFloat(x constant.Value) *big.Float { - x = constant.ToFloat(x) - // Use the same floating-point precision (512) as cmd/compile - // (see Mpprec in cmd/compile/internal/gc/mpfloat.go). - const mpprec = 512 - var f big.Float - f.SetPrec(mpprec) - if v, exact := constant.Float64Val(x); exact { - // float64 - f.SetFloat64(v) - } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int { - // TODO(gri): add big.Rat accessor to constant.Value. - n := valueToRat(num) - d := valueToRat(denom) - f.SetRat(n.Quo(n, d)) - } else { - // Value too large to represent as a fraction => inaccessible. - // TODO(gri): add big.Float accessor to constant.Value. - _, ok := f.SetString(x.ExactString()) - assert(ok) - } - return &f -} - -func valueToRat(x constant.Value) *big.Rat { - // Convert little-endian to big-endian. - // I can't believe this is necessary. - bytes := constant.Bytes(x) - for i := 0; i < len(bytes)/2; i++ { - bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i] - } - return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes)) -} - -// mpint exports a multi-precision integer. -// -// For unsigned types, small values are written out as a single -// byte. Larger values are written out as a length-prefixed big-endian -// byte string, where the length prefix is encoded as its complement. -// For example, bytes 0, 1, and 2 directly represent the integer -// values 0, 1, and 2; while bytes 255, 254, and 253 indicate a 1-, -// 2-, and 3-byte big-endian string follow. -// -// Encoding for signed types use the same general approach as for -// unsigned types, except small values use zig-zag encoding and the -// bottom bit of length prefix byte for large values is reserved as a -// sign bit. -// -// The exact boundary between small and large encodings varies -// according to the maximum number of bytes needed to encode a value -// of type typ. As a special case, 8-bit types are always encoded as a -// single byte. -// -// TODO(mdempsky): Is this level of complexity really worthwhile? 
-func (w *exportWriter) mpint(x *big.Int, typ types.Type) { - basic, ok := typ.Underlying().(*types.Basic) - if !ok { - panic(internalErrorf("unexpected type %v (%T)", typ.Underlying(), typ.Underlying())) - } - - signed, maxBytes := intSize(basic) - - negative := x.Sign() < 0 - if !signed && negative { - panic(internalErrorf("negative unsigned integer; type %v, value %v", typ, x)) - } - - b := x.Bytes() - if len(b) > 0 && b[0] == 0 { - panic(internalErrorf("leading zeros")) - } - if uint(len(b)) > maxBytes { - panic(internalErrorf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x)) - } - - maxSmall := 256 - maxBytes - if signed { - maxSmall = 256 - 2*maxBytes - } - if maxBytes == 1 { - maxSmall = 256 - } - - // Check if x can use small value encoding. - if len(b) <= 1 { - var ux uint - if len(b) == 1 { - ux = uint(b[0]) - } - if signed { - ux <<= 1 - if negative { - ux-- - } - } - if ux < maxSmall { - w.data.WriteByte(byte(ux)) - return - } - } - - n := 256 - uint(len(b)) - if signed { - n = 256 - 2*uint(len(b)) - if negative { - n |= 1 - } - } - if n < maxSmall || n >= 256 { - panic(internalErrorf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n)) - } - - w.data.WriteByte(byte(n)) - w.data.Write(b) -} - -// mpfloat exports a multi-precision floating point number. -// -// The number's value is decomposed into mantissa Γ— 2**exponent, where -// mantissa is an integer. The value is written out as mantissa (as a -// multi-precision integer) and then the exponent, except exponent is -// omitted if mantissa is zero. -func (w *exportWriter) mpfloat(f *big.Float, typ types.Type) { - if f.IsInf() { - panic("infinite constant") - } - - // Break into f = mant Γ— 2**exp, with 0.5 <= mant < 1. - var mant big.Float - exp := int64(f.MantExp(&mant)) - - // Scale so that mant is an integer. - prec := mant.MinPrec() - mant.SetMantExp(&mant, int(prec)) - exp -= int64(prec) - - manti, acc := mant.Int(nil) - if acc != big.Exact { - panic(internalErrorf("mantissa scaling failed for %f (%s)", f, acc)) - } - w.mpint(manti, typ) - if manti.Sign() != 0 { - w.int64(exp) - } -} - -func (w *exportWriter) bool(b bool) bool { - var x uint64 - if b { - x = 1 - } - w.uint64(x) - return b -} - -func (w *exportWriter) int64(x int64) { w.data.int64(x) } -func (w *exportWriter) uint64(x uint64) { w.data.uint64(x) } -func (w *exportWriter) string(s string) { w.uint64(w.p.stringOff(s)) } - -func (w *exportWriter) localIdent(obj types.Object) { - // Anonymous parameters. - if obj == nil { - w.string("") - return - } - - name := obj.Name() - if name == "_" { - w.string("_") - return - } - - w.string(name) -} - -type intWriter struct { - bytes.Buffer -} - -func (w *intWriter) int64(x int64) { - var buf [binary.MaxVarintLen64]byte - n := binary.PutVarint(buf[:], x) - w.Write(buf[:n]) -} - -func (w *intWriter) uint64(x uint64) { - var buf [binary.MaxVarintLen64]byte - n := binary.PutUvarint(buf[:], x) - w.Write(buf[:n]) -} - -func assert(cond bool) { - if !cond { - panic("internal error: assertion failed") - } -} - -// The below is copied from go/src/cmd/compile/internal/gc/syntax.go. - -// objQueue is a FIFO queue of types.Object. The zero value of objQueue is -// a ready-to-use empty queue. -type objQueue struct { - ring []types.Object - head, tail int -} - -// empty returns true if q contains no Nodes. -func (q *objQueue) empty() bool { - return q.head == q.tail -} - -// pushTail appends n to the tail of the queue. 
-func (q *objQueue) pushTail(obj types.Object) { - if len(q.ring) == 0 { - q.ring = make([]types.Object, 16) - } else if q.head+len(q.ring) == q.tail { - // Grow the ring. - nring := make([]types.Object, len(q.ring)*2) - // Copy the old elements. - part := q.ring[q.head%len(q.ring):] - if q.tail-q.head <= len(part) { - part = part[:q.tail-q.head] - copy(nring, part) - } else { - pos := copy(nring, part) - copy(nring[pos:], q.ring[:q.tail%len(q.ring)]) - } - q.ring, q.head, q.tail = nring, 0, q.tail-q.head - } - - q.ring[q.tail%len(q.ring)] = obj - q.tail++ -} - -// popHead pops a node from the head of the queue. It panics if q is empty. -func (q *objQueue) popHead() types.Object { - if q.empty() { - panic("dequeue empty") - } - obj := q.ring[q.head%len(q.ring)] - q.head++ - return obj -} - -// internalError represents an error generated inside this package. -type internalError string - -func (e internalError) Error() string { return "gcimporter: " + string(e) } - -// TODO(adonovan): make this call panic, so that it's symmetric with errorf. -// Otherwise it's easy to forget to do anything with the error. -// -// TODO(adonovan): also, consider switching the names "errorf" and -// "internalErrorf" as the former is used for bugs, whose cause is -// internal inconsistency, whereas the latter is used for ordinary -// situations like bad input, whose cause is external. -func internalErrorf(format string, args ...interface{}) error { - return internalError(fmt.Sprintf(format, args...)) -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/iimport.go b/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/iimport.go deleted file mode 100644 index 9fffa9ad05cb..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/iimport.go +++ /dev/null @@ -1,1089 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Indexed package import. -// See cmd/compile/internal/gc/iexport.go for the export data format. - -// This file is a copy of $GOROOT/src/go/internal/gcimporter/iimport.go. - -package gcimporter - -import ( - "bytes" - "encoding/binary" - "fmt" - "go/constant" - "go/token" - "go/types" - "io" - "math/big" - "sort" - "strings" - - "golang.org/x/tools/go/types/objectpath" -) - -type intReader struct { - *bytes.Reader - path string -} - -func (r *intReader) int64() int64 { - i, err := binary.ReadVarint(r.Reader) - if err != nil { - errorf("import %q: read varint error: %v", r.path, err) - } - return i -} - -func (r *intReader) uint64() uint64 { - i, err := binary.ReadUvarint(r.Reader) - if err != nil { - errorf("import %q: read varint error: %v", r.path, err) - } - return i -} - -// Keep this in sync with constants in iexport.go. -const ( - iexportVersionGo1_11 = 0 - iexportVersionPosCol = 1 - iexportVersionGo1_18 = 2 - iexportVersionGenerics = 2 - - iexportVersionCurrent = 2 -) - -type ident struct { - pkg *types.Package - name string -} - -const predeclReserved = 32 - -type itag uint64 - -const ( - // Types - definedType itag = iota - pointerType - sliceType - arrayType - chanType - mapType - signatureType - structType - interfaceType - typeParamType - instanceType - unionType -) - -// IImportData imports a package from the serialized package data -// and returns 0 and a reference to the package. -// If the export data version is not recognized or the format is otherwise -// compromised, an error is returned. 
-func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (int, *types.Package, error) { - pkgs, err := iimportCommon(fset, GetPackagesFromMap(imports), data, false, path, false, nil) - if err != nil { - return 0, nil, err - } - return 0, pkgs[0], nil -} - -// IImportBundle imports a set of packages from the serialized package bundle. -func IImportBundle(fset *token.FileSet, imports map[string]*types.Package, data []byte) ([]*types.Package, error) { - return iimportCommon(fset, GetPackagesFromMap(imports), data, true, "", false, nil) -} - -// A GetPackagesFunc function obtains the non-nil symbols for a set of -// packages, creating and recursively importing them as needed. An -// implementation should store each package symbol is in the Pkg -// field of the items array. -// -// Any error causes importing to fail. This can be used to quickly read -// the import manifest of an export data file without fully decoding it. -type GetPackagesFunc = func(items []GetPackagesItem) error - -// A GetPackagesItem is a request from the importer for the package -// symbol of the specified name and path. -type GetPackagesItem struct { - Name, Path string - Pkg *types.Package // to be filled in by GetPackagesFunc call - - // private importer state - pathOffset uint64 - nameIndex map[string]uint64 -} - -// GetPackagesFromMap returns a GetPackagesFunc that retrieves -// packages from the given map of package path to package. -// -// The returned function may mutate m: each requested package that is not -// found is created with types.NewPackage and inserted into m. -func GetPackagesFromMap(m map[string]*types.Package) GetPackagesFunc { - return func(items []GetPackagesItem) error { - for i, item := range items { - pkg, ok := m[item.Path] - if !ok { - pkg = types.NewPackage(item.Path, item.Name) - m[item.Path] = pkg - } - items[i].Pkg = pkg - } - return nil - } -} - -func iimportCommon(fset *token.FileSet, getPackages GetPackagesFunc, data []byte, bundle bool, path string, shallow bool, reportf ReportFunc) (pkgs []*types.Package, err error) { - const currentVersion = iexportVersionCurrent - version := int64(-1) - if !debug { - defer func() { - if e := recover(); e != nil { - if bundle { - err = fmt.Errorf("%v", e) - } else if version > currentVersion { - err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e) - } else { - err = fmt.Errorf("internal error while importing %q (%v); please report an issue", path, e) - } - } - }() - } - - r := &intReader{bytes.NewReader(data), path} - - if bundle { - if v := r.uint64(); v != bundleVersion { - errorf("unknown bundle format version %d", v) - } - } - - version = int64(r.uint64()) - switch version { - case iexportVersionGo1_18, iexportVersionPosCol, iexportVersionGo1_11: - default: - if version > iexportVersionGo1_18 { - errorf("unstable iexport format version %d, just rebuild compiler and std library", version) - } else { - errorf("unknown iexport format version %d", version) - } - } - - sLen := int64(r.uint64()) - var fLen int64 - var fileOffset []uint64 - if shallow { - // Shallow mode uses a different position encoding. 
- fLen = int64(r.uint64()) - fileOffset = make([]uint64, r.uint64()) - for i := range fileOffset { - fileOffset[i] = r.uint64() - } - } - dLen := int64(r.uint64()) - - whence, _ := r.Seek(0, io.SeekCurrent) - stringData := data[whence : whence+sLen] - fileData := data[whence+sLen : whence+sLen+fLen] - declData := data[whence+sLen+fLen : whence+sLen+fLen+dLen] - r.Seek(sLen+fLen+dLen, io.SeekCurrent) - - p := iimporter{ - version: int(version), - ipath: path, - shallow: shallow, - reportf: reportf, - - stringData: stringData, - stringCache: make(map[uint64]string), - fileOffset: fileOffset, - fileData: fileData, - fileCache: make([]*token.File, len(fileOffset)), - pkgCache: make(map[uint64]*types.Package), - - declData: declData, - pkgIndex: make(map[*types.Package]map[string]uint64), - typCache: make(map[uint64]types.Type), - // Separate map for typeparams, keyed by their package and unique - // name. - tparamIndex: make(map[ident]types.Type), - - fake: fakeFileSet{ - fset: fset, - files: make(map[string]*fileInfo), - }, - } - defer p.fake.setLines() // set lines for files in fset - - for i, pt := range predeclared() { - p.typCache[uint64(i)] = pt - } - - // Gather the relevant packages from the manifest. - items := make([]GetPackagesItem, r.uint64()) - uniquePkgPaths := make(map[string]bool) - for i := range items { - pkgPathOff := r.uint64() - pkgPath := p.stringAt(pkgPathOff) - pkgName := p.stringAt(r.uint64()) - _ = r.uint64() // package height; unused by go/types - - if pkgPath == "" { - pkgPath = path - } - items[i].Name = pkgName - items[i].Path = pkgPath - items[i].pathOffset = pkgPathOff - - // Read index for package. - nameIndex := make(map[string]uint64) - nSyms := r.uint64() - // In shallow mode, only the current package (i=0) has an index. - assert(!(shallow && i > 0 && nSyms != 0)) - for ; nSyms > 0; nSyms-- { - name := p.stringAt(r.uint64()) - nameIndex[name] = r.uint64() - } - - items[i].nameIndex = nameIndex - - uniquePkgPaths[pkgPath] = true - } - // Debugging #63822; hypothesis: there are duplicate PkgPaths. - if len(uniquePkgPaths) != len(items) { - reportf("found duplicate PkgPaths while reading export data manifest: %v", items) - } - - // Request packages all at once from the client, - // enabling a parallel implementation. - if err := getPackages(items); err != nil { - return nil, err // don't wrap this error - } - - // Check the results and complete the index. - pkgList := make([]*types.Package, len(items)) - for i, item := range items { - pkg := item.Pkg - if pkg == nil { - errorf("internal error: getPackages returned nil package for %q", item.Path) - } else if pkg.Path() != item.Path { - errorf("internal error: getPackages returned wrong path %q, want %q", pkg.Path(), item.Path) - } else if pkg.Name() != item.Name { - errorf("internal error: getPackages returned wrong name %s for package %q, want %s", pkg.Name(), item.Path, item.Name) - } - p.pkgCache[item.pathOffset] = pkg - p.pkgIndex[pkg] = item.nameIndex - pkgList[i] = pkg - } - - if bundle { - pkgs = make([]*types.Package, r.uint64()) - for i := range pkgs { - pkg := p.pkgAt(r.uint64()) - imps := make([]*types.Package, r.uint64()) - for j := range imps { - imps[j] = p.pkgAt(r.uint64()) - } - pkg.SetImports(imps) - pkgs[i] = pkg - } - } else { - if len(pkgList) == 0 { - errorf("no packages found for %s", path) - panic("unreachable") - } - pkgs = pkgList[:1] - - // record all referenced packages as imports - list := append(([]*types.Package)(nil), pkgList[1:]...) 
- sort.Sort(byPath(list)) - pkgs[0].SetImports(list) - } - - for _, pkg := range pkgs { - if pkg.Complete() { - continue - } - - names := make([]string, 0, len(p.pkgIndex[pkg])) - for name := range p.pkgIndex[pkg] { - names = append(names, name) - } - sort.Strings(names) - for _, name := range names { - p.doDecl(pkg, name) - } - - // package was imported completely and without errors - pkg.MarkComplete() - } - - // SetConstraint can't be called if the constraint type is not yet complete. - // When type params are created in the 'P' case of (*importReader).obj(), - // the associated constraint type may not be complete due to recursion. - // Therefore, we defer calling SetConstraint there, and call it here instead - // after all types are complete. - for _, d := range p.later { - d.t.SetConstraint(d.constraint) - } - - for _, typ := range p.interfaceList { - typ.Complete() - } - - // Workaround for golang/go#61561. See the doc for instanceList for details. - for _, typ := range p.instanceList { - if iface, _ := typ.Underlying().(*types.Interface); iface != nil { - iface.Complete() - } - } - - return pkgs, nil -} - -type setConstraintArgs struct { - t *types.TypeParam - constraint types.Type -} - -type iimporter struct { - version int - ipath string - - shallow bool - reportf ReportFunc // if non-nil, used to report bugs - - stringData []byte - stringCache map[uint64]string - fileOffset []uint64 // fileOffset[i] is offset in fileData for info about file encoded as i - fileData []byte - fileCache []*token.File // memoized decoding of file encoded as i - pkgCache map[uint64]*types.Package - - declData []byte - pkgIndex map[*types.Package]map[string]uint64 - typCache map[uint64]types.Type - tparamIndex map[ident]types.Type - - fake fakeFileSet - interfaceList []*types.Interface - - // Workaround for the go/types bug golang/go#61561: instances produced during - // instantiation may contain incomplete interfaces. Here we only complete the - // underlying type of the instance, which is the most common case but doesn't - // handle parameterized interface literals defined deeper in the type. - instanceList []types.Type // instances for later completion (see golang/go#61561) - - // Arguments for calls to SetConstraint that are deferred due to recursive types - later []setConstraintArgs - - indent int // for tracing support -} - -func (p *iimporter) trace(format string, args ...interface{}) { - if !trace { - // Call sites should also be guarded, but having this check here allows - // easily enabling/disabling debug trace statements. - return - } - fmt.Printf(strings.Repeat("..", p.indent)+format+"\n", args...) -} - -func (p *iimporter) doDecl(pkg *types.Package, name string) { - if debug { - p.trace("import decl %s", name) - p.indent++ - defer func() { - p.indent-- - p.trace("=> %s", name) - }() - } - // See if we've already imported this declaration. - if obj := pkg.Scope().Lookup(name); obj != nil { - return - } - - off, ok := p.pkgIndex[pkg][name] - if !ok { - // In deep mode, the index should be complete. In shallow - // mode, we should have already recursively loaded necessary - // dependencies so the above Lookup succeeds. 
- errorf("%v.%v not in index", pkg, name) - } - - r := &importReader{p: p, currPkg: pkg} - r.declReader.Reset(p.declData[off:]) - - r.obj(name) -} - -func (p *iimporter) stringAt(off uint64) string { - if s, ok := p.stringCache[off]; ok { - return s - } - - slen, n := binary.Uvarint(p.stringData[off:]) - if n <= 0 { - errorf("varint failed") - } - spos := off + uint64(n) - s := string(p.stringData[spos : spos+slen]) - p.stringCache[off] = s - return s -} - -func (p *iimporter) fileAt(index uint64) *token.File { - file := p.fileCache[index] - if file == nil { - off := p.fileOffset[index] - file = p.decodeFile(intReader{bytes.NewReader(p.fileData[off:]), p.ipath}) - p.fileCache[index] = file - } - return file -} - -func (p *iimporter) decodeFile(rd intReader) *token.File { - filename := p.stringAt(rd.uint64()) - size := int(rd.uint64()) - file := p.fake.fset.AddFile(filename, -1, size) - - // SetLines requires a nondecreasing sequence. - // Because it is common for clients to derive the interval - // [start, start+len(name)] from a start position, and we - // want to ensure that the end offset is on the same line, - // we fill in the gaps of the sparse encoding with values - // that strictly increase by the largest possible amount. - // This allows us to avoid having to record the actual end - // offset of each needed line. - - lines := make([]int, int(rd.uint64())) - var index, offset int - for i, n := 0, int(rd.uint64()); i < n; i++ { - index += int(rd.uint64()) - offset += int(rd.uint64()) - lines[index] = offset - - // Ensure monotonicity between points. - for j := index - 1; j > 0 && lines[j] == 0; j-- { - lines[j] = lines[j+1] - 1 - } - } - - // Ensure monotonicity after last point. - for j := len(lines) - 1; j > 0 && lines[j] == 0; j-- { - size-- - lines[j] = size - } - - if !file.SetLines(lines) { - errorf("SetLines failed: %d", lines) // can't happen - } - return file -} - -func (p *iimporter) pkgAt(off uint64) *types.Package { - if pkg, ok := p.pkgCache[off]; ok { - return pkg - } - path := p.stringAt(off) - errorf("missing package %q in %q", path, p.ipath) - return nil -} - -func (p *iimporter) typAt(off uint64, base *types.Named) types.Type { - if t, ok := p.typCache[off]; ok && canReuse(base, t) { - return t - } - - if off < predeclReserved { - errorf("predeclared type missing from cache: %v", off) - } - - r := &importReader{p: p} - r.declReader.Reset(p.declData[off-predeclReserved:]) - t := r.doType(base) - - if canReuse(base, t) { - p.typCache[off] = t - } - return t -} - -// canReuse reports whether the type rhs on the RHS of the declaration for def -// may be re-used. -// -// Specifically, if def is non-nil and rhs is an interface type with methods, it -// may not be re-used because we have a convention of setting the receiver type -// for interface methods to def. -func canReuse(def *types.Named, rhs types.Type) bool { - if def == nil { - return true - } - iface, _ := rhs.(*types.Interface) - if iface == nil { - return true - } - // Don't use iface.Empty() here as iface may not be complete. 
- return iface.NumEmbeddeds() == 0 && iface.NumExplicitMethods() == 0 -} - -type importReader struct { - p *iimporter - declReader bytes.Reader - currPkg *types.Package - prevFile string - prevLine int64 - prevColumn int64 -} - -func (r *importReader) obj(name string) { - tag := r.byte() - pos := r.pos() - - switch tag { - case 'A': - typ := r.typ() - - r.declare(types.NewTypeName(pos, r.currPkg, name, typ)) - - case 'C': - typ, val := r.value() - - r.declare(types.NewConst(pos, r.currPkg, name, typ, val)) - - case 'F', 'G': - var tparams []*types.TypeParam - if tag == 'G' { - tparams = r.tparamList() - } - sig := r.signature(nil, nil, tparams) - r.declare(types.NewFunc(pos, r.currPkg, name, sig)) - - case 'T', 'U': - // Types can be recursive. We need to setup a stub - // declaration before recursing. - obj := types.NewTypeName(pos, r.currPkg, name, nil) - named := types.NewNamed(obj, nil, nil) - // Declare obj before calling r.tparamList, so the new type name is recognized - // if used in the constraint of one of its own typeparams (see #48280). - r.declare(obj) - if tag == 'U' { - tparams := r.tparamList() - named.SetTypeParams(tparams) - } - - underlying := r.p.typAt(r.uint64(), named).Underlying() - named.SetUnderlying(underlying) - - if !isInterface(underlying) { - for n := r.uint64(); n > 0; n-- { - mpos := r.pos() - mname := r.ident() - recv := r.param() - - // If the receiver has any targs, set those as the - // rparams of the method (since those are the - // typeparams being used in the method sig/body). - base := baseType(recv.Type()) - assert(base != nil) - targs := base.TypeArgs() - var rparams []*types.TypeParam - if targs.Len() > 0 { - rparams = make([]*types.TypeParam, targs.Len()) - for i := range rparams { - rparams[i] = targs.At(i).(*types.TypeParam) - } - } - msig := r.signature(recv, rparams, nil) - - named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig)) - } - } - - case 'P': - // We need to "declare" a typeparam in order to have a name that - // can be referenced recursively (if needed) in the type param's - // bound. - if r.p.version < iexportVersionGenerics { - errorf("unexpected type param type") - } - name0 := tparamName(name) - tn := types.NewTypeName(pos, r.currPkg, name0, nil) - t := types.NewTypeParam(tn, nil) - - // To handle recursive references to the typeparam within its - // bound, save the partial type in tparamIndex before reading the bounds. - id := ident{r.currPkg, name} - r.p.tparamIndex[id] = t - var implicit bool - if r.p.version >= iexportVersionGo1_18 { - implicit = r.bool() - } - constraint := r.typ() - if implicit { - iface, _ := constraint.(*types.Interface) - if iface == nil { - errorf("non-interface constraint marked implicit") - } - iface.MarkImplicit() - } - // The constraint type may not be complete, if we - // are in the middle of a type recursion involving type - // constraints. So, we defer SetConstraint until we have - // completely set up all types in ImportData. - r.p.later = append(r.p.later, setConstraintArgs{t: t, constraint: constraint}) - - case 'V': - typ := r.typ() - - r.declare(types.NewVar(pos, r.currPkg, name, typ)) - - default: - errorf("unexpected tag: %v", tag) - } -} - -func (r *importReader) declare(obj types.Object) { - obj.Pkg().Scope().Insert(obj) -} - -func (r *importReader) value() (typ types.Type, val constant.Value) { - typ = r.typ() - if r.p.version >= iexportVersionGo1_18 { - // TODO: add support for using the kind. 
- _ = constant.Kind(r.int64()) - } - - switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType { - case types.IsBoolean: - val = constant.MakeBool(r.bool()) - - case types.IsString: - val = constant.MakeString(r.string()) - - case types.IsInteger: - var x big.Int - r.mpint(&x, b) - val = constant.Make(&x) - - case types.IsFloat: - val = r.mpfloat(b) - - case types.IsComplex: - re := r.mpfloat(b) - im := r.mpfloat(b) - val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) - - default: - if b.Kind() == types.Invalid { - val = constant.MakeUnknown() - return - } - errorf("unexpected type %v", typ) // panics - panic("unreachable") - } - - return -} - -func intSize(b *types.Basic) (signed bool, maxBytes uint) { - if (b.Info() & types.IsUntyped) != 0 { - return true, 64 - } - - switch b.Kind() { - case types.Float32, types.Complex64: - return true, 3 - case types.Float64, types.Complex128: - return true, 7 - } - - signed = (b.Info() & types.IsUnsigned) == 0 - switch b.Kind() { - case types.Int8, types.Uint8: - maxBytes = 1 - case types.Int16, types.Uint16: - maxBytes = 2 - case types.Int32, types.Uint32: - maxBytes = 4 - default: - maxBytes = 8 - } - - return -} - -func (r *importReader) mpint(x *big.Int, typ *types.Basic) { - signed, maxBytes := intSize(typ) - - maxSmall := 256 - maxBytes - if signed { - maxSmall = 256 - 2*maxBytes - } - if maxBytes == 1 { - maxSmall = 256 - } - - n, _ := r.declReader.ReadByte() - if uint(n) < maxSmall { - v := int64(n) - if signed { - v >>= 1 - if n&1 != 0 { - v = ^v - } - } - x.SetInt64(v) - return - } - - v := -n - if signed { - v = -(n &^ 1) >> 1 - } - if v < 1 || uint(v) > maxBytes { - errorf("weird decoding: %v, %v => %v", n, signed, v) - } - b := make([]byte, v) - io.ReadFull(&r.declReader, b) - x.SetBytes(b) - if signed && n&1 != 0 { - x.Neg(x) - } -} - -func (r *importReader) mpfloat(typ *types.Basic) constant.Value { - var mant big.Int - r.mpint(&mant, typ) - var f big.Float - f.SetInt(&mant) - if f.Sign() != 0 { - f.SetMantExp(&f, int(r.int64())) - } - return constant.Make(&f) -} - -func (r *importReader) ident() string { - return r.string() -} - -func (r *importReader) qualifiedIdent() (*types.Package, string) { - name := r.string() - pkg := r.pkg() - return pkg, name -} - -func (r *importReader) pos() token.Pos { - if r.p.shallow { - // precise offsets are encoded only in shallow mode - return r.posv2() - } - if r.p.version >= iexportVersionPosCol { - r.posv1() - } else { - r.posv0() - } - - if r.prevFile == "" && r.prevLine == 0 && r.prevColumn == 0 { - return token.NoPos - } - return r.p.fake.pos(r.prevFile, int(r.prevLine), int(r.prevColumn)) -} - -func (r *importReader) posv0() { - delta := r.int64() - if delta != deltaNewFile { - r.prevLine += delta - } else if l := r.int64(); l == -1 { - r.prevLine += deltaNewFile - } else { - r.prevFile = r.string() - r.prevLine = l - } -} - -func (r *importReader) posv1() { - delta := r.int64() - r.prevColumn += delta >> 1 - if delta&1 != 0 { - delta = r.int64() - r.prevLine += delta >> 1 - if delta&1 != 0 { - r.prevFile = r.string() - } - } -} - -func (r *importReader) posv2() token.Pos { - file := r.uint64() - if file == 0 { - return token.NoPos - } - tf := r.p.fileAt(file - 1) - return tf.Pos(int(r.uint64())) -} - -func (r *importReader) typ() types.Type { - return r.p.typAt(r.uint64(), nil) -} - -func isInterface(t types.Type) bool { - _, ok := t.(*types.Interface) - return ok -} - -func (r *importReader) pkg() *types.Package { return r.p.pkgAt(r.uint64()) } -func (r 
*importReader) string() string { return r.p.stringAt(r.uint64()) } - -func (r *importReader) doType(base *types.Named) (res types.Type) { - k := r.kind() - if debug { - r.p.trace("importing type %d (base: %s)", k, base) - r.p.indent++ - defer func() { - r.p.indent-- - r.p.trace("=> %s", res) - }() - } - switch k { - default: - errorf("unexpected kind tag in %q: %v", r.p.ipath, k) - return nil - - case definedType: - pkg, name := r.qualifiedIdent() - r.p.doDecl(pkg, name) - return pkg.Scope().Lookup(name).(*types.TypeName).Type() - case pointerType: - return types.NewPointer(r.typ()) - case sliceType: - return types.NewSlice(r.typ()) - case arrayType: - n := r.uint64() - return types.NewArray(r.typ(), int64(n)) - case chanType: - dir := chanDir(int(r.uint64())) - return types.NewChan(dir, r.typ()) - case mapType: - return types.NewMap(r.typ(), r.typ()) - case signatureType: - r.currPkg = r.pkg() - return r.signature(nil, nil, nil) - - case structType: - r.currPkg = r.pkg() - - fields := make([]*types.Var, r.uint64()) - tags := make([]string, len(fields)) - for i := range fields { - var field *types.Var - if r.p.shallow { - field, _ = r.objectPathObject().(*types.Var) - } - - fpos := r.pos() - fname := r.ident() - ftyp := r.typ() - emb := r.bool() - tag := r.string() - - // Either this is not a shallow import, the field is local, or the - // encoded objectPath failed to produce an object (a bug). - // - // Even in this last, buggy case, fall back on creating a new field. As - // discussed in iexport.go, this is not correct, but mostly works and is - // preferable to failing (for now at least). - if field == nil { - field = types.NewField(fpos, r.currPkg, fname, ftyp, emb) - } - - fields[i] = field - tags[i] = tag - } - return types.NewStruct(fields, tags) - - case interfaceType: - r.currPkg = r.pkg() - - embeddeds := make([]types.Type, r.uint64()) - for i := range embeddeds { - _ = r.pos() - embeddeds[i] = r.typ() - } - - methods := make([]*types.Func, r.uint64()) - for i := range methods { - var method *types.Func - if r.p.shallow { - method, _ = r.objectPathObject().(*types.Func) - } - - mpos := r.pos() - mname := r.ident() - - // TODO(mdempsky): Matches bimport.go, but I - // don't agree with this. - var recv *types.Var - if base != nil { - recv = types.NewVar(token.NoPos, r.currPkg, "", base) - } - msig := r.signature(recv, nil, nil) - - if method == nil { - method = types.NewFunc(mpos, r.currPkg, mname, msig) - } - methods[i] = method - } - - typ := newInterface(methods, embeddeds) - r.p.interfaceList = append(r.p.interfaceList, typ) - return typ - - case typeParamType: - if r.p.version < iexportVersionGenerics { - errorf("unexpected type param type") - } - pkg, name := r.qualifiedIdent() - id := ident{pkg, name} - if t, ok := r.p.tparamIndex[id]; ok { - // We're already in the process of importing this typeparam. - return t - } - // Otherwise, import the definition of the typeparam now. - r.p.doDecl(pkg, name) - return r.p.tparamIndex[id] - - case instanceType: - if r.p.version < iexportVersionGenerics { - errorf("unexpected instantiation type") - } - // pos does not matter for instances: they are positioned on the original - // type. - _ = r.pos() - len := r.uint64() - targs := make([]types.Type, len) - for i := range targs { - targs[i] = r.typ() - } - baseType := r.typ() - // The imported instantiated type doesn't include any methods, so - // we must always use the methods of the base (orig) type. 
- // TODO provide a non-nil *Environment - t, _ := types.Instantiate(nil, baseType, targs, false) - - // Workaround for golang/go#61561. See the doc for instanceList for details. - r.p.instanceList = append(r.p.instanceList, t) - return t - - case unionType: - if r.p.version < iexportVersionGenerics { - errorf("unexpected instantiation type") - } - terms := make([]*types.Term, r.uint64()) - for i := range terms { - terms[i] = types.NewTerm(r.bool(), r.typ()) - } - return types.NewUnion(terms) - } -} - -func (r *importReader) kind() itag { - return itag(r.uint64()) -} - -// objectPathObject is the inverse of exportWriter.objectPath. -// -// In shallow mode, certain fields and methods may need to be looked up in an -// imported package. See the doc for exportWriter.objectPath for a full -// explanation. -func (r *importReader) objectPathObject() types.Object { - objPath := objectpath.Path(r.string()) - if objPath == "" { - return nil - } - pkg := r.pkg() - obj, err := objectpath.Object(pkg, objPath) - if err != nil { - if r.p.reportf != nil { - r.p.reportf("failed to find object for objectPath %q: %v", objPath, err) - } - } - return obj -} - -func (r *importReader) signature(recv *types.Var, rparams []*types.TypeParam, tparams []*types.TypeParam) *types.Signature { - params := r.paramList() - results := r.paramList() - variadic := params.Len() > 0 && r.bool() - return types.NewSignatureType(recv, rparams, tparams, params, results, variadic) -} - -func (r *importReader) tparamList() []*types.TypeParam { - n := r.uint64() - if n == 0 { - return nil - } - xs := make([]*types.TypeParam, n) - for i := range xs { - // Note: the standard library importer is tolerant of nil types here, - // though would panic in SetTypeParams. - xs[i] = r.typ().(*types.TypeParam) - } - return xs -} - -func (r *importReader) paramList() *types.Tuple { - xs := make([]*types.Var, r.uint64()) - for i := range xs { - xs[i] = r.param() - } - return types.NewTuple(xs...) -} - -func (r *importReader) param() *types.Var { - pos := r.pos() - name := r.ident() - typ := r.typ() - return types.NewParam(pos, r.currPkg, name, typ) -} - -func (r *importReader) bool() bool { - return r.uint64() != 0 -} - -func (r *importReader) int64() int64 { - n, err := binary.ReadVarint(&r.declReader) - if err != nil { - errorf("readVarint: %v", err) - } - return n -} - -func (r *importReader) uint64() uint64 { - n, err := binary.ReadUvarint(&r.declReader) - if err != nil { - errorf("readUvarint: %v", err) - } - return n -} - -func (r *importReader) byte() byte { - x, err := r.declReader.ReadByte() - if err != nil { - errorf("declReader.ReadByte: %v", err) - } - return x -} - -func baseType(typ types.Type) *types.Named { - // pointer receivers are never types.Named types - if p, _ := typ.(*types.Pointer); p != nil { - typ = p.Elem() - } - // receiver base types are always (possibly generic) types.Named types - n, _ := typ.(*types.Named) - return n -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/newInterface10.go b/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/newInterface10.go deleted file mode 100644 index 8b163e3d058a..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/newInterface10.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build !go1.11 -// +build !go1.11 - -package gcimporter - -import "go/types" - -func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface { - named := make([]*types.Named, len(embeddeds)) - for i, e := range embeddeds { - var ok bool - named[i], ok = e.(*types.Named) - if !ok { - panic("embedding of non-defined interfaces in interfaces is not supported before Go 1.11") - } - } - return types.NewInterface(methods, named) -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/newInterface11.go b/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/newInterface11.go deleted file mode 100644 index 49984f40fd80..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/newInterface11.go +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.11 -// +build go1.11 - -package gcimporter - -import "go/types" - -func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface { - return types.NewInterfaceType(methods, embeddeds) -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/support_go117.go b/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/support_go117.go deleted file mode 100644 index d892273efb61..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/support_go117.go +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.18 -// +build !go1.18 - -package gcimporter - -import "go/types" - -const iexportVersion = iexportVersionGo1_11 - -func additionalPredeclared() []types.Type { - return nil -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/support_go118.go b/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/support_go118.go deleted file mode 100644 index edbe6ea7041d..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/support_go118.go +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.18 -// +build go1.18 - -package gcimporter - -import "go/types" - -const iexportVersion = iexportVersionGenerics - -// additionalPredeclared returns additional predeclared types in go.1.18. -func additionalPredeclared() []types.Type { - return []types.Type{ - // comparable - types.Universe.Lookup("comparable").Type(), - - // any - types.Universe.Lookup("any").Type(), - } -} - -// See cmd/compile/internal/types.SplitVargenSuffix. -func splitVargenSuffix(name string) (base, suffix string) { - i := len(name) - for i > 0 && name[i-1] >= '0' && name[i-1] <= '9' { - i-- - } - const dot = "Β·" - if i >= len(dot) && name[i-len(dot):i] == dot { - i -= len(dot) - return name[:i], name[i:] - } - return name, "" -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/unified_no.go b/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/unified_no.go deleted file mode 100644 index 286bf445483d..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/unified_no.go +++ /dev/null @@ -1,10 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !(go1.18 && goexperiment.unified) -// +build !go1.18 !goexperiment.unified - -package gcimporter - -const unifiedIR = false diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/unified_yes.go b/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/unified_yes.go deleted file mode 100644 index b5d69ffbe682..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/unified_yes.go +++ /dev/null @@ -1,10 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.18 && goexperiment.unified -// +build go1.18,goexperiment.unified - -package gcimporter - -const unifiedIR = true diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/ureader_no.go b/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/ureader_no.go deleted file mode 100644 index 8eb20729c2ad..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/ureader_no.go +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.18 -// +build !go1.18 - -package gcimporter - -import ( - "fmt" - "go/token" - "go/types" -) - -func UImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) { - err = fmt.Errorf("go/tools compiled with a Go version earlier than 1.18 cannot read unified IR export data") - return -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go b/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go deleted file mode 100644 index b977435f626d..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go +++ /dev/null @@ -1,728 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Derived from go/internal/gcimporter/ureader.go - -//go:build go1.18 -// +build go1.18 - -package gcimporter - -import ( - "fmt" - "go/token" - "go/types" - "sort" - "strings" - - "golang.org/x/tools/internal/pkgbits" -) - -// A pkgReader holds the shared state for reading a unified IR package -// description. -type pkgReader struct { - pkgbits.PkgDecoder - - fake fakeFileSet - - ctxt *types.Context - imports map[string]*types.Package // previously imported packages, indexed by path - - // lazily initialized arrays corresponding to the unified IR - // PosBase, Pkg, and Type sections, respectively. - posBases []string // position bases (i.e., file names) - pkgs []*types.Package - typs []types.Type - - // laterFns holds functions that need to be invoked at the end of - // import reading. - laterFns []func() - // laterFors is used in case of 'type A B' to ensure that B is processed before A. - laterFors map[types.Type]int - - // ifaces holds a list of constructed Interfaces, which need to have - // Complete called after importing is done. - ifaces []*types.Interface -} - -// later adds a function to be invoked at the end of import reading. -func (pr *pkgReader) later(fn func()) { - pr.laterFns = append(pr.laterFns, fn) -} - -// See cmd/compile/internal/noder.derivedInfo. 
-type derivedInfo struct { - idx pkgbits.Index - needed bool -} - -// See cmd/compile/internal/noder.typeInfo. -type typeInfo struct { - idx pkgbits.Index - derived bool -} - -func UImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) { - if !debug { - defer func() { - if x := recover(); x != nil { - err = fmt.Errorf("internal error in importing %q (%v); please report an issue", path, x) - } - }() - } - - s := string(data) - s = s[:strings.LastIndex(s, "\n$$\n")] - input := pkgbits.NewPkgDecoder(path, s) - pkg = readUnifiedPackage(fset, nil, imports, input) - return -} - -// laterFor adds a function to be invoked at the end of import reading, and records the type that function is finishing. -func (pr *pkgReader) laterFor(t types.Type, fn func()) { - if pr.laterFors == nil { - pr.laterFors = make(map[types.Type]int) - } - pr.laterFors[t] = len(pr.laterFns) - pr.laterFns = append(pr.laterFns, fn) -} - -// readUnifiedPackage reads a package description from the given -// unified IR export data decoder. -func readUnifiedPackage(fset *token.FileSet, ctxt *types.Context, imports map[string]*types.Package, input pkgbits.PkgDecoder) *types.Package { - pr := pkgReader{ - PkgDecoder: input, - - fake: fakeFileSet{ - fset: fset, - files: make(map[string]*fileInfo), - }, - - ctxt: ctxt, - imports: imports, - - posBases: make([]string, input.NumElems(pkgbits.RelocPosBase)), - pkgs: make([]*types.Package, input.NumElems(pkgbits.RelocPkg)), - typs: make([]types.Type, input.NumElems(pkgbits.RelocType)), - } - defer pr.fake.setLines() - - r := pr.newReader(pkgbits.RelocMeta, pkgbits.PublicRootIdx, pkgbits.SyncPublic) - pkg := r.pkg() - r.Bool() // has init - - for i, n := 0, r.Len(); i < n; i++ { - // As if r.obj(), but avoiding the Scope.Lookup call, - // to avoid eager loading of imports. - r.Sync(pkgbits.SyncObject) - assert(!r.Bool()) - r.p.objIdx(r.Reloc(pkgbits.RelocObj)) - assert(r.Len() == 0) - } - - r.Sync(pkgbits.SyncEOF) - - for _, fn := range pr.laterFns { - fn() - } - - for _, iface := range pr.ifaces { - iface.Complete() - } - - // Imports() of pkg are all of the transitive packages that were loaded. - var imps []*types.Package - for _, imp := range pr.pkgs { - if imp != nil && imp != pkg { - imps = append(imps, imp) - } - } - sort.Sort(byPath(imps)) - pkg.SetImports(imps) - - pkg.MarkComplete() - return pkg -} - -// A reader holds the state for reading a single unified IR element -// within a package. -type reader struct { - pkgbits.Decoder - - p *pkgReader - - dict *readerDict -} - -// A readerDict holds the state for type parameters that parameterize -// the current unified IR element. -type readerDict struct { - // bounds is a slice of typeInfos corresponding to the underlying - // bounds of the element's type parameters. - bounds []typeInfo - - // tparams is a slice of the constructed TypeParams for the element. - tparams []*types.TypeParam - - // devived is a slice of types derived from tparams, which may be - // instantiated while reading the current element. 
- derived []derivedInfo - derivedTypes []types.Type // lazily instantiated from derived -} - -func (pr *pkgReader) newReader(k pkgbits.RelocKind, idx pkgbits.Index, marker pkgbits.SyncMarker) *reader { - return &reader{ - Decoder: pr.NewDecoder(k, idx, marker), - p: pr, - } -} - -func (pr *pkgReader) tempReader(k pkgbits.RelocKind, idx pkgbits.Index, marker pkgbits.SyncMarker) *reader { - return &reader{ - Decoder: pr.TempDecoder(k, idx, marker), - p: pr, - } -} - -func (pr *pkgReader) retireReader(r *reader) { - pr.RetireDecoder(&r.Decoder) -} - -// @@@ Positions - -func (r *reader) pos() token.Pos { - r.Sync(pkgbits.SyncPos) - if !r.Bool() { - return token.NoPos - } - - // TODO(mdempsky): Delta encoding. - posBase := r.posBase() - line := r.Uint() - col := r.Uint() - return r.p.fake.pos(posBase, int(line), int(col)) -} - -func (r *reader) posBase() string { - return r.p.posBaseIdx(r.Reloc(pkgbits.RelocPosBase)) -} - -func (pr *pkgReader) posBaseIdx(idx pkgbits.Index) string { - if b := pr.posBases[idx]; b != "" { - return b - } - - var filename string - { - r := pr.tempReader(pkgbits.RelocPosBase, idx, pkgbits.SyncPosBase) - - // Within types2, position bases have a lot more details (e.g., - // keeping track of where //line directives appeared exactly). - // - // For go/types, we just track the file name. - - filename = r.String() - - if r.Bool() { // file base - // Was: "b = token.NewTrimmedFileBase(filename, true)" - } else { // line base - pos := r.pos() - line := r.Uint() - col := r.Uint() - - // Was: "b = token.NewLineBase(pos, filename, true, line, col)" - _, _, _ = pos, line, col - } - pr.retireReader(r) - } - b := filename - pr.posBases[idx] = b - return b -} - -// @@@ Packages - -func (r *reader) pkg() *types.Package { - r.Sync(pkgbits.SyncPkg) - return r.p.pkgIdx(r.Reloc(pkgbits.RelocPkg)) -} - -func (pr *pkgReader) pkgIdx(idx pkgbits.Index) *types.Package { - // TODO(mdempsky): Consider using some non-nil pointer to indicate - // the universe scope, so we don't need to keep re-reading it. - if pkg := pr.pkgs[idx]; pkg != nil { - return pkg - } - - pkg := pr.newReader(pkgbits.RelocPkg, idx, pkgbits.SyncPkgDef).doPkg() - pr.pkgs[idx] = pkg - return pkg -} - -func (r *reader) doPkg() *types.Package { - path := r.String() - switch path { - case "": - path = r.p.PkgPath() - case "builtin": - return nil // universe - case "unsafe": - return types.Unsafe - } - - if pkg := r.p.imports[path]; pkg != nil { - return pkg - } - - name := r.String() - - pkg := types.NewPackage(path, name) - r.p.imports[path] = pkg - - return pkg -} - -// @@@ Types - -func (r *reader) typ() types.Type { - return r.p.typIdx(r.typInfo(), r.dict) -} - -func (r *reader) typInfo() typeInfo { - r.Sync(pkgbits.SyncType) - if r.Bool() { - return typeInfo{idx: pkgbits.Index(r.Len()), derived: true} - } - return typeInfo{idx: r.Reloc(pkgbits.RelocType), derived: false} -} - -func (pr *pkgReader) typIdx(info typeInfo, dict *readerDict) types.Type { - idx := info.idx - var where *types.Type - if info.derived { - where = &dict.derivedTypes[idx] - idx = dict.derived[idx].idx - } else { - where = &pr.typs[idx] - } - - if typ := *where; typ != nil { - return typ - } - - var typ types.Type - { - r := pr.tempReader(pkgbits.RelocType, idx, pkgbits.SyncTypeIdx) - r.dict = dict - - typ = r.doTyp() - assert(typ != nil) - pr.retireReader(r) - } - // See comment in pkgReader.typIdx explaining how this happens. 
- if prev := *where; prev != nil { - return prev - } - - *where = typ - return typ -} - -func (r *reader) doTyp() (res types.Type) { - switch tag := pkgbits.CodeType(r.Code(pkgbits.SyncType)); tag { - default: - errorf("unhandled type tag: %v", tag) - panic("unreachable") - - case pkgbits.TypeBasic: - return types.Typ[r.Len()] - - case pkgbits.TypeNamed: - obj, targs := r.obj() - name := obj.(*types.TypeName) - if len(targs) != 0 { - t, _ := types.Instantiate(r.p.ctxt, name.Type(), targs, false) - return t - } - return name.Type() - - case pkgbits.TypeTypeParam: - return r.dict.tparams[r.Len()] - - case pkgbits.TypeArray: - len := int64(r.Uint64()) - return types.NewArray(r.typ(), len) - case pkgbits.TypeChan: - dir := types.ChanDir(r.Len()) - return types.NewChan(dir, r.typ()) - case pkgbits.TypeMap: - return types.NewMap(r.typ(), r.typ()) - case pkgbits.TypePointer: - return types.NewPointer(r.typ()) - case pkgbits.TypeSignature: - return r.signature(nil, nil, nil) - case pkgbits.TypeSlice: - return types.NewSlice(r.typ()) - case pkgbits.TypeStruct: - return r.structType() - case pkgbits.TypeInterface: - return r.interfaceType() - case pkgbits.TypeUnion: - return r.unionType() - } -} - -func (r *reader) structType() *types.Struct { - fields := make([]*types.Var, r.Len()) - var tags []string - for i := range fields { - pos := r.pos() - pkg, name := r.selector() - ftyp := r.typ() - tag := r.String() - embedded := r.Bool() - - fields[i] = types.NewField(pos, pkg, name, ftyp, embedded) - if tag != "" { - for len(tags) < i { - tags = append(tags, "") - } - tags = append(tags, tag) - } - } - return types.NewStruct(fields, tags) -} - -func (r *reader) unionType() *types.Union { - terms := make([]*types.Term, r.Len()) - for i := range terms { - terms[i] = types.NewTerm(r.Bool(), r.typ()) - } - return types.NewUnion(terms) -} - -func (r *reader) interfaceType() *types.Interface { - methods := make([]*types.Func, r.Len()) - embeddeds := make([]types.Type, r.Len()) - implicit := len(methods) == 0 && len(embeddeds) == 1 && r.Bool() - - for i := range methods { - pos := r.pos() - pkg, name := r.selector() - mtyp := r.signature(nil, nil, nil) - methods[i] = types.NewFunc(pos, pkg, name, mtyp) - } - - for i := range embeddeds { - embeddeds[i] = r.typ() - } - - iface := types.NewInterfaceType(methods, embeddeds) - if implicit { - iface.MarkImplicit() - } - - // We need to call iface.Complete(), but if there are any embedded - // defined types, then we may not have set their underlying - // interface type yet. So we need to defer calling Complete until - // after we've called SetUnderlying everywhere. - // - // TODO(mdempsky): After CL 424876 lands, it should be safe to call - // iface.Complete() immediately. - r.p.ifaces = append(r.p.ifaces, iface) - - return iface -} - -func (r *reader) signature(recv *types.Var, rtparams, tparams []*types.TypeParam) *types.Signature { - r.Sync(pkgbits.SyncSignature) - - params := r.params() - results := r.params() - variadic := r.Bool() - - return types.NewSignatureType(recv, rtparams, tparams, params, results, variadic) -} - -func (r *reader) params() *types.Tuple { - r.Sync(pkgbits.SyncParams) - - params := make([]*types.Var, r.Len()) - for i := range params { - params[i] = r.param() - } - - return types.NewTuple(params...) 
-} - -func (r *reader) param() *types.Var { - r.Sync(pkgbits.SyncParam) - - pos := r.pos() - pkg, name := r.localIdent() - typ := r.typ() - - return types.NewParam(pos, pkg, name, typ) -} - -// @@@ Objects - -func (r *reader) obj() (types.Object, []types.Type) { - r.Sync(pkgbits.SyncObject) - - assert(!r.Bool()) - - pkg, name := r.p.objIdx(r.Reloc(pkgbits.RelocObj)) - obj := pkgScope(pkg).Lookup(name) - - targs := make([]types.Type, r.Len()) - for i := range targs { - targs[i] = r.typ() - } - - return obj, targs -} - -func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) { - - var objPkg *types.Package - var objName string - var tag pkgbits.CodeObj - { - rname := pr.tempReader(pkgbits.RelocName, idx, pkgbits.SyncObject1) - - objPkg, objName = rname.qualifiedIdent() - assert(objName != "") - - tag = pkgbits.CodeObj(rname.Code(pkgbits.SyncCodeObj)) - pr.retireReader(rname) - } - - if tag == pkgbits.ObjStub { - assert(objPkg == nil || objPkg == types.Unsafe) - return objPkg, objName - } - - // Ignore local types promoted to global scope (#55110). - if _, suffix := splitVargenSuffix(objName); suffix != "" { - return objPkg, objName - } - - if objPkg.Scope().Lookup(objName) == nil { - dict := pr.objDictIdx(idx) - - r := pr.newReader(pkgbits.RelocObj, idx, pkgbits.SyncObject1) - r.dict = dict - - declare := func(obj types.Object) { - objPkg.Scope().Insert(obj) - } - - switch tag { - default: - panic("weird") - - case pkgbits.ObjAlias: - pos := r.pos() - typ := r.typ() - declare(types.NewTypeName(pos, objPkg, objName, typ)) - - case pkgbits.ObjConst: - pos := r.pos() - typ := r.typ() - val := r.Value() - declare(types.NewConst(pos, objPkg, objName, typ, val)) - - case pkgbits.ObjFunc: - pos := r.pos() - tparams := r.typeParamNames() - sig := r.signature(nil, nil, tparams) - declare(types.NewFunc(pos, objPkg, objName, sig)) - - case pkgbits.ObjType: - pos := r.pos() - - obj := types.NewTypeName(pos, objPkg, objName, nil) - named := types.NewNamed(obj, nil, nil) - declare(obj) - - named.SetTypeParams(r.typeParamNames()) - - setUnderlying := func(underlying types.Type) { - // If the underlying type is an interface, we need to - // duplicate its methods so we can replace the receiver - // parameter's type (#49906). - if iface, ok := underlying.(*types.Interface); ok && iface.NumExplicitMethods() != 0 { - methods := make([]*types.Func, iface.NumExplicitMethods()) - for i := range methods { - fn := iface.ExplicitMethod(i) - sig := fn.Type().(*types.Signature) - - recv := types.NewVar(fn.Pos(), fn.Pkg(), "", named) - methods[i] = types.NewFunc(fn.Pos(), fn.Pkg(), fn.Name(), types.NewSignature(recv, sig.Params(), sig.Results(), sig.Variadic())) - } - - embeds := make([]types.Type, iface.NumEmbeddeds()) - for i := range embeds { - embeds[i] = iface.EmbeddedType(i) - } - - newIface := types.NewInterfaceType(methods, embeds) - r.p.ifaces = append(r.p.ifaces, newIface) - underlying = newIface - } - - named.SetUnderlying(underlying) - } - - // Since go.dev/cl/455279, we can assume rhs.Underlying() will - // always be non-nil. However, to temporarily support users of - // older snapshot releases, we continue to fallback to the old - // behavior for now. - // - // TODO(mdempsky): Remove fallback code and simplify after - // allowing time for snapshot users to upgrade. 
- rhs := r.typ() - if underlying := rhs.Underlying(); underlying != nil { - setUnderlying(underlying) - } else { - pk := r.p - pk.laterFor(named, func() { - // First be sure that the rhs is initialized, if it needs to be initialized. - delete(pk.laterFors, named) // prevent cycles - if i, ok := pk.laterFors[rhs]; ok { - f := pk.laterFns[i] - pk.laterFns[i] = func() {} // function is running now, so replace it with a no-op - f() // initialize RHS - } - setUnderlying(rhs.Underlying()) - }) - } - - for i, n := 0, r.Len(); i < n; i++ { - named.AddMethod(r.method()) - } - - case pkgbits.ObjVar: - pos := r.pos() - typ := r.typ() - declare(types.NewVar(pos, objPkg, objName, typ)) - } - } - - return objPkg, objName -} - -func (pr *pkgReader) objDictIdx(idx pkgbits.Index) *readerDict { - - var dict readerDict - - { - r := pr.tempReader(pkgbits.RelocObjDict, idx, pkgbits.SyncObject1) - if implicits := r.Len(); implicits != 0 { - errorf("unexpected object with %v implicit type parameter(s)", implicits) - } - - dict.bounds = make([]typeInfo, r.Len()) - for i := range dict.bounds { - dict.bounds[i] = r.typInfo() - } - - dict.derived = make([]derivedInfo, r.Len()) - dict.derivedTypes = make([]types.Type, len(dict.derived)) - for i := range dict.derived { - dict.derived[i] = derivedInfo{r.Reloc(pkgbits.RelocType), r.Bool()} - } - - pr.retireReader(r) - } - // function references follow, but reader doesn't need those - - return &dict -} - -func (r *reader) typeParamNames() []*types.TypeParam { - r.Sync(pkgbits.SyncTypeParamNames) - - // Note: This code assumes it only processes objects without - // implement type parameters. This is currently fine, because - // reader is only used to read in exported declarations, which are - // always package scoped. - - if len(r.dict.bounds) == 0 { - return nil - } - - // Careful: Type parameter lists may have cycles. To allow for this, - // we construct the type parameter list in two passes: first we - // create all the TypeNames and TypeParams, then we construct and - // set the bound type. - - r.dict.tparams = make([]*types.TypeParam, len(r.dict.bounds)) - for i := range r.dict.bounds { - pos := r.pos() - pkg, name := r.localIdent() - - tname := types.NewTypeName(pos, pkg, name, nil) - r.dict.tparams[i] = types.NewTypeParam(tname, nil) - } - - typs := make([]types.Type, len(r.dict.bounds)) - for i, bound := range r.dict.bounds { - typs[i] = r.p.typIdx(bound, r.dict) - } - - // TODO(mdempsky): This is subtle, elaborate further. - // - // We have to save tparams outside of the closure, because - // typeParamNames() can be called multiple times with the same - // dictionary instance. - // - // Also, this needs to happen later to make sure SetUnderlying has - // been called. - // - // TODO(mdempsky): Is it safe to have a single "later" slice or do - // we need to have multiple passes? See comments on CL 386002 and - // go.dev/issue/52104. - tparams := r.dict.tparams - r.p.later(func() { - for i, typ := range typs { - tparams[i].SetConstraint(typ) - } - }) - - return r.dict.tparams -} - -func (r *reader) method() *types.Func { - r.Sync(pkgbits.SyncMethod) - pos := r.pos() - pkg, name := r.selector() - - rparams := r.typeParamNames() - sig := r.signature(r.param(), rparams, nil) - - _ = r.pos() // TODO(mdempsky): Remove; this is a hacker for linker.go. 
- return types.NewFunc(pos, pkg, name, sig) -} - -func (r *reader) qualifiedIdent() (*types.Package, string) { return r.ident(pkgbits.SyncSym) } -func (r *reader) localIdent() (*types.Package, string) { return r.ident(pkgbits.SyncLocalIdent) } -func (r *reader) selector() (*types.Package, string) { return r.ident(pkgbits.SyncSelector) } - -func (r *reader) ident(marker pkgbits.SyncMarker) (*types.Package, string) { - r.Sync(marker) - return r.pkg(), r.String() -} - -// pkgScope returns pkg.Scope(). -// If pkg is nil, it returns types.Universe instead. -// -// TODO(mdempsky): Remove after x/tools can depend on Go 1.19. -func pkgScope(pkg *types.Package) *types.Scope { - if pkg != nil { - return pkg.Scope() - } - return types.Universe -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gocommand/BUILD.bazel b/go/extractor/vendor/golang.org/x/tools/internal/gocommand/BUILD.bazel deleted file mode 100644 index 7e64f94b95cb..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gocommand/BUILD.bazel +++ /dev/null @@ -1,22 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "gocommand", - srcs = [ - "invoke.go", - "vendor.go", - "version.go", - ], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/tools/internal/gocommand", - importpath = "golang.org/x/tools/internal/gocommand", - visibility = ["//go/extractor/vendor/golang.org/x/tools:__subpackages__"], - deps = [ - "//go/extractor/vendor/golang.org/x/mod/semver", - "//go/extractor/vendor/golang.org/x/tools/internal/event", - "//go/extractor/vendor/golang.org/x/tools/internal/event/keys", - "//go/extractor/vendor/golang.org/x/tools/internal/event/label", - "//go/extractor/vendor/golang.org/x/tools/internal/event/tag", - ], -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gocommand/invoke.go b/go/extractor/vendor/golang.org/x/tools/internal/gocommand/invoke.go deleted file mode 100644 index 55312522dc2d..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gocommand/invoke.go +++ /dev/null @@ -1,465 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package gocommand is a helper for calling the go command. -package gocommand - -import ( - "bytes" - "context" - "errors" - "fmt" - "io" - "log" - "os" - "os/exec" - "reflect" - "regexp" - "runtime" - "strconv" - "strings" - "sync" - "time" - - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/keys" - "golang.org/x/tools/internal/event/label" - "golang.org/x/tools/internal/event/tag" -) - -// An Runner will run go command invocations and serialize -// them if it sees a concurrency error. -type Runner struct { - // once guards the runner initialization. - once sync.Once - - // inFlight tracks available workers. - inFlight chan struct{} - - // serialized guards the ability to run a go command serially, - // to avoid deadlocks when claiming workers. 
- serialized chan struct{} -} - -const maxInFlight = 10 - -func (runner *Runner) initialize() { - runner.once.Do(func() { - runner.inFlight = make(chan struct{}, maxInFlight) - runner.serialized = make(chan struct{}, 1) - }) -} - -// 1.13: go: updates to go.mod needed, but contents have changed -// 1.14: go: updating go.mod: existing contents have changed since last read -var modConcurrencyError = regexp.MustCompile(`go:.*go.mod.*contents have changed`) - -// verb is an event label for the go command verb. -var verb = keys.NewString("verb", "go command verb") - -func invLabels(inv Invocation) []label.Label { - return []label.Label{verb.Of(inv.Verb), tag.Directory.Of(inv.WorkingDir)} -} - -// Run is a convenience wrapper around RunRaw. -// It returns only stdout and a "friendly" error. -func (runner *Runner) Run(ctx context.Context, inv Invocation) (*bytes.Buffer, error) { - ctx, done := event.Start(ctx, "gocommand.Runner.Run", invLabels(inv)...) - defer done() - - stdout, _, friendly, _ := runner.RunRaw(ctx, inv) - return stdout, friendly -} - -// RunPiped runs the invocation serially, always waiting for any concurrent -// invocations to complete first. -func (runner *Runner) RunPiped(ctx context.Context, inv Invocation, stdout, stderr io.Writer) error { - ctx, done := event.Start(ctx, "gocommand.Runner.RunPiped", invLabels(inv)...) - defer done() - - _, err := runner.runPiped(ctx, inv, stdout, stderr) - return err -} - -// RunRaw runs the invocation, serializing requests only if they fight over -// go.mod changes. -// Postcondition: both error results have same nilness. -func (runner *Runner) RunRaw(ctx context.Context, inv Invocation) (*bytes.Buffer, *bytes.Buffer, error, error) { - ctx, done := event.Start(ctx, "gocommand.Runner.RunRaw", invLabels(inv)...) - defer done() - // Make sure the runner is always initialized. - runner.initialize() - - // First, try to run the go command concurrently. - stdout, stderr, friendlyErr, err := runner.runConcurrent(ctx, inv) - - // If we encounter a load concurrency error, we need to retry serially. - if friendlyErr != nil && modConcurrencyError.MatchString(friendlyErr.Error()) { - event.Error(ctx, "Load concurrency error, will retry serially", err) - - // Run serially by calling runPiped. - stdout.Reset() - stderr.Reset() - friendlyErr, err = runner.runPiped(ctx, inv, stdout, stderr) - } - - return stdout, stderr, friendlyErr, err -} - -// Postcondition: both error results have same nilness. -func (runner *Runner) runConcurrent(ctx context.Context, inv Invocation) (*bytes.Buffer, *bytes.Buffer, error, error) { - // Wait for 1 worker to become available. - select { - case <-ctx.Done(): - return nil, nil, ctx.Err(), ctx.Err() - case runner.inFlight <- struct{}{}: - defer func() { <-runner.inFlight }() - } - - stdout, stderr := &bytes.Buffer{}, &bytes.Buffer{} - friendlyErr, err := inv.runWithFriendlyError(ctx, stdout, stderr) - return stdout, stderr, friendlyErr, err -} - -// Postcondition: both error results have same nilness. -func (runner *Runner) runPiped(ctx context.Context, inv Invocation, stdout, stderr io.Writer) (error, error) { - // Make sure the runner is always initialized. - runner.initialize() - - // Acquire the serialization lock. This avoids deadlocks between two - // runPiped commands. 
- select { - case <-ctx.Done(): - return ctx.Err(), ctx.Err() - case runner.serialized <- struct{}{}: - defer func() { <-runner.serialized }() - } - - // Wait for all in-progress go commands to return before proceeding, - // to avoid load concurrency errors. - for i := 0; i < maxInFlight; i++ { - select { - case <-ctx.Done(): - return ctx.Err(), ctx.Err() - case runner.inFlight <- struct{}{}: - // Make sure we always "return" any workers we took. - defer func() { <-runner.inFlight }() - } - } - - return inv.runWithFriendlyError(ctx, stdout, stderr) -} - -// An Invocation represents a call to the go command. -type Invocation struct { - Verb string - Args []string - BuildFlags []string - - // If ModFlag is set, the go command is invoked with -mod=ModFlag. - ModFlag string - - // If ModFile is set, the go command is invoked with -modfile=ModFile. - ModFile string - - // If Overlay is set, the go command is invoked with -overlay=Overlay. - Overlay string - - // If CleanEnv is set, the invocation will run only with the environment - // in Env, not starting with os.Environ. - CleanEnv bool - Env []string - WorkingDir string - Logf func(format string, args ...interface{}) -} - -// Postcondition: both error results have same nilness. -func (i *Invocation) runWithFriendlyError(ctx context.Context, stdout, stderr io.Writer) (friendlyError error, rawError error) { - rawError = i.run(ctx, stdout, stderr) - if rawError != nil { - friendlyError = rawError - // Check for 'go' executable not being found. - if ee, ok := rawError.(*exec.Error); ok && ee.Err == exec.ErrNotFound { - friendlyError = fmt.Errorf("go command required, not found: %v", ee) - } - if ctx.Err() != nil { - friendlyError = ctx.Err() - } - friendlyError = fmt.Errorf("err: %v: stderr: %s", friendlyError, stderr) - } - return -} - -func (i *Invocation) run(ctx context.Context, stdout, stderr io.Writer) error { - log := i.Logf - if log == nil { - log = func(string, ...interface{}) {} - } - - goArgs := []string{i.Verb} - - appendModFile := func() { - if i.ModFile != "" { - goArgs = append(goArgs, "-modfile="+i.ModFile) - } - } - appendModFlag := func() { - if i.ModFlag != "" { - goArgs = append(goArgs, "-mod="+i.ModFlag) - } - } - appendOverlayFlag := func() { - if i.Overlay != "" { - goArgs = append(goArgs, "-overlay="+i.Overlay) - } - } - - switch i.Verb { - case "env", "version": - goArgs = append(goArgs, i.Args...) - case "mod": - // mod needs the sub-verb before flags. - goArgs = append(goArgs, i.Args[0]) - appendModFile() - goArgs = append(goArgs, i.Args[1:]...) - case "get": - goArgs = append(goArgs, i.BuildFlags...) - appendModFile() - goArgs = append(goArgs, i.Args...) - - default: // notably list and build. - goArgs = append(goArgs, i.BuildFlags...) - appendModFile() - appendModFlag() - appendOverlayFlag() - goArgs = append(goArgs, i.Args...) - } - cmd := exec.Command("go", goArgs...) - cmd.Stdout = stdout - cmd.Stderr = stderr - - // cmd.WaitDelay was added only in go1.20 (see #50436). - if waitDelay := reflect.ValueOf(cmd).Elem().FieldByName("WaitDelay"); waitDelay.IsValid() { - // https://go.dev/issue/59541: don't wait forever copying stderr - // after the command has exited. - // After CL 484741 we copy stdout manually, so we we'll stop reading that as - // soon as ctx is done. However, we also don't want to wait around forever - // for stderr. Give a much-longer-than-reasonable delay and then assume that - // something has wedged in the kernel or runtime. 
- waitDelay.Set(reflect.ValueOf(30 * time.Second)) - } - - // On darwin the cwd gets resolved to the real path, which breaks anything that - // expects the working directory to keep the original path, including the - // go command when dealing with modules. - // The Go stdlib has a special feature where if the cwd and the PWD are the - // same node then it trusts the PWD, so by setting it in the env for the child - // process we fix up all the paths returned by the go command. - if !i.CleanEnv { - cmd.Env = os.Environ() - } - cmd.Env = append(cmd.Env, i.Env...) - if i.WorkingDir != "" { - cmd.Env = append(cmd.Env, "PWD="+i.WorkingDir) - cmd.Dir = i.WorkingDir - } - - defer func(start time.Time) { log("%s for %v", time.Since(start), cmdDebugStr(cmd)) }(time.Now()) - - return runCmdContext(ctx, cmd) -} - -// DebugHangingGoCommands may be set by tests to enable additional -// instrumentation (including panics) for debugging hanging Go commands. -// -// See golang/go#54461 for details. -var DebugHangingGoCommands = false - -// runCmdContext is like exec.CommandContext except it sends os.Interrupt -// before os.Kill. -func runCmdContext(ctx context.Context, cmd *exec.Cmd) (err error) { - // If cmd.Stdout is not an *os.File, the exec package will create a pipe and - // copy it to the Writer in a goroutine until the process has finished and - // either the pipe reaches EOF or command's WaitDelay expires. - // - // However, the output from 'go list' can be quite large, and we don't want to - // keep reading (and allocating buffers) if we've already decided we don't - // care about the output. We don't want to wait for the process to finish, and - // we don't wait to wait for the WaitDelay to expire either. - // - // Instead, if cmd.Stdout requires a copying goroutine we explicitly replace - // it with a pipe (which is an *os.File), which we can close in order to stop - // copying output as soon as we realize we don't care about it. - var stdoutW *os.File - if cmd.Stdout != nil { - if _, ok := cmd.Stdout.(*os.File); !ok { - var stdoutR *os.File - stdoutR, stdoutW, err = os.Pipe() - if err != nil { - return err - } - prevStdout := cmd.Stdout - cmd.Stdout = stdoutW - - stdoutErr := make(chan error, 1) - go func() { - _, err := io.Copy(prevStdout, stdoutR) - if err != nil { - err = fmt.Errorf("copying stdout: %w", err) - } - stdoutErr <- err - }() - defer func() { - // We started a goroutine to copy a stdout pipe. - // Wait for it to finish, or terminate it if need be. - var err2 error - select { - case err2 = <-stdoutErr: - stdoutR.Close() - case <-ctx.Done(): - stdoutR.Close() - // Per https://pkg.go.dev/os#File.Close, the call to stdoutR.Close - // should cause the Read call in io.Copy to unblock and return - // immediately, but we still need to receive from stdoutErr to confirm - // that it has happened. - <-stdoutErr - err2 = ctx.Err() - } - if err == nil { - err = err2 - } - }() - - // Per https://pkg.go.dev/os/exec#Cmd, β€œIf Stdout and Stderr are the - // same writer, and have a type that can be compared with ==, at most - // one goroutine at a time will call Write.” - // - // Since we're starting a goroutine that writes to cmd.Stdout, we must - // also update cmd.Stderr so that it still holds. - func() { - defer func() { recover() }() - if cmd.Stderr == prevStdout { - cmd.Stderr = cmd.Stdout - } - }() - } - } - - err = cmd.Start() - if stdoutW != nil { - // The child process has inherited the pipe file, - // so close the copy held in this process. 
- stdoutW.Close() - stdoutW = nil - } - if err != nil { - return err - } - - resChan := make(chan error, 1) - go func() { - resChan <- cmd.Wait() - }() - - // If we're interested in debugging hanging Go commands, stop waiting after a - // minute and panic with interesting information. - debug := DebugHangingGoCommands - if debug { - timer := time.NewTimer(1 * time.Minute) - defer timer.Stop() - select { - case err := <-resChan: - return err - case <-timer.C: - HandleHangingGoCommand(cmd.Process) - case <-ctx.Done(): - } - } else { - select { - case err := <-resChan: - return err - case <-ctx.Done(): - } - } - - // Cancelled. Interrupt and see if it ends voluntarily. - if err := cmd.Process.Signal(os.Interrupt); err == nil { - // (We used to wait only 1s but this proved - // fragile on loaded builder machines.) - timer := time.NewTimer(5 * time.Second) - defer timer.Stop() - select { - case err := <-resChan: - return err - case <-timer.C: - } - } - - // Didn't shut down in response to interrupt. Kill it hard. - // TODO(rfindley): per advice from bcmills@, it may be better to send SIGQUIT - // on certain platforms, such as unix. - if err := cmd.Process.Kill(); err != nil && !errors.Is(err, os.ErrProcessDone) && debug { - log.Printf("error killing the Go command: %v", err) - } - - return <-resChan -} - -func HandleHangingGoCommand(proc *os.Process) { - switch runtime.GOOS { - case "linux", "darwin", "freebsd", "netbsd": - fmt.Fprintln(os.Stderr, `DETECTED A HANGING GO COMMAND - -The gopls test runner has detected a hanging go command. In order to debug -this, the output of ps and lsof/fstat is printed below. - -See golang/go#54461 for more details.`) - - fmt.Fprintln(os.Stderr, "\nps axo ppid,pid,command:") - fmt.Fprintln(os.Stderr, "-------------------------") - psCmd := exec.Command("ps", "axo", "ppid,pid,command") - psCmd.Stdout = os.Stderr - psCmd.Stderr = os.Stderr - if err := psCmd.Run(); err != nil { - panic(fmt.Sprintf("running ps: %v", err)) - } - - listFiles := "lsof" - if runtime.GOOS == "freebsd" || runtime.GOOS == "netbsd" { - listFiles = "fstat" - } - - fmt.Fprintln(os.Stderr, "\n"+listFiles+":") - fmt.Fprintln(os.Stderr, "-----") - listFilesCmd := exec.Command(listFiles) - listFilesCmd.Stdout = os.Stderr - listFilesCmd.Stderr = os.Stderr - if err := listFilesCmd.Run(); err != nil { - panic(fmt.Sprintf("running %s: %v", listFiles, err)) - } - } - panic(fmt.Sprintf("detected hanging go command (pid %d): see golang/go#54461 for more details", proc.Pid)) -} - -func cmdDebugStr(cmd *exec.Cmd) string { - env := make(map[string]string) - for _, kv := range cmd.Env { - split := strings.SplitN(kv, "=", 2) - if len(split) == 2 { - k, v := split[0], split[1] - env[k] = v - } - } - - var args []string - for _, arg := range cmd.Args { - quoted := strconv.Quote(arg) - if quoted[1:len(quoted)-1] != arg || strings.Contains(arg, " ") { - args = append(args, quoted) - } else { - args = append(args, arg) - } - } - return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v GOPROXY=%v PWD=%v %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["GOPROXY"], env["PWD"], strings.Join(args, " ")) -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gocommand/vendor.go b/go/extractor/vendor/golang.org/x/tools/internal/gocommand/vendor.go deleted file mode 100644 index 2d3d408c0bed..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gocommand/vendor.go +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package gocommand - -import ( - "bytes" - "context" - "fmt" - "os" - "path/filepath" - "regexp" - "strings" - "time" - - "golang.org/x/mod/semver" -) - -// ModuleJSON holds information about a module. -type ModuleJSON struct { - Path string // module path - Version string // module version - Versions []string // available module versions (with -versions) - Replace *ModuleJSON // replaced by this module - Time *time.Time // time version was created - Update *ModuleJSON // available update, if any (with -u) - Main bool // is this the main module? - Indirect bool // is this module only an indirect dependency of main module? - Dir string // directory holding files for this module, if any - GoMod string // path to go.mod file used when loading this module, if any - GoVersion string // go version used in module -} - -var modFlagRegexp = regexp.MustCompile(`-mod[ =](\w+)`) - -// VendorEnabled reports whether vendoring is enabled. It takes a *Runner to execute Go commands -// with the supplied context.Context and Invocation. The Invocation can contain pre-defined fields, -// of which only Verb and Args are modified to run the appropriate Go command. -// Inspired by setDefaultBuildMod in modload/init.go -func VendorEnabled(ctx context.Context, inv Invocation, r *Runner) (bool, *ModuleJSON, error) { - mainMod, go114, err := getMainModuleAnd114(ctx, inv, r) - if err != nil { - return false, nil, err - } - - // We check the GOFLAGS to see if there is anything overridden or not. - inv.Verb = "env" - inv.Args = []string{"GOFLAGS"} - stdout, err := r.Run(ctx, inv) - if err != nil { - return false, nil, err - } - goflags := string(bytes.TrimSpace(stdout.Bytes())) - matches := modFlagRegexp.FindStringSubmatch(goflags) - var modFlag string - if len(matches) != 0 { - modFlag = matches[1] - } - // Don't override an explicit '-mod=' argument. - if modFlag == "vendor" { - return true, mainMod, nil - } else if modFlag != "" { - return false, nil, nil - } - if mainMod == nil || !go114 { - return false, nil, nil - } - // Check 1.14's automatic vendor mode. - if fi, err := os.Stat(filepath.Join(mainMod.Dir, "vendor")); err == nil && fi.IsDir() { - if mainMod.GoVersion != "" && semver.Compare("v"+mainMod.GoVersion, "v1.14") >= 0 { - // The Go version is at least 1.14, and a vendor directory exists. - // Set -mod=vendor by default. - return true, mainMod, nil - } - } - return false, nil, nil -} - -// getMainModuleAnd114 gets one of the main modules' information and whether the -// go command in use is 1.14+. This is the information needed to figure out -// if vendoring should be enabled. -func getMainModuleAnd114(ctx context.Context, inv Invocation, r *Runner) (*ModuleJSON, bool, error) { - const format = `{{.Path}} -{{.Dir}} -{{.GoMod}} -{{.GoVersion}} -{{range context.ReleaseTags}}{{if eq . 
"go1.14"}}{{.}}{{end}}{{end}} -` - inv.Verb = "list" - inv.Args = []string{"-m", "-f", format} - stdout, err := r.Run(ctx, inv) - if err != nil { - return nil, false, err - } - - lines := strings.Split(stdout.String(), "\n") - if len(lines) < 5 { - return nil, false, fmt.Errorf("unexpected stdout: %q", stdout.String()) - } - mod := &ModuleJSON{ - Path: lines[0], - Dir: lines[1], - GoMod: lines[2], - GoVersion: lines[3], - Main: true, - } - return mod, lines[4] == "go1.14", nil -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/gocommand/version.go b/go/extractor/vendor/golang.org/x/tools/internal/gocommand/version.go deleted file mode 100644 index 446c5846a60f..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/gocommand/version.go +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package gocommand - -import ( - "context" - "fmt" - "regexp" - "strings" -) - -// GoVersion reports the minor version number of the highest release -// tag built into the go command on the PATH. -// -// Note that this may be higher than the version of the go tool used -// to build this application, and thus the versions of the standard -// go/{scanner,parser,ast,types} packages that are linked into it. -// In that case, callers should either downgrade to the version of -// go used to build the application, or report an error that the -// application is too old to use the go command on the PATH. -func GoVersion(ctx context.Context, inv Invocation, r *Runner) (int, error) { - inv.Verb = "list" - inv.Args = []string{"-e", "-f", `{{context.ReleaseTags}}`, `--`, `unsafe`} - inv.BuildFlags = nil // This is not a build command. - inv.ModFlag = "" - inv.ModFile = "" - inv.Env = append(inv.Env[:len(inv.Env):len(inv.Env)], "GO111MODULE=off") - - stdoutBytes, err := r.Run(ctx, inv) - if err != nil { - return 0, err - } - stdout := stdoutBytes.String() - if len(stdout) < 3 { - return 0, fmt.Errorf("bad ReleaseTags output: %q", stdout) - } - // Split up "[go1.1 go1.15]" and return highest go1.X value. - tags := strings.Fields(stdout[1 : len(stdout)-2]) - for i := len(tags) - 1; i >= 0; i-- { - var version int - if _, err := fmt.Sscanf(tags[i], "go1.%d", &version); err != nil { - continue - } - return version, nil - } - return 0, fmt.Errorf("no parseable ReleaseTags in %v", tags) -} - -// GoVersionOutput returns the complete output of the go version command. -func GoVersionOutput(ctx context.Context, inv Invocation, r *Runner) (string, error) { - inv.Verb = "version" - goVersion, err := r.Run(ctx, inv) - if err != nil { - return "", err - } - return goVersion.String(), nil -} - -// ParseGoVersionOutput extracts the Go version string -// from the output of the "go version" command. -// Given an unrecognized form, it returns an empty string. 
-func ParseGoVersionOutput(data string) string { - re := regexp.MustCompile(`^go version (go\S+|devel \S+)`) - m := re.FindStringSubmatch(data) - if len(m) != 2 { - return "" // unrecognized version - } - return m[1] -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/packagesinternal/BUILD.bazel b/go/extractor/vendor/golang.org/x/tools/internal/packagesinternal/BUILD.bazel deleted file mode 100644 index 2d2b7dc5b33c..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/packagesinternal/BUILD.bazel +++ /dev/null @@ -1,11 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "packagesinternal", - srcs = ["packages.go"], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/tools/internal/packagesinternal", - importpath = "golang.org/x/tools/internal/packagesinternal", - visibility = ["//go/extractor/vendor/golang.org/x/tools:__subpackages__"], -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/packagesinternal/packages.go b/go/extractor/vendor/golang.org/x/tools/internal/packagesinternal/packages.go deleted file mode 100644 index 44719de173bf..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/packagesinternal/packages.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package packagesinternal exposes internal-only fields from go/packages. -package packagesinternal - -var GetForTest = func(p interface{}) string { return "" } -var GetDepsErrors = func(p interface{}) []*PackageError { return nil } - -type PackageError struct { - ImportStack []string // shortest path from package named on command line to this one - Pos string // position of error (if present, file:line:col) - Err string // the error itself -} - -var TypecheckCgo int -var DepsErrors int // must be set as a LoadMode to call GetDepsErrors -var ForTest int // must be set as a LoadMode to call GetForTest - -var SetModFlag = func(config interface{}, value string) {} -var SetModFile = func(config interface{}, value string) {} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/BUILD.bazel b/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/BUILD.bazel deleted file mode 100644 index cce327470517..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/BUILD.bazel +++ /dev/null @@ -1,23 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "pkgbits", - srcs = [ - "codes.go", - "decoder.go", - "doc.go", - "encoder.go", - "flags.go", - "frames_go1.go", - "frames_go17.go", - "reloc.go", - "support.go", - "sync.go", - "syncmarker_string.go", - ], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/tools/internal/pkgbits", - importpath = "golang.org/x/tools/internal/pkgbits", - visibility = ["//go/extractor/vendor/golang.org/x/tools:__subpackages__"], -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/codes.go b/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/codes.go deleted file mode 100644 index f0cabde96eba..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/codes.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package pkgbits - -// A Code is an enum value that can be encoded into bitstreams. -// -// Code types are preferable for enum types, because they allow -// Decoder to detect desyncs. -type Code interface { - // Marker returns the SyncMarker for the Code's dynamic type. - Marker() SyncMarker - - // Value returns the Code's ordinal value. - Value() int -} - -// A CodeVal distinguishes among go/constant.Value encodings. -type CodeVal int - -func (c CodeVal) Marker() SyncMarker { return SyncVal } -func (c CodeVal) Value() int { return int(c) } - -// Note: These values are public and cannot be changed without -// updating the go/types importers. - -const ( - ValBool CodeVal = iota - ValString - ValInt64 - ValBigInt - ValBigRat - ValBigFloat -) - -// A CodeType distinguishes among go/types.Type encodings. -type CodeType int - -func (c CodeType) Marker() SyncMarker { return SyncType } -func (c CodeType) Value() int { return int(c) } - -// Note: These values are public and cannot be changed without -// updating the go/types importers. - -const ( - TypeBasic CodeType = iota - TypeNamed - TypePointer - TypeSlice - TypeArray - TypeChan - TypeMap - TypeSignature - TypeStruct - TypeInterface - TypeUnion - TypeTypeParam -) - -// A CodeObj distinguishes among go/types.Object encodings. -type CodeObj int - -func (c CodeObj) Marker() SyncMarker { return SyncCodeObj } -func (c CodeObj) Value() int { return int(c) } - -// Note: These values are public and cannot be changed without -// updating the go/types importers. - -const ( - ObjAlias CodeObj = iota - ObjConst - ObjType - ObjFunc - ObjVar - ObjStub -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/decoder.go b/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/decoder.go deleted file mode 100644 index b92e8e6eb329..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/decoder.go +++ /dev/null @@ -1,517 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package pkgbits - -import ( - "encoding/binary" - "errors" - "fmt" - "go/constant" - "go/token" - "io" - "math/big" - "os" - "runtime" - "strings" -) - -// A PkgDecoder provides methods for decoding a package's Unified IR -// export data. -type PkgDecoder struct { - // version is the file format version. - version uint32 - - // sync indicates whether the file uses sync markers. - sync bool - - // pkgPath is the package path for the package to be decoded. - // - // TODO(mdempsky): Remove; unneeded since CL 391014. - pkgPath string - - // elemData is the full data payload of the encoded package. - // Elements are densely and contiguously packed together. - // - // The last 8 bytes of elemData are the package fingerprint. - elemData string - - // elemEnds stores the byte-offset end positions of element - // bitstreams within elemData. - // - // For example, element I's bitstream data starts at elemEnds[I-1] - // (or 0, if I==0) and ends at elemEnds[I]. - // - // Note: elemEnds is indexed by absolute indices, not - // section-relative indices. - elemEnds []uint32 - - // elemEndsEnds stores the index-offset end positions of relocation - // sections within elemEnds. - // - // For example, section K's end positions start at elemEndsEnds[K-1] - // (or 0, if K==0) and end at elemEndsEnds[K]. 
- elemEndsEnds [numRelocs]uint32 - - scratchRelocEnt []RelocEnt -} - -// PkgPath returns the package path for the package -// -// TODO(mdempsky): Remove; unneeded since CL 391014. -func (pr *PkgDecoder) PkgPath() string { return pr.pkgPath } - -// SyncMarkers reports whether pr uses sync markers. -func (pr *PkgDecoder) SyncMarkers() bool { return pr.sync } - -// NewPkgDecoder returns a PkgDecoder initialized to read the Unified -// IR export data from input. pkgPath is the package path for the -// compilation unit that produced the export data. -// -// TODO(mdempsky): Remove pkgPath parameter; unneeded since CL 391014. -func NewPkgDecoder(pkgPath, input string) PkgDecoder { - pr := PkgDecoder{ - pkgPath: pkgPath, - } - - // TODO(mdempsky): Implement direct indexing of input string to - // avoid copying the position information. - - r := strings.NewReader(input) - - assert(binary.Read(r, binary.LittleEndian, &pr.version) == nil) - - switch pr.version { - default: - panic(fmt.Errorf("unsupported version: %v", pr.version)) - case 0: - // no flags - case 1: - var flags uint32 - assert(binary.Read(r, binary.LittleEndian, &flags) == nil) - pr.sync = flags&flagSyncMarkers != 0 - } - - assert(binary.Read(r, binary.LittleEndian, pr.elemEndsEnds[:]) == nil) - - pr.elemEnds = make([]uint32, pr.elemEndsEnds[len(pr.elemEndsEnds)-1]) - assert(binary.Read(r, binary.LittleEndian, pr.elemEnds[:]) == nil) - - pos, err := r.Seek(0, io.SeekCurrent) - assert(err == nil) - - pr.elemData = input[pos:] - assert(len(pr.elemData)-8 == int(pr.elemEnds[len(pr.elemEnds)-1])) - - return pr -} - -// NumElems returns the number of elements in section k. -func (pr *PkgDecoder) NumElems(k RelocKind) int { - count := int(pr.elemEndsEnds[k]) - if k > 0 { - count -= int(pr.elemEndsEnds[k-1]) - } - return count -} - -// TotalElems returns the total number of elements across all sections. -func (pr *PkgDecoder) TotalElems() int { - return len(pr.elemEnds) -} - -// Fingerprint returns the package fingerprint. -func (pr *PkgDecoder) Fingerprint() [8]byte { - var fp [8]byte - copy(fp[:], pr.elemData[len(pr.elemData)-8:]) - return fp -} - -// AbsIdx returns the absolute index for the given (section, index) -// pair. -func (pr *PkgDecoder) AbsIdx(k RelocKind, idx Index) int { - absIdx := int(idx) - if k > 0 { - absIdx += int(pr.elemEndsEnds[k-1]) - } - if absIdx >= int(pr.elemEndsEnds[k]) { - errorf("%v:%v is out of bounds; %v", k, idx, pr.elemEndsEnds) - } - return absIdx -} - -// DataIdx returns the raw element bitstream for the given (section, -// index) pair. -func (pr *PkgDecoder) DataIdx(k RelocKind, idx Index) string { - absIdx := pr.AbsIdx(k, idx) - - var start uint32 - if absIdx > 0 { - start = pr.elemEnds[absIdx-1] - } - end := pr.elemEnds[absIdx] - - return pr.elemData[start:end] -} - -// StringIdx returns the string value for the given string index. -func (pr *PkgDecoder) StringIdx(idx Index) string { - return pr.DataIdx(RelocString, idx) -} - -// NewDecoder returns a Decoder for the given (section, index) pair, -// and decodes the given SyncMarker from the element bitstream. -func (pr *PkgDecoder) NewDecoder(k RelocKind, idx Index, marker SyncMarker) Decoder { - r := pr.NewDecoderRaw(k, idx) - r.Sync(marker) - return r -} - -// TempDecoder returns a Decoder for the given (section, index) pair, -// and decodes the given SyncMarker from the element bitstream. -// If possible the Decoder should be RetireDecoder'd when it is no longer -// needed, this will avoid heap allocations. 
-func (pr *PkgDecoder) TempDecoder(k RelocKind, idx Index, marker SyncMarker) Decoder { - r := pr.TempDecoderRaw(k, idx) - r.Sync(marker) - return r -} - -func (pr *PkgDecoder) RetireDecoder(d *Decoder) { - pr.scratchRelocEnt = d.Relocs - d.Relocs = nil -} - -// NewDecoderRaw returns a Decoder for the given (section, index) pair. -// -// Most callers should use NewDecoder instead. -func (pr *PkgDecoder) NewDecoderRaw(k RelocKind, idx Index) Decoder { - r := Decoder{ - common: pr, - k: k, - Idx: idx, - } - - // TODO(mdempsky) r.data.Reset(...) after #44505 is resolved. - r.Data = *strings.NewReader(pr.DataIdx(k, idx)) - - r.Sync(SyncRelocs) - r.Relocs = make([]RelocEnt, r.Len()) - for i := range r.Relocs { - r.Sync(SyncReloc) - r.Relocs[i] = RelocEnt{RelocKind(r.Len()), Index(r.Len())} - } - - return r -} - -func (pr *PkgDecoder) TempDecoderRaw(k RelocKind, idx Index) Decoder { - r := Decoder{ - common: pr, - k: k, - Idx: idx, - } - - r.Data.Reset(pr.DataIdx(k, idx)) - r.Sync(SyncRelocs) - l := r.Len() - if cap(pr.scratchRelocEnt) >= l { - r.Relocs = pr.scratchRelocEnt[:l] - pr.scratchRelocEnt = nil - } else { - r.Relocs = make([]RelocEnt, l) - } - for i := range r.Relocs { - r.Sync(SyncReloc) - r.Relocs[i] = RelocEnt{RelocKind(r.Len()), Index(r.Len())} - } - - return r -} - -// A Decoder provides methods for decoding an individual element's -// bitstream data. -type Decoder struct { - common *PkgDecoder - - Relocs []RelocEnt - Data strings.Reader - - k RelocKind - Idx Index -} - -func (r *Decoder) checkErr(err error) { - if err != nil { - errorf("unexpected decoding error: %w", err) - } -} - -func (r *Decoder) rawUvarint() uint64 { - x, err := readUvarint(&r.Data) - r.checkErr(err) - return x -} - -// readUvarint is a type-specialized copy of encoding/binary.ReadUvarint. -// This avoids the interface conversion and thus has better escape properties, -// which flows up the stack. -func readUvarint(r *strings.Reader) (uint64, error) { - var x uint64 - var s uint - for i := 0; i < binary.MaxVarintLen64; i++ { - b, err := r.ReadByte() - if err != nil { - if i > 0 && err == io.EOF { - err = io.ErrUnexpectedEOF - } - return x, err - } - if b < 0x80 { - if i == binary.MaxVarintLen64-1 && b > 1 { - return x, overflow - } - return x | uint64(b)<<s, nil - } - x |= uint64(b&0x7f) << s - s += 7 - } - return x, overflow -} - -var overflow = errors.New("pkgbits: readUvarint overflows a 64-bit integer") - -func (r *Decoder) rawVarint() int64 { - ux := r.rawUvarint() - - // Zig-zag decode. - x := int64(ux >> 1) - if ux&1 != 0 { - x = ^x - } - return x -} - -func (r *Decoder) rawReloc(k RelocKind, idx int) Index { - e := r.Relocs[idx] - assert(e.Kind == k) - return e.Idx -} - -// Sync decodes a sync marker from the element bitstream and asserts -// that it matches the expected marker. -// -// If r.common.sync is false, then Sync is a no-op. -func (r *Decoder) Sync(mWant SyncMarker) { - if !r.common.sync { - return - } - - pos, _ := r.Data.Seek(0, io.SeekCurrent) - mHave := SyncMarker(r.rawUvarint()) - writerPCs := make([]int, r.rawUvarint()) - for i := range writerPCs { - writerPCs[i] = int(r.rawUvarint()) - } - - if mHave == mWant { - return - } - - // There's some tension here between printing: - // - // (1) full file paths that tools can recognize (e.g., so emacs - // hyperlinks the "file:line" text for easy navigation), or - // - // (2) short file paths that are easier for humans to read (e.g., by - // omitting redundant or irrelevant details, so it's easier to - // focus on the useful bits that remain). - // - // The current formatting favors the former, as it seems more - // helpful in practice. But perhaps the formatting could be improved - // to better address both concerns.
For example, use relative file - // paths if they would be shorter, or rewrite file paths to contain - // "$GOROOT" (like objabi.AbsFile does) if tools can be taught how - // to reliably expand that again. - - fmt.Printf("export data desync: package %q, section %v, index %v, offset %v\n", r.common.pkgPath, r.k, r.Idx, pos) - - fmt.Printf("\nfound %v, written at:\n", mHave) - if len(writerPCs) == 0 { - fmt.Printf("\t[stack trace unavailable; recompile package %q with -d=syncframes]\n", r.common.pkgPath) - } - for _, pc := range writerPCs { - fmt.Printf("\t%s\n", r.common.StringIdx(r.rawReloc(RelocString, pc))) - } - - fmt.Printf("\nexpected %v, reading at:\n", mWant) - var readerPCs [32]uintptr // TODO(mdempsky): Dynamically size? - n := runtime.Callers(2, readerPCs[:]) - for _, pc := range fmtFrames(readerPCs[:n]...) { - fmt.Printf("\t%s\n", pc) - } - - // We already printed a stack trace for the reader, so now we can - // simply exit. Printing a second one with panic or base.Fatalf - // would just be noise. - os.Exit(1) -} - -// Bool decodes and returns a bool value from the element bitstream. -func (r *Decoder) Bool() bool { - r.Sync(SyncBool) - x, err := r.Data.ReadByte() - r.checkErr(err) - assert(x < 2) - return x != 0 -} - -// Int64 decodes and returns an int64 value from the element bitstream. -func (r *Decoder) Int64() int64 { - r.Sync(SyncInt64) - return r.rawVarint() -} - -// Uint64 decodes and returns a uint64 value from the element bitstream. -func (r *Decoder) Uint64() uint64 { - r.Sync(SyncUint64) - return r.rawUvarint() -} - -// Len decodes and returns a non-negative int value from the element bitstream. -func (r *Decoder) Len() int { x := r.Uint64(); v := int(x); assert(uint64(v) == x); return v } - -// Int decodes and returns an int value from the element bitstream. -func (r *Decoder) Int() int { x := r.Int64(); v := int(x); assert(int64(v) == x); return v } - -// Uint decodes and returns a uint value from the element bitstream. -func (r *Decoder) Uint() uint { x := r.Uint64(); v := uint(x); assert(uint64(v) == x); return v } - -// Code decodes a Code value from the element bitstream and returns -// its ordinal value. It's the caller's responsibility to convert the -// result to an appropriate Code type. -// -// TODO(mdempsky): Ideally this method would have signature "Code[T -// Code] T" instead, but we don't allow generic methods and the -// compiler can't depend on generics yet anyway. -func (r *Decoder) Code(mark SyncMarker) int { - r.Sync(mark) - return r.Len() -} - -// Reloc decodes a relocation of expected section k from the element -// bitstream and returns an index to the referenced element. -func (r *Decoder) Reloc(k RelocKind) Index { - r.Sync(SyncUseReloc) - return r.rawReloc(k, r.Len()) -} - -// String decodes and returns a string value from the element -// bitstream. -func (r *Decoder) String() string { - r.Sync(SyncString) - return r.common.StringIdx(r.Reloc(RelocString)) -} - -// Strings decodes and returns a variable-length slice of strings from -// the element bitstream. -func (r *Decoder) Strings() []string { - res := make([]string, r.Len()) - for i := range res { - res[i] = r.String() - } - return res -} - -// Value decodes and returns a constant.Value from the element -// bitstream. 
-func (r *Decoder) Value() constant.Value { - r.Sync(SyncValue) - isComplex := r.Bool() - val := r.scalar() - if isComplex { - val = constant.BinaryOp(val, token.ADD, constant.MakeImag(r.scalar())) - } - return val -} - -func (r *Decoder) scalar() constant.Value { - switch tag := CodeVal(r.Code(SyncVal)); tag { - default: - panic(fmt.Errorf("unexpected scalar tag: %v", tag)) - - case ValBool: - return constant.MakeBool(r.Bool()) - case ValString: - return constant.MakeString(r.String()) - case ValInt64: - return constant.MakeInt64(r.Int64()) - case ValBigInt: - return constant.Make(r.bigInt()) - case ValBigRat: - num := r.bigInt() - denom := r.bigInt() - return constant.Make(new(big.Rat).SetFrac(num, denom)) - case ValBigFloat: - return constant.Make(r.bigFloat()) - } -} - -func (r *Decoder) bigInt() *big.Int { - v := new(big.Int).SetBytes([]byte(r.String())) - if r.Bool() { - v.Neg(v) - } - return v -} - -func (r *Decoder) bigFloat() *big.Float { - v := new(big.Float).SetPrec(512) - assert(v.UnmarshalText([]byte(r.String())) == nil) - return v -} - -// @@@ Helpers - -// TODO(mdempsky): These should probably be removed. I think they're a -// smell that the export data format is not yet quite right. - -// PeekPkgPath returns the package path for the specified package -// index. -func (pr *PkgDecoder) PeekPkgPath(idx Index) string { - var path string - { - r := pr.TempDecoder(RelocPkg, idx, SyncPkgDef) - path = r.String() - pr.RetireDecoder(&r) - } - if path == "" { - path = pr.pkgPath - } - return path -} - -// PeekObj returns the package path, object name, and CodeObj for the -// specified object index. -func (pr *PkgDecoder) PeekObj(idx Index) (string, string, CodeObj) { - var ridx Index - var name string - var rcode int - { - r := pr.TempDecoder(RelocName, idx, SyncObject1) - r.Sync(SyncSym) - r.Sync(SyncPkg) - ridx = r.Reloc(RelocPkg) - name = r.String() - rcode = r.Code(SyncCodeObj) - pr.RetireDecoder(&r) - } - - path := pr.PeekPkgPath(ridx) - assert(name != "") - - tag := CodeObj(rcode) - - return path, name, tag -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/doc.go b/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/doc.go deleted file mode 100644 index c8a2796b5e4c..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/doc.go +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package pkgbits implements low-level coding abstractions for -// Unified IR's export data format. -// -// At a low-level, a package is a collection of bitstream elements. -// Each element has a "kind" and a dense, non-negative index. -// Elements can be randomly accessed given their kind and index. -// -// Individual elements are sequences of variable-length values (e.g., -// integers, booleans, strings, go/constant values, cross-references -// to other elements). Package pkgbits provides APIs for encoding and -// decoding these low-level values, but the details of mapping -// higher-level Go constructs into elements is left to higher-level -// abstractions. -// -// Elements may cross-reference each other with "relocations." For -// example, an element representing a pointer type has a relocation -// referring to the element type. -// -// Go constructs may be composed as a constellation of multiple -// elements. 
For example, a declared function may have one element to -// describe the object (e.g., its name, type, position), and a -// separate element to describe its function body. This allows readers -// some flexibility in efficiently seeking or re-reading data (e.g., -// inlining requires re-reading the function body for each inlined -// call, without needing to re-read the object-level details). -// -// This is a copy of internal/pkgbits in the Go implementation. -package pkgbits diff --git a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/encoder.go b/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/encoder.go deleted file mode 100644 index 6482617a4fcc..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/encoder.go +++ /dev/null @@ -1,383 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package pkgbits - -import ( - "bytes" - "crypto/md5" - "encoding/binary" - "go/constant" - "io" - "math/big" - "runtime" -) - -// currentVersion is the current version number. -// -// - v0: initial prototype -// -// - v1: adds the flags uint32 word -const currentVersion uint32 = 1 - -// A PkgEncoder provides methods for encoding a package's Unified IR -// export data. -type PkgEncoder struct { - // elems holds the bitstream for previously encoded elements. - elems [numRelocs][]string - - // stringsIdx maps previously encoded strings to their index within - // the RelocString section, to allow deduplication. That is, - // elems[RelocString][stringsIdx[s]] == s (if present). - stringsIdx map[string]Index - - // syncFrames is the number of frames to write at each sync - // marker. A negative value means sync markers are omitted. - syncFrames int -} - -// SyncMarkers reports whether pw uses sync markers. -func (pw *PkgEncoder) SyncMarkers() bool { return pw.syncFrames >= 0 } - -// NewPkgEncoder returns an initialized PkgEncoder. -// -// syncFrames is the number of caller frames that should be serialized -// at Sync points. Serializing additional frames results in larger -// export data files, but can help diagnosing desync errors in -// higher-level Unified IR reader/writer code. If syncFrames is -// negative, then sync markers are omitted entirely. -func NewPkgEncoder(syncFrames int) PkgEncoder { - return PkgEncoder{ - stringsIdx: make(map[string]Index), - syncFrames: syncFrames, - } -} - -// DumpTo writes the package's encoded data to out0 and returns the -// package fingerprint. -func (pw *PkgEncoder) DumpTo(out0 io.Writer) (fingerprint [8]byte) { - h := md5.New() - out := io.MultiWriter(out0, h) - - writeUint32 := func(x uint32) { - assert(binary.Write(out, binary.LittleEndian, x) == nil) - } - - writeUint32(currentVersion) - - var flags uint32 - if pw.SyncMarkers() { - flags |= flagSyncMarkers - } - writeUint32(flags) - - // Write elemEndsEnds. - var sum uint32 - for _, elems := range &pw.elems { - sum += uint32(len(elems)) - writeUint32(sum) - } - - // Write elemEnds. - sum = 0 - for _, elems := range &pw.elems { - for _, elem := range elems { - sum += uint32(len(elem)) - writeUint32(sum) - } - } - - // Write elemData. - for _, elems := range &pw.elems { - for _, elem := range elems { - _, err := io.WriteString(out, elem) - assert(err == nil) - } - } - - // Write fingerprint. 
- copy(fingerprint[:], h.Sum(nil)) - _, err := out0.Write(fingerprint[:]) - assert(err == nil) - - return -} - -// StringIdx adds a string value to the strings section, if not -// already present, and returns its index. -func (pw *PkgEncoder) StringIdx(s string) Index { - if idx, ok := pw.stringsIdx[s]; ok { - assert(pw.elems[RelocString][idx] == s) - return idx - } - - idx := Index(len(pw.elems[RelocString])) - pw.elems[RelocString] = append(pw.elems[RelocString], s) - pw.stringsIdx[s] = idx - return idx -} - -// NewEncoder returns an Encoder for a new element within the given -// section, and encodes the given SyncMarker as the start of the -// element bitstream. -func (pw *PkgEncoder) NewEncoder(k RelocKind, marker SyncMarker) Encoder { - e := pw.NewEncoderRaw(k) - e.Sync(marker) - return e -} - -// NewEncoderRaw returns an Encoder for a new element within the given -// section. -// -// Most callers should use NewEncoder instead. -func (pw *PkgEncoder) NewEncoderRaw(k RelocKind) Encoder { - idx := Index(len(pw.elems[k])) - pw.elems[k] = append(pw.elems[k], "") // placeholder - - return Encoder{ - p: pw, - k: k, - Idx: idx, - } -} - -// An Encoder provides methods for encoding an individual element's -// bitstream data. -type Encoder struct { - p *PkgEncoder - - Relocs []RelocEnt - RelocMap map[RelocEnt]uint32 - Data bytes.Buffer // accumulated element bitstream data - - encodingRelocHeader bool - - k RelocKind - Idx Index // index within relocation section -} - -// Flush finalizes the element's bitstream and returns its Index. -func (w *Encoder) Flush() Index { - var sb bytes.Buffer // TODO(mdempsky): strings.Builder after #44505 is resolved - - // Backup the data so we write the relocations at the front. - var tmp bytes.Buffer - io.Copy(&tmp, &w.Data) - - // TODO(mdempsky): Consider writing these out separately so they're - // easier to strip, along with function bodies, so that we can prune - // down to just the data that's relevant to go/types. - if w.encodingRelocHeader { - panic("encodingRelocHeader already true; recursive flush?") - } - w.encodingRelocHeader = true - w.Sync(SyncRelocs) - w.Len(len(w.Relocs)) - for _, rEnt := range w.Relocs { - w.Sync(SyncReloc) - w.Len(int(rEnt.Kind)) - w.Len(int(rEnt.Idx)) - } - - io.Copy(&sb, &w.Data) - io.Copy(&sb, &tmp) - w.p.elems[w.k][w.Idx] = sb.String() - - return w.Idx -} - -func (w *Encoder) checkErr(err error) { - if err != nil { - errorf("unexpected encoding error: %v", err) - } -} - -func (w *Encoder) rawUvarint(x uint64) { - var buf [binary.MaxVarintLen64]byte - n := binary.PutUvarint(buf[:], x) - _, err := w.Data.Write(buf[:n]) - w.checkErr(err) -} - -func (w *Encoder) rawVarint(x int64) { - // Zig-zag encode. - ux := uint64(x) << 1 - if x < 0 { - ux = ^ux - } - - w.rawUvarint(ux) -} - -func (w *Encoder) rawReloc(r RelocKind, idx Index) int { - e := RelocEnt{r, idx} - if w.RelocMap != nil { - if i, ok := w.RelocMap[e]; ok { - return int(i) - } - } else { - w.RelocMap = make(map[RelocEnt]uint32) - } - - i := len(w.Relocs) - w.RelocMap[e] = uint32(i) - w.Relocs = append(w.Relocs, e) - return i -} - -func (w *Encoder) Sync(m SyncMarker) { - if !w.p.SyncMarkers() { - return - } - - // Writing out stack frame string references requires working - // relocations, but writing out the relocations themselves involves - // sync markers. To prevent infinite recursion, we simply trim the - // stack frame for sync markers within the relocation header. 
- var frames []string - if !w.encodingRelocHeader && w.p.syncFrames > 0 { - pcs := make([]uintptr, w.p.syncFrames) - n := runtime.Callers(2, pcs) - frames = fmtFrames(pcs[:n]...) - } - - // TODO(mdempsky): Save space by writing out stack frames as a - // linked list so we can share common stack frames. - w.rawUvarint(uint64(m)) - w.rawUvarint(uint64(len(frames))) - for _, frame := range frames { - w.rawUvarint(uint64(w.rawReloc(RelocString, w.p.StringIdx(frame)))) - } -} - -// Bool encodes and writes a bool value into the element bitstream, -// and then returns the bool value. -// -// For simple, 2-alternative encodings, the idiomatic way to call Bool -// is something like: -// -// if w.Bool(x != 0) { -// // alternative #1 -// } else { -// // alternative #2 -// } -// -// For multi-alternative encodings, use Code instead. -func (w *Encoder) Bool(b bool) bool { - w.Sync(SyncBool) - var x byte - if b { - x = 1 - } - err := w.Data.WriteByte(x) - w.checkErr(err) - return b -} - -// Int64 encodes and writes an int64 value into the element bitstream. -func (w *Encoder) Int64(x int64) { - w.Sync(SyncInt64) - w.rawVarint(x) -} - -// Uint64 encodes and writes a uint64 value into the element bitstream. -func (w *Encoder) Uint64(x uint64) { - w.Sync(SyncUint64) - w.rawUvarint(x) -} - -// Len encodes and writes a non-negative int value into the element bitstream. -func (w *Encoder) Len(x int) { assert(x >= 0); w.Uint64(uint64(x)) } - -// Int encodes and writes an int value into the element bitstream. -func (w *Encoder) Int(x int) { w.Int64(int64(x)) } - -// Uint encodes and writes a uint value into the element bitstream. -func (w *Encoder) Uint(x uint) { w.Uint64(uint64(x)) } - -// Reloc encodes and writes a relocation for the given (section, -// index) pair into the element bitstream. -// -// Note: Only the index is formally written into the element -// bitstream, so bitstream decoders must know from context which -// section an encoded relocation refers to. -func (w *Encoder) Reloc(r RelocKind, idx Index) { - w.Sync(SyncUseReloc) - w.Len(w.rawReloc(r, idx)) -} - -// Code encodes and writes a Code value into the element bitstream. -func (w *Encoder) Code(c Code) { - w.Sync(c.Marker()) - w.Len(c.Value()) -} - -// String encodes and writes a string value into the element -// bitstream. -// -// Internally, strings are deduplicated by adding them to the strings -// section (if not already present), and then writing a relocation -// into the element bitstream. -func (w *Encoder) String(s string) { - w.Sync(SyncString) - w.Reloc(RelocString, w.p.StringIdx(s)) -} - -// Strings encodes and writes a variable-length slice of strings into -// the element bitstream. -func (w *Encoder) Strings(ss []string) { - w.Len(len(ss)) - for _, s := range ss { - w.String(s) - } -} - -// Value encodes and writes a constant.Value into the element -// bitstream. 
-func (w *Encoder) Value(val constant.Value) { - w.Sync(SyncValue) - if w.Bool(val.Kind() == constant.Complex) { - w.scalar(constant.Real(val)) - w.scalar(constant.Imag(val)) - } else { - w.scalar(val) - } -} - -func (w *Encoder) scalar(val constant.Value) { - switch v := constant.Val(val).(type) { - default: - errorf("unhandled %v (%v)", val, val.Kind()) - case bool: - w.Code(ValBool) - w.Bool(v) - case string: - w.Code(ValString) - w.String(v) - case int64: - w.Code(ValInt64) - w.Int64(v) - case *big.Int: - w.Code(ValBigInt) - w.bigInt(v) - case *big.Rat: - w.Code(ValBigRat) - w.bigInt(v.Num()) - w.bigInt(v.Denom()) - case *big.Float: - w.Code(ValBigFloat) - w.bigFloat(v) - } -} - -func (w *Encoder) bigInt(v *big.Int) { - b := v.Bytes() - w.String(string(b)) // TODO: More efficient encoding. - w.Bool(v.Sign() < 0) -} - -func (w *Encoder) bigFloat(v *big.Float) { - b := v.Append(nil, 'p', -1) - w.String(string(b)) // TODO: More efficient encoding. -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/flags.go b/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/flags.go deleted file mode 100644 index 654222745fac..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/flags.go +++ /dev/null @@ -1,9 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package pkgbits - -const ( - flagSyncMarkers = 1 << iota // file format contains sync markers -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/frames_go1.go b/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/frames_go1.go deleted file mode 100644 index 5294f6a63edd..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/frames_go1.go +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.7 -// +build !go1.7 - -// TODO(mdempsky): Remove after #44505 is resolved - -package pkgbits - -import "runtime" - -func walkFrames(pcs []uintptr, visit frameVisitor) { - for _, pc := range pcs { - fn := runtime.FuncForPC(pc) - file, line := fn.FileLine(pc) - - visit(file, line, fn.Name(), pc-fn.Entry()) - } -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/frames_go17.go b/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/frames_go17.go deleted file mode 100644 index 2324ae7adfe2..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/frames_go17.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.7 -// +build go1.7 - -package pkgbits - -import "runtime" - -// walkFrames calls visit for each call frame represented by pcs. -// -// pcs should be a slice of PCs, as returned by runtime.Callers. 
-func walkFrames(pcs []uintptr, visit frameVisitor) { - if len(pcs) == 0 { - return - } - - frames := runtime.CallersFrames(pcs) - for { - frame, more := frames.Next() - visit(frame.File, frame.Line, frame.Function, frame.PC-frame.Entry) - if !more { - return - } - } -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/reloc.go b/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/reloc.go deleted file mode 100644 index fcdfb97ca992..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/reloc.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package pkgbits - -// A RelocKind indicates a particular section within a unified IR export. -type RelocKind int32 - -// An Index represents a bitstream element index within a particular -// section. -type Index int32 - -// A relocEnt (relocation entry) is an entry in an element's local -// reference table. -// -// TODO(mdempsky): Rename this too. -type RelocEnt struct { - Kind RelocKind - Idx Index -} - -// Reserved indices within the meta relocation section. -const ( - PublicRootIdx Index = 0 - PrivateRootIdx Index = 1 -) - -const ( - RelocString RelocKind = iota - RelocMeta - RelocPosBase - RelocPkg - RelocName - RelocType - RelocObj - RelocObjExt - RelocObjDict - RelocBody - - numRelocs = iota -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/support.go b/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/support.go deleted file mode 100644 index ad26d3b28cae..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/support.go +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package pkgbits - -import "fmt" - -func assert(b bool) { - if !b { - panic("assertion failed") - } -} - -func errorf(format string, args ...interface{}) { - panic(fmt.Errorf(format, args...)) -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/sync.go b/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/sync.go deleted file mode 100644 index 5bd51ef71700..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/sync.go +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package pkgbits - -import ( - "fmt" - "strings" -) - -// fmtFrames formats a backtrace for reporting reader/writer desyncs. -func fmtFrames(pcs ...uintptr) []string { - res := make([]string, 0, len(pcs)) - walkFrames(pcs, func(file string, line int, name string, offset uintptr) { - // Trim package from function name. It's just redundant noise. - name = strings.TrimPrefix(name, "cmd/compile/internal/noder.") - - res = append(res, fmt.Sprintf("%s:%v: %s +0x%v", file, line, name, offset)) - }) - return res -} - -type frameVisitor func(file string, line int, name string, offset uintptr) - -// SyncMarker is an enum type that represents markers that may be -// written to export data to ensure the reader and writer stay -// synchronized. -type SyncMarker int - -//go:generate stringer -type=SyncMarker -trimprefix=Sync - -const ( - _ SyncMarker = iota - - // Public markers (known to go/types importers). - - // Low-level coding markers. 
- SyncEOF - SyncBool - SyncInt64 - SyncUint64 - SyncString - SyncValue - SyncVal - SyncRelocs - SyncReloc - SyncUseReloc - - // Higher-level object and type markers. - SyncPublic - SyncPos - SyncPosBase - SyncObject - SyncObject1 - SyncPkg - SyncPkgDef - SyncMethod - SyncType - SyncTypeIdx - SyncTypeParamNames - SyncSignature - SyncParams - SyncParam - SyncCodeObj - SyncSym - SyncLocalIdent - SyncSelector - - // Private markers (only known to cmd/compile). - SyncPrivate - - SyncFuncExt - SyncVarExt - SyncTypeExt - SyncPragma - - SyncExprList - SyncExprs - SyncExpr - SyncExprType - SyncAssign - SyncOp - SyncFuncLit - SyncCompLit - - SyncDecl - SyncFuncBody - SyncOpenScope - SyncCloseScope - SyncCloseAnotherScope - SyncDeclNames - SyncDeclName - - SyncStmts - SyncBlockStmt - SyncIfStmt - SyncForStmt - SyncSwitchStmt - SyncRangeStmt - SyncCaseClause - SyncCommClause - SyncSelectStmt - SyncDecls - SyncLabeledStmt - SyncUseObjLocal - SyncAddLocal - SyncLinkname - SyncStmt1 - SyncStmtsEnd - SyncLabel - SyncOptLabel -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/syncmarker_string.go b/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/syncmarker_string.go deleted file mode 100644 index 4a5b0ca5f2ff..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/pkgbits/syncmarker_string.go +++ /dev/null @@ -1,89 +0,0 @@ -// Code generated by "stringer -type=SyncMarker -trimprefix=Sync"; DO NOT EDIT. - -package pkgbits - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. - var x [1]struct{} - _ = x[SyncEOF-1] - _ = x[SyncBool-2] - _ = x[SyncInt64-3] - _ = x[SyncUint64-4] - _ = x[SyncString-5] - _ = x[SyncValue-6] - _ = x[SyncVal-7] - _ = x[SyncRelocs-8] - _ = x[SyncReloc-9] - _ = x[SyncUseReloc-10] - _ = x[SyncPublic-11] - _ = x[SyncPos-12] - _ = x[SyncPosBase-13] - _ = x[SyncObject-14] - _ = x[SyncObject1-15] - _ = x[SyncPkg-16] - _ = x[SyncPkgDef-17] - _ = x[SyncMethod-18] - _ = x[SyncType-19] - _ = x[SyncTypeIdx-20] - _ = x[SyncTypeParamNames-21] - _ = x[SyncSignature-22] - _ = x[SyncParams-23] - _ = x[SyncParam-24] - _ = x[SyncCodeObj-25] - _ = x[SyncSym-26] - _ = x[SyncLocalIdent-27] - _ = x[SyncSelector-28] - _ = x[SyncPrivate-29] - _ = x[SyncFuncExt-30] - _ = x[SyncVarExt-31] - _ = x[SyncTypeExt-32] - _ = x[SyncPragma-33] - _ = x[SyncExprList-34] - _ = x[SyncExprs-35] - _ = x[SyncExpr-36] - _ = x[SyncExprType-37] - _ = x[SyncAssign-38] - _ = x[SyncOp-39] - _ = x[SyncFuncLit-40] - _ = x[SyncCompLit-41] - _ = x[SyncDecl-42] - _ = x[SyncFuncBody-43] - _ = x[SyncOpenScope-44] - _ = x[SyncCloseScope-45] - _ = x[SyncCloseAnotherScope-46] - _ = x[SyncDeclNames-47] - _ = x[SyncDeclName-48] - _ = x[SyncStmts-49] - _ = x[SyncBlockStmt-50] - _ = x[SyncIfStmt-51] - _ = x[SyncForStmt-52] - _ = x[SyncSwitchStmt-53] - _ = x[SyncRangeStmt-54] - _ = x[SyncCaseClause-55] - _ = x[SyncCommClause-56] - _ = x[SyncSelectStmt-57] - _ = x[SyncDecls-58] - _ = x[SyncLabeledStmt-59] - _ = x[SyncUseObjLocal-60] - _ = x[SyncAddLocal-61] - _ = x[SyncLinkname-62] - _ = x[SyncStmt1-63] - _ = x[SyncStmtsEnd-64] - _ = x[SyncLabel-65] - _ = x[SyncOptLabel-66] -} - -const _SyncMarker_name = 
"EOFBoolInt64Uint64StringValueValRelocsRelocUseRelocPublicPosPosBaseObjectObject1PkgPkgDefMethodTypeTypeIdxTypeParamNamesSignatureParamsParamCodeObjSymLocalIdentSelectorPrivateFuncExtVarExtTypeExtPragmaExprListExprsExprExprTypeAssignOpFuncLitCompLitDeclFuncBodyOpenScopeCloseScopeCloseAnotherScopeDeclNamesDeclNameStmtsBlockStmtIfStmtForStmtSwitchStmtRangeStmtCaseClauseCommClauseSelectStmtDeclsLabeledStmtUseObjLocalAddLocalLinknameStmt1StmtsEndLabelOptLabel" - -var _SyncMarker_index = [...]uint16{0, 3, 7, 12, 18, 24, 29, 32, 38, 43, 51, 57, 60, 67, 73, 80, 83, 89, 95, 99, 106, 120, 129, 135, 140, 147, 150, 160, 168, 175, 182, 188, 195, 201, 209, 214, 218, 226, 232, 234, 241, 248, 252, 260, 269, 279, 296, 305, 313, 318, 327, 333, 340, 350, 359, 369, 379, 389, 394, 405, 416, 424, 432, 437, 445, 450, 458} - -func (i SyncMarker) String() string { - i -= 1 - if i < 0 || i >= SyncMarker(len(_SyncMarker_index)-1) { - return "SyncMarker(" + strconv.FormatInt(int64(i+1), 10) + ")" - } - return _SyncMarker_name[_SyncMarker_index[i]:_SyncMarker_index[i+1]] -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/tokeninternal/BUILD.bazel b/go/extractor/vendor/golang.org/x/tools/internal/tokeninternal/BUILD.bazel deleted file mode 100644 index c0f6cc8fb13a..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/tokeninternal/BUILD.bazel +++ /dev/null @@ -1,11 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "tokeninternal", - srcs = ["tokeninternal.go"], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/tools/internal/tokeninternal", - importpath = "golang.org/x/tools/internal/tokeninternal", - visibility = ["//go/extractor/vendor/golang.org/x/tools:__subpackages__"], -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go b/go/extractor/vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go deleted file mode 100644 index 7e638ec24fcb..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// package tokeninternal provides access to some internal features of the token -// package. -package tokeninternal - -import ( - "fmt" - "go/token" - "sort" - "sync" - "unsafe" -) - -// GetLines returns the table of line-start offsets from a token.File. -func GetLines(file *token.File) []int { - // token.File has a Lines method on Go 1.21 and later. - if file, ok := (interface{})(file).(interface{ Lines() []int }); ok { - return file.Lines() - } - - // This declaration must match that of token.File. - // This creates a risk of dependency skew. - // For now we check that the size of the two - // declarations is the same, on the (fragile) assumption - // that future changes would add fields. 
- type tokenFile119 struct { - _ string - _ int - _ int - mu sync.Mutex // we're not complete monsters - lines []int - _ []struct{} - } - type tokenFile118 struct { - _ *token.FileSet // deleted in go1.19 - tokenFile119 - } - - type uP = unsafe.Pointer - switch unsafe.Sizeof(*file) { - case unsafe.Sizeof(tokenFile118{}): - var ptr *tokenFile118 - *(*uP)(uP(&ptr)) = uP(file) - ptr.mu.Lock() - defer ptr.mu.Unlock() - return ptr.lines - - case unsafe.Sizeof(tokenFile119{}): - var ptr *tokenFile119 - *(*uP)(uP(&ptr)) = uP(file) - ptr.mu.Lock() - defer ptr.mu.Unlock() - return ptr.lines - - default: - panic("unexpected token.File size") - } -} - -// AddExistingFiles adds the specified files to the FileSet if they -// are not already present. It panics if any pair of files in the -// resulting FileSet would overlap. -func AddExistingFiles(fset *token.FileSet, files []*token.File) { - // Punch through the FileSet encapsulation. - type tokenFileSet struct { - // This type remained essentially consistent from go1.16 to go1.21. - mutex sync.RWMutex - base int - files []*token.File - _ *token.File // changed to atomic.Pointer[token.File] in go1.19 - } - - // If the size of token.FileSet changes, this will fail to compile. - const delta = int64(unsafe.Sizeof(tokenFileSet{})) - int64(unsafe.Sizeof(token.FileSet{})) - var _ [-delta * delta]int - - type uP = unsafe.Pointer - var ptr *tokenFileSet - *(*uP)(uP(&ptr)) = uP(fset) - ptr.mutex.Lock() - defer ptr.mutex.Unlock() - - // Merge and sort. - newFiles := append(ptr.files, files...) - sort.Slice(newFiles, func(i, j int) bool { - return newFiles[i].Base() < newFiles[j].Base() - }) - - // Reject overlapping files. - // Discard adjacent identical files. - out := newFiles[:0] - for i, file := range newFiles { - if i > 0 { - prev := newFiles[i-1] - if file == prev { - continue - } - if prev.Base()+prev.Size()+1 > file.Base() { - panic(fmt.Sprintf("file %s (%d-%d) overlaps with file %s (%d-%d)", - prev.Name(), prev.Base(), prev.Base()+prev.Size(), - file.Name(), file.Base(), file.Base()+file.Size())) - } - } - out = append(out, file) - } - newFiles = out - - ptr.files = newFiles - - // Advance FileSet.Base(). - if len(newFiles) > 0 { - last := newFiles[len(newFiles)-1] - newBase := last.Base() + last.Size() + 1 - if ptr.base < newBase { - ptr.base = newBase - } - } -} - -// FileSetFor returns a new FileSet containing a sequence of new Files with -// the same base, size, and line as the input files, for use in APIs that -// require a FileSet. -// -// Precondition: the input files must be non-overlapping, and sorted in order -// of their Base. -func FileSetFor(files ...*token.File) *token.FileSet { - fset := token.NewFileSet() - for _, f := range files { - f2 := fset.AddFile(f.Name(), f.Base(), f.Size()) - lines := GetLines(f) - f2.SetLines(lines) - } - return fset -} - -// CloneFileSet creates a new FileSet holding all files in fset. It does not -// create copies of the token.Files in fset: they are added to the resulting -// FileSet unmodified. 
-func CloneFileSet(fset *token.FileSet) *token.FileSet { - var files []*token.File - fset.Iterate(func(f *token.File) bool { - files = append(files, f) - return true - }) - newFileSet := token.NewFileSet() - AddExistingFiles(newFileSet, files) - return newFileSet -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/typeparams/BUILD.bazel b/go/extractor/vendor/golang.org/x/tools/internal/typeparams/BUILD.bazel deleted file mode 100644 index 9c2dc20b6c65..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/typeparams/BUILD.bazel +++ /dev/null @@ -1,17 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "typeparams", - srcs = [ - "common.go", - "coretype.go", - "normalize.go", - "termlist.go", - "typeterm.go", - ], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/tools/internal/typeparams", - importpath = "golang.org/x/tools/internal/typeparams", - visibility = ["//go/extractor/vendor/golang.org/x/tools:__subpackages__"], -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/typeparams/common.go b/go/extractor/vendor/golang.org/x/tools/internal/typeparams/common.go deleted file mode 100644 index cdab9885314f..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/typeparams/common.go +++ /dev/null @@ -1,204 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package typeparams contains common utilities for writing tools that interact -// with generic Go code, as introduced with Go 1.18. -// -// Many of the types and functions in this package are proxies for the new APIs -// introduced in the standard library with Go 1.18. For example, the -// typeparams.Union type is an alias for go/types.Union, and the ForTypeSpec -// function returns the value of the go/ast.TypeSpec.TypeParams field. At Go -// versions older than 1.18 these helpers are implemented as stubs, allowing -// users of this package to write code that handles generic constructs inline, -// even if the Go version being used to compile does not support generics. -// -// Additionally, this package contains common utilities for working with the -// new generic constructs, to supplement the standard library APIs. Notably, -// the StructuralTerms API computes a minimal representation of the structural -// restrictions on a type parameter. -// -// An external version of these APIs is available in the -// golang.org/x/exp/typeparams module. -package typeparams - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" -) - -// UnpackIndexExpr extracts data from AST nodes that represent index -// expressions. -// -// For an ast.IndexExpr, the resulting indices slice will contain exactly one -// index expression. For an ast.IndexListExpr (go1.18+), it may have a variable -// number of index expressions. -// -// For nodes that don't represent index expressions, the first return value of -// UnpackIndexExpr will be nil. -func UnpackIndexExpr(n ast.Node) (x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack token.Pos) { - switch e := n.(type) { - case *ast.IndexExpr: - return e.X, e.Lbrack, []ast.Expr{e.Index}, e.Rbrack - case *ast.IndexListExpr: - return e.X, e.Lbrack, e.Indices, e.Rbrack - } - return nil, token.NoPos, nil, token.NoPos -} - -// PackIndexExpr returns an *ast.IndexExpr or *ast.IndexListExpr, depending on -// the cardinality of indices. 
Calling PackIndexExpr with len(indices) == 0 -// will panic. -func PackIndexExpr(x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack token.Pos) ast.Expr { - switch len(indices) { - case 0: - panic("empty indices") - case 1: - return &ast.IndexExpr{ - X: x, - Lbrack: lbrack, - Index: indices[0], - Rbrack: rbrack, - } - default: - return &ast.IndexListExpr{ - X: x, - Lbrack: lbrack, - Indices: indices, - Rbrack: rbrack, - } - } -} - -// IsTypeParam reports whether t is a type parameter. -func IsTypeParam(t types.Type) bool { - _, ok := t.(*types.TypeParam) - return ok -} - -// OriginMethod returns the origin method associated with the method fn. -// For methods on a non-generic receiver base type, this is just -// fn. However, for methods with a generic receiver, OriginMethod returns the -// corresponding method in the method set of the origin type. -// -// As a special case, if fn is not a method (has no receiver), OriginMethod -// returns fn. -func OriginMethod(fn *types.Func) *types.Func { - recv := fn.Type().(*types.Signature).Recv() - if recv == nil { - return fn - } - base := recv.Type() - p, isPtr := base.(*types.Pointer) - if isPtr { - base = p.Elem() - } - named, isNamed := base.(*types.Named) - if !isNamed { - // Receiver is a *types.Interface. - return fn - } - if named.TypeParams().Len() == 0 { - // Receiver base has no type parameters, so we can avoid the lookup below. - return fn - } - orig := named.Origin() - gfn, _, _ := types.LookupFieldOrMethod(orig, true, fn.Pkg(), fn.Name()) - - // This is a fix for a gopls crash (#60628) due to a go/types bug (#60634). In: - // package p - // type T *int - // func (*T) f() {} - // LookupFieldOrMethod(T, true, p, f)=nil, but NewMethodSet(*T)={(*T).f}. - // Here we make them consistent by force. - // (The go/types bug is general, but this workaround is reached only - // for generic T thanks to the early return above.) - if gfn == nil { - mset := types.NewMethodSet(types.NewPointer(orig)) - for i := 0; i < mset.Len(); i++ { - m := mset.At(i) - if m.Obj().Id() == fn.Id() { - gfn = m.Obj() - break - } - } - } - - // In golang/go#61196, we observe another crash, this time inexplicable. - if gfn == nil { - panic(fmt.Sprintf("missing origin method for %s.%s; named == origin: %t, named.NumMethods(): %d, origin.NumMethods(): %d", named, fn, named == orig, named.NumMethods(), orig.NumMethods())) - } - - return gfn.(*types.Func) -} - -// GenericAssignableTo is a generalization of types.AssignableTo that -// implements the following rule for uninstantiated generic types: -// -// If V and T are generic named types, then V is considered assignable to T if, -// for every possible instantation of V[A_1, ..., A_N], the instantiation -// T[A_1, ..., A_N] is valid and V[A_1, ..., A_N] implements T[A_1, ..., A_N]. -// -// If T has structural constraints, they must be satisfied by V. -// -// For example, consider the following type declarations: -// -// type Interface[T any] interface { -// Accept(T) -// } -// -// type Container[T any] struct { -// Element T -// } -// -// func (c Container[T]) Accept(t T) { c.Element = t } -// -// In this case, GenericAssignableTo reports that instantiations of Container -// are assignable to the corresponding instantiation of Interface. -func GenericAssignableTo(ctxt *types.Context, V, T types.Type) bool { - // If V and T are not both named, or do not have matching non-empty type - // parameter lists, fall back on types.AssignableTo. 
- - VN, Vnamed := V.(*types.Named) - TN, Tnamed := T.(*types.Named) - if !Vnamed || !Tnamed { - return types.AssignableTo(V, T) - } - - vtparams := VN.TypeParams() - ttparams := TN.TypeParams() - if vtparams.Len() == 0 || vtparams.Len() != ttparams.Len() || VN.TypeArgs().Len() != 0 || TN.TypeArgs().Len() != 0 { - return types.AssignableTo(V, T) - } - - // V and T have the same (non-zero) number of type params. Instantiate both - // with the type parameters of V. This must always succeed for V, and will - // succeed for T if and only if the type set of each type parameter of V is a - // subset of the type set of the corresponding type parameter of T, meaning - // that every instantiation of V corresponds to a valid instantiation of T. - - // Minor optimization: ensure we share a context across the two - // instantiations below. - if ctxt == nil { - ctxt = types.NewContext() - } - - var targs []types.Type - for i := 0; i < vtparams.Len(); i++ { - targs = append(targs, vtparams.At(i)) - } - - vinst, err := types.Instantiate(ctxt, V, targs, true) - if err != nil { - panic("type parameters should satisfy their own constraints") - } - - tinst, err := types.Instantiate(ctxt, T, targs, true) - if err != nil { - return false - } - - return types.AssignableTo(vinst, tinst) -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/typeparams/coretype.go b/go/extractor/vendor/golang.org/x/tools/internal/typeparams/coretype.go deleted file mode 100644 index 7ea8840eab7c..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/typeparams/coretype.go +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package typeparams - -import ( - "go/types" -) - -// CoreType returns the core type of T or nil if T does not have a core type. -// -// See https://go.dev/ref/spec#Core_types for the definition of a core type. -func CoreType(T types.Type) types.Type { - U := T.Underlying() - if _, ok := U.(*types.Interface); !ok { - return U // for non-interface types, - } - - terms, err := _NormalTerms(U) - if len(terms) == 0 || err != nil { - // len(terms) -> empty type set of interface. - // err != nil => U is invalid, exceeds complexity bounds, or has an empty type set. - return nil // no core type. - } - - U = terms[0].Type().Underlying() - var identical int // i in [0,identical) => Identical(U, terms[i].Type().Underlying()) - for identical = 1; identical < len(terms); identical++ { - if !types.Identical(U, terms[identical].Type().Underlying()) { - break - } - } - - if identical == len(terms) { - // https://go.dev/ref/spec#Core_types - // "There is a single type U which is the underlying type of all types in the type set of T" - return U - } - ch, ok := U.(*types.Chan) - if !ok { - return nil // no core type as identical < len(terms) and U is not a channel. - } - // https://go.dev/ref/spec#Core_types - // "the type chan E if T contains only bidirectional channels, or the type chan<- E or - // <-chan E depending on the direction of the directional channels present." - for chans := identical; chans < len(terms); chans++ { - curr, ok := terms[chans].Type().Underlying().(*types.Chan) - if !ok { - return nil - } - if !types.Identical(ch.Elem(), curr.Elem()) { - return nil // channel elements are not identical. - } - if ch.Dir() == types.SendRecv { - // ch is bidirectional. We can safely always use curr's direction. 
- ch = curr - } else if curr.Dir() != types.SendRecv && ch.Dir() != curr.Dir() { - // ch and curr are not bidirectional and not the same direction. - return nil - } - } - return ch -} - -// _NormalTerms returns a slice of terms representing the normalized structural -// type restrictions of a type, if any. -// -// For all types other than *types.TypeParam, *types.Interface, and -// *types.Union, this is just a single term with Tilde() == false and -// Type() == typ. For *types.TypeParam, *types.Interface, and *types.Union, see -// below. -// -// Structural type restrictions of a type parameter are created via -// non-interface types embedded in its constraint interface (directly, or via a -// chain of interface embeddings). For example, in the declaration type -// T[P interface{~int; m()}] int the structural restriction of the type -// parameter P is ~int. -// -// With interface embedding and unions, the specification of structural type -// restrictions may be arbitrarily complex. For example, consider the -// following: -// -// type A interface{ ~string|~[]byte } -// -// type B interface{ int|string } -// -// type C interface { ~string|~int } -// -// type T[P interface{ A|B; C }] int -// -// In this example, the structural type restriction of P is ~string|int: A|B -// expands to ~string|~[]byte|int|string, which reduces to ~string|~[]byte|int, -// which when intersected with C (~string|~int) yields ~string|int. -// -// _NormalTerms computes these expansions and reductions, producing a -// "normalized" form of the embeddings. A structural restriction is normalized -// if it is a single union containing no interface terms, and is minimal in the -// sense that removing any term changes the set of types satisfying the -// constraint. It is left as a proof for the reader that, modulo sorting, there -// is exactly one such normalized form. -// -// Because the minimal representation always takes this form, _NormalTerms -// returns a slice of tilde terms corresponding to the terms of the union in -// the normalized structural restriction. An error is returned if the type is -// invalid, exceeds complexity bounds, or has an empty type set. In the latter -// case, _NormalTerms returns ErrEmptyTypeSet. -// -// _NormalTerms makes no guarantees about the order of terms, except that it -// is deterministic. -func _NormalTerms(typ types.Type) ([]*types.Term, error) { - switch typ := typ.(type) { - case *types.TypeParam: - return StructuralTerms(typ) - case *types.Union: - return UnionTermSet(typ) - case *types.Interface: - return InterfaceTermSet(typ) - default: - return []*types.Term{types.NewTerm(false, typ)}, nil - } -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/typeparams/normalize.go b/go/extractor/vendor/golang.org/x/tools/internal/typeparams/normalize.go deleted file mode 100644 index 93c80fdc96ce..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/typeparams/normalize.go +++ /dev/null @@ -1,218 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package typeparams - -import ( - "errors" - "fmt" - "go/types" - "os" - "strings" -) - -//go:generate go run copytermlist.go - -const debug = false - -var ErrEmptyTypeSet = errors.New("empty type set") - -// StructuralTerms returns a slice of terms representing the normalized -// structural type restrictions of a type parameter, if any. 
-// -// Structural type restrictions of a type parameter are created via -// non-interface types embedded in its constraint interface (directly, or via a -// chain of interface embeddings). For example, in the declaration -// -// type T[P interface{~int; m()}] int -// -// the structural restriction of the type parameter P is ~int. -// -// With interface embedding and unions, the specification of structural type -// restrictions may be arbitrarily complex. For example, consider the -// following: -// -// type A interface{ ~string|~[]byte } -// -// type B interface{ int|string } -// -// type C interface { ~string|~int } -// -// type T[P interface{ A|B; C }] int -// -// In this example, the structural type restriction of P is ~string|int: A|B -// expands to ~string|~[]byte|int|string, which reduces to ~string|~[]byte|int, -// which when intersected with C (~string|~int) yields ~string|int. -// -// StructuralTerms computes these expansions and reductions, producing a -// "normalized" form of the embeddings. A structural restriction is normalized -// if it is a single union containing no interface terms, and is minimal in the -// sense that removing any term changes the set of types satisfying the -// constraint. It is left as a proof for the reader that, modulo sorting, there -// is exactly one such normalized form. -// -// Because the minimal representation always takes this form, StructuralTerms -// returns a slice of tilde terms corresponding to the terms of the union in -// the normalized structural restriction. An error is returned if the -// constraint interface is invalid, exceeds complexity bounds, or has an empty -// type set. In the latter case, StructuralTerms returns ErrEmptyTypeSet. -// -// StructuralTerms makes no guarantees about the order of terms, except that it -// is deterministic. -func StructuralTerms(tparam *types.TypeParam) ([]*types.Term, error) { - constraint := tparam.Constraint() - if constraint == nil { - return nil, fmt.Errorf("%s has nil constraint", tparam) - } - iface, _ := constraint.Underlying().(*types.Interface) - if iface == nil { - return nil, fmt.Errorf("constraint is %T, not *types.Interface", constraint.Underlying()) - } - return InterfaceTermSet(iface) -} - -// InterfaceTermSet computes the normalized terms for a constraint interface, -// returning an error if the term set cannot be computed or is empty. In the -// latter case, the error will be ErrEmptyTypeSet. -// -// See the documentation of StructuralTerms for more information on -// normalization. -func InterfaceTermSet(iface *types.Interface) ([]*types.Term, error) { - return computeTermSet(iface) -} - -// UnionTermSet computes the normalized terms for a union, returning an error -// if the term set cannot be computed or is empty. In the latter case, the -// error will be ErrEmptyTypeSet. -// -// See the documentation of StructuralTerms for more information on -// normalization. -func UnionTermSet(union *types.Union) ([]*types.Term, error) { - return computeTermSet(union) -} - -func computeTermSet(typ types.Type) ([]*types.Term, error) { - tset, err := computeTermSetInternal(typ, make(map[types.Type]*termSet), 0) - if err != nil { - return nil, err - } - if tset.terms.isEmpty() { - return nil, ErrEmptyTypeSet - } - if tset.terms.isAll() { - return nil, nil - } - var terms []*types.Term - for _, term := range tset.terms { - terms = append(terms, types.NewTerm(term.tilde, term.typ)) - } - return terms, nil -} - -// A termSet holds the normalized set of terms for a given type. 
-// -// The name termSet is intentionally distinct from 'type set': a type set is -// all types that implement a type (and includes method restrictions), whereas -// a term set just represents the structural restrictions on a type. -type termSet struct { - complete bool - terms termlist -} - -func indentf(depth int, format string, args ...interface{}) { - fmt.Fprintf(os.Stderr, strings.Repeat(".", depth)+format+"\n", args...) -} - -func computeTermSetInternal(t types.Type, seen map[types.Type]*termSet, depth int) (res *termSet, err error) { - if t == nil { - panic("nil type") - } - - if debug { - indentf(depth, "%s", t.String()) - defer func() { - if err != nil { - indentf(depth, "=> %s", err) - } else { - indentf(depth, "=> %s", res.terms.String()) - } - }() - } - - const maxTermCount = 100 - if tset, ok := seen[t]; ok { - if !tset.complete { - return nil, fmt.Errorf("cycle detected in the declaration of %s", t) - } - return tset, nil - } - - // Mark the current type as seen to avoid infinite recursion. - tset := new(termSet) - defer func() { - tset.complete = true - }() - seen[t] = tset - - switch u := t.Underlying().(type) { - case *types.Interface: - // The term set of an interface is the intersection of the term sets of its - // embedded types. - tset.terms = allTermlist - for i := 0; i < u.NumEmbeddeds(); i++ { - embedded := u.EmbeddedType(i) - if _, ok := embedded.Underlying().(*types.TypeParam); ok { - return nil, fmt.Errorf("invalid embedded type %T", embedded) - } - tset2, err := computeTermSetInternal(embedded, seen, depth+1) - if err != nil { - return nil, err - } - tset.terms = tset.terms.intersect(tset2.terms) - } - case *types.Union: - // The term set of a union is the union of term sets of its terms. - tset.terms = nil - for i := 0; i < u.Len(); i++ { - t := u.Term(i) - var terms termlist - switch t.Type().Underlying().(type) { - case *types.Interface: - tset2, err := computeTermSetInternal(t.Type(), seen, depth+1) - if err != nil { - return nil, err - } - terms = tset2.terms - case *types.TypeParam, *types.Union: - // A stand-alone type parameter or union is not permitted as union - // term. - return nil, fmt.Errorf("invalid union term %T", t) - default: - if t.Type() == types.Typ[types.Invalid] { - continue - } - terms = termlist{{t.Tilde(), t.Type()}} - } - tset.terms = tset.terms.union(terms) - if len(tset.terms) > maxTermCount { - return nil, fmt.Errorf("exceeded max term count %d", maxTermCount) - } - } - case *types.TypeParam: - panic("unreachable") - default: - // For all other types, the term set is just a single non-tilde term - // holding the type itself. - if u != types.Typ[types.Invalid] { - tset.terms = termlist{{false, t}} - } - } - return tset, nil -} - -// under is a facade for the go/types internal function of the same name. It is -// used by typeterm.go. -func under(t types.Type) types.Type { - return t.Underlying() -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/typeparams/termlist.go b/go/extractor/vendor/golang.org/x/tools/internal/typeparams/termlist.go deleted file mode 100644 index cbd12f801314..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/typeparams/termlist.go +++ /dev/null @@ -1,163 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by copytermlist.go DO NOT EDIT. 
- -package typeparams - -import ( - "bytes" - "go/types" -) - -// A termlist represents the type set represented by the union -// t1 βˆͺ y2 βˆͺ ... tn of the type sets of the terms t1 to tn. -// A termlist is in normal form if all terms are disjoint. -// termlist operations don't require the operands to be in -// normal form. -type termlist []*term - -// allTermlist represents the set of all types. -// It is in normal form. -var allTermlist = termlist{new(term)} - -// String prints the termlist exactly (without normalization). -func (xl termlist) String() string { - if len(xl) == 0 { - return "βˆ…" - } - var buf bytes.Buffer - for i, x := range xl { - if i > 0 { - buf.WriteString(" | ") - } - buf.WriteString(x.String()) - } - return buf.String() -} - -// isEmpty reports whether the termlist xl represents the empty set of types. -func (xl termlist) isEmpty() bool { - // If there's a non-nil term, the entire list is not empty. - // If the termlist is in normal form, this requires at most - // one iteration. - for _, x := range xl { - if x != nil { - return false - } - } - return true -} - -// isAll reports whether the termlist xl represents the set of all types. -func (xl termlist) isAll() bool { - // If there's a 𝓀 term, the entire list is 𝓀. - // If the termlist is in normal form, this requires at most - // one iteration. - for _, x := range xl { - if x != nil && x.typ == nil { - return true - } - } - return false -} - -// norm returns the normal form of xl. -func (xl termlist) norm() termlist { - // Quadratic algorithm, but good enough for now. - // TODO(gri) fix asymptotic performance - used := make([]bool, len(xl)) - var rl termlist - for i, xi := range xl { - if xi == nil || used[i] { - continue - } - for j := i + 1; j < len(xl); j++ { - xj := xl[j] - if xj == nil || used[j] { - continue - } - if u1, u2 := xi.union(xj); u2 == nil { - // If we encounter a 𝓀 term, the entire list is 𝓀. - // Exit early. - // (Note that this is not just an optimization; - // if we continue, we may end up with a 𝓀 term - // and other terms and the result would not be - // in normal form.) - if u1.typ == nil { - return allTermlist - } - xi = u1 - used[j] = true // xj is now unioned into xi - ignore it in future iterations - } - } - rl = append(rl, xi) - } - return rl -} - -// union returns the union xl βˆͺ yl. -func (xl termlist) union(yl termlist) termlist { - return append(xl, yl...).norm() -} - -// intersect returns the intersection xl ∩ yl. -func (xl termlist) intersect(yl termlist) termlist { - if xl.isEmpty() || yl.isEmpty() { - return nil - } - - // Quadratic algorithm, but good enough for now. - // TODO(gri) fix asymptotic performance - var rl termlist - for _, x := range xl { - for _, y := range yl { - if r := x.intersect(y); r != nil { - rl = append(rl, r) - } - } - } - return rl.norm() -} - -// equal reports whether xl and yl represent the same type set. -func (xl termlist) equal(yl termlist) bool { - // TODO(gri) this should be more efficient - return xl.subsetOf(yl) && yl.subsetOf(xl) -} - -// includes reports whether t ∈ xl. -func (xl termlist) includes(t types.Type) bool { - for _, x := range xl { - if x.includes(t) { - return true - } - } - return false -} - -// supersetOf reports whether y βŠ† xl. -func (xl termlist) supersetOf(y *term) bool { - for _, x := range xl { - if y.subsetOf(x) { - return true - } - } - return false -} - -// subsetOf reports whether xl βŠ† yl. 
-func (xl termlist) subsetOf(yl termlist) bool { - if yl.isEmpty() { - return xl.isEmpty() - } - - // each term x of xl must be a subset of yl - for _, x := range xl { - if !yl.supersetOf(x) { - return false // x is not a subset yl - } - } - return true -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/typeparams/typeterm.go b/go/extractor/vendor/golang.org/x/tools/internal/typeparams/typeterm.go deleted file mode 100644 index 7350bb702a17..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/typeparams/typeterm.go +++ /dev/null @@ -1,169 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Code generated by copytermlist.go DO NOT EDIT. - -package typeparams - -import "go/types" - -// A term describes elementary type sets: -// -// βˆ…: (*term)(nil) == βˆ… // set of no types (empty set) -// 𝓀: &term{} == 𝓀 // set of all types (𝓀niverse) -// T: &term{false, T} == {T} // set of type T -// ~t: &term{true, t} == {t' | under(t') == t} // set of types with underlying type t -type term struct { - tilde bool // valid if typ != nil - typ types.Type -} - -func (x *term) String() string { - switch { - case x == nil: - return "βˆ…" - case x.typ == nil: - return "𝓀" - case x.tilde: - return "~" + x.typ.String() - default: - return x.typ.String() - } -} - -// equal reports whether x and y represent the same type set. -func (x *term) equal(y *term) bool { - // easy cases - switch { - case x == nil || y == nil: - return x == y - case x.typ == nil || y.typ == nil: - return x.typ == y.typ - } - // βˆ… βŠ‚ x, y βŠ‚ 𝓀 - - return x.tilde == y.tilde && types.Identical(x.typ, y.typ) -} - -// union returns the union x βˆͺ y: zero, one, or two non-nil terms. -func (x *term) union(y *term) (_, _ *term) { - // easy cases - switch { - case x == nil && y == nil: - return nil, nil // βˆ… βˆͺ βˆ… == βˆ… - case x == nil: - return y, nil // βˆ… βˆͺ y == y - case y == nil: - return x, nil // x βˆͺ βˆ… == x - case x.typ == nil: - return x, nil // 𝓀 βˆͺ y == 𝓀 - case y.typ == nil: - return y, nil // x βˆͺ 𝓀 == 𝓀 - } - // βˆ… βŠ‚ x, y βŠ‚ 𝓀 - - if x.disjoint(y) { - return x, y // x βˆͺ y == (x, y) if x ∩ y == βˆ… - } - // x.typ == y.typ - - // ~t βˆͺ ~t == ~t - // ~t βˆͺ T == ~t - // T βˆͺ ~t == ~t - // T βˆͺ T == T - if x.tilde || !y.tilde { - return x, nil - } - return y, nil -} - -// intersect returns the intersection x ∩ y. -func (x *term) intersect(y *term) *term { - // easy cases - switch { - case x == nil || y == nil: - return nil // βˆ… ∩ y == βˆ… and ∩ βˆ… == βˆ… - case x.typ == nil: - return y // 𝓀 ∩ y == y - case y.typ == nil: - return x // x ∩ 𝓀 == x - } - // βˆ… βŠ‚ x, y βŠ‚ 𝓀 - - if x.disjoint(y) { - return nil // x ∩ y == βˆ… if x ∩ y == βˆ… - } - // x.typ == y.typ - - // ~t ∩ ~t == ~t - // ~t ∩ T == T - // T ∩ ~t == T - // T ∩ T == T - if !x.tilde || y.tilde { - return x - } - return y -} - -// includes reports whether t ∈ x. -func (x *term) includes(t types.Type) bool { - // easy cases - switch { - case x == nil: - return false // t ∈ βˆ… == false - case x.typ == nil: - return true // t ∈ 𝓀 == true - } - // βˆ… βŠ‚ x βŠ‚ 𝓀 - - u := t - if x.tilde { - u = under(u) - } - return types.Identical(x.typ, u) -} - -// subsetOf reports whether x βŠ† y. 
-func (x *term) subsetOf(y *term) bool { - // easy cases - switch { - case x == nil: - return true // βˆ… βŠ† y == true - case y == nil: - return false // x βŠ† βˆ… == false since x != βˆ… - case y.typ == nil: - return true // x βŠ† 𝓀 == true - case x.typ == nil: - return false // 𝓀 βŠ† y == false since y != 𝓀 - } - // βˆ… βŠ‚ x, y βŠ‚ 𝓀 - - if x.disjoint(y) { - return false // x βŠ† y == false if x ∩ y == βˆ… - } - // x.typ == y.typ - - // ~t βŠ† ~t == true - // ~t βŠ† T == false - // T βŠ† ~t == true - // T βŠ† T == true - return !x.tilde || y.tilde -} - -// disjoint reports whether x ∩ y == βˆ…. -// x.typ and y.typ must not be nil. -func (x *term) disjoint(y *term) bool { - if debug && (x.typ == nil || y.typ == nil) { - panic("invalid argument(s)") - } - ux := x.typ - if y.tilde { - ux = under(ux) - } - uy := y.typ - if x.tilde { - uy = under(uy) - } - return !types.Identical(ux, uy) -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/typesinternal/BUILD.bazel b/go/extractor/vendor/golang.org/x/tools/internal/typesinternal/BUILD.bazel deleted file mode 100644 index 653752ab7152..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/typesinternal/BUILD.bazel +++ /dev/null @@ -1,16 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "typesinternal", - srcs = [ - "errorcode.go", - "errorcode_string.go", - "types.go", - "types_118.go", - ], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/tools/internal/typesinternal", - importpath = "golang.org/x/tools/internal/typesinternal", - visibility = ["//go/extractor/vendor/golang.org/x/tools:__subpackages__"], -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go b/go/extractor/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go deleted file mode 100644 index 07484073a57d..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go +++ /dev/null @@ -1,1560 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package typesinternal - -//go:generate stringer -type=ErrorCode - -type ErrorCode int - -// This file defines the error codes that can be produced during type-checking. -// Collectively, these codes provide an identifier that may be used to -// implement special handling for certain types of errors. -// -// Error codes should be fine-grained enough that the exact nature of the error -// can be easily determined, but coarse enough that they are not an -// implementation detail of the type checking algorithm. As a rule-of-thumb, -// errors should be considered equivalent if there is a theoretical refactoring -// of the type checker in which they are emitted in exactly one place. For -// example, the type checker emits different error messages for "too many -// arguments" and "too few arguments", but one can imagine an alternative type -// checker where this check instead just emits a single "wrong number of -// arguments", so these errors should have the same code. -// -// Error code names should be as brief as possible while retaining accuracy and -// distinctiveness. In most cases names should start with an adjective -// describing the nature of the error (e.g. "invalid", "unused", "misplaced"), -// and end with a noun identifying the relevant language object. 
For example, -// "DuplicateDecl" or "InvalidSliceExpr". For brevity, naming follows the -// convention that "bad" implies a problem with syntax, and "invalid" implies a -// problem with types. - -const ( - // InvalidSyntaxTree occurs if an invalid syntax tree is provided - // to the type checker. It should never happen. - InvalidSyntaxTree ErrorCode = -1 -) - -const ( - _ ErrorCode = iota - - // Test is reserved for errors that only apply while in self-test mode. - Test - - /* package names */ - - // BlankPkgName occurs when a package name is the blank identifier "_". - // - // Per the spec: - // "The PackageName must not be the blank identifier." - BlankPkgName - - // MismatchedPkgName occurs when a file's package name doesn't match the - // package name already established by other files. - MismatchedPkgName - - // InvalidPkgUse occurs when a package identifier is used outside of a - // selector expression. - // - // Example: - // import "fmt" - // - // var _ = fmt - InvalidPkgUse - - /* imports */ - - // BadImportPath occurs when an import path is not valid. - BadImportPath - - // BrokenImport occurs when importing a package fails. - // - // Example: - // import "amissingpackage" - BrokenImport - - // ImportCRenamed occurs when the special import "C" is renamed. "C" is a - // pseudo-package, and must not be renamed. - // - // Example: - // import _ "C" - ImportCRenamed - - // UnusedImport occurs when an import is unused. - // - // Example: - // import "fmt" - // - // func main() {} - UnusedImport - - /* initialization */ - - // InvalidInitCycle occurs when an invalid cycle is detected within the - // initialization graph. - // - // Example: - // var x int = f() - // - // func f() int { return x } - InvalidInitCycle - - /* decls */ - - // DuplicateDecl occurs when an identifier is declared multiple times. - // - // Example: - // var x = 1 - // var x = 2 - DuplicateDecl - - // InvalidDeclCycle occurs when a declaration cycle is not valid. - // - // Example: - // import "unsafe" - // - // type T struct { - // a [n]int - // } - // - // var n = unsafe.Sizeof(T{}) - InvalidDeclCycle - - // InvalidTypeCycle occurs when a cycle in type definitions results in a - // type that is not well-defined. - // - // Example: - // import "unsafe" - // - // type T [unsafe.Sizeof(T{})]int - InvalidTypeCycle - - /* decls > const */ - - // InvalidConstInit occurs when a const declaration has a non-constant - // initializer. - // - // Example: - // var x int - // const _ = x - InvalidConstInit - - // InvalidConstVal occurs when a const value cannot be converted to its - // target type. - // - // TODO(findleyr): this error code and example are not very clear. Consider - // removing it. - // - // Example: - // const _ = 1 << "hello" - InvalidConstVal - - // InvalidConstType occurs when the underlying type in a const declaration - // is not a valid constant type. - // - // Example: - // const c *int = 4 - InvalidConstType - - /* decls > var (+ other variable assignment codes) */ - - // UntypedNilUse occurs when the predeclared (untyped) value nil is used to - // initialize a variable declared without an explicit type. - // - // Example: - // var x = nil - UntypedNilUse - - // WrongAssignCount occurs when the number of values on the right-hand side - // of an assignment or or initialization expression does not match the number - // of variables on the left-hand side. 
- // - // Example: - // var x = 1, 2 - WrongAssignCount - - // UnassignableOperand occurs when the left-hand side of an assignment is - // not assignable. - // - // Example: - // func f() { - // const c = 1 - // c = 2 - // } - UnassignableOperand - - // NoNewVar occurs when a short variable declaration (':=') does not declare - // new variables. - // - // Example: - // func f() { - // x := 1 - // x := 2 - // } - NoNewVar - - // MultiValAssignOp occurs when an assignment operation (+=, *=, etc) does - // not have single-valued left-hand or right-hand side. - // - // Per the spec: - // "In assignment operations, both the left- and right-hand expression lists - // must contain exactly one single-valued expression" - // - // Example: - // func f() int { - // x, y := 1, 2 - // x, y += 1 - // return x + y - // } - MultiValAssignOp - - // InvalidIfaceAssign occurs when a value of type T is used as an - // interface, but T does not implement a method of the expected interface. - // - // Example: - // type I interface { - // f() - // } - // - // type T int - // - // var x I = T(1) - InvalidIfaceAssign - - // InvalidChanAssign occurs when a chan assignment is invalid. - // - // Per the spec, a value x is assignable to a channel type T if: - // "x is a bidirectional channel value, T is a channel type, x's type V and - // T have identical element types, and at least one of V or T is not a - // defined type." - // - // Example: - // type T1 chan int - // type T2 chan int - // - // var x T1 - // // Invalid assignment because both types are named - // var _ T2 = x - InvalidChanAssign - - // IncompatibleAssign occurs when the type of the right-hand side expression - // in an assignment cannot be assigned to the type of the variable being - // assigned. - // - // Example: - // var x []int - // var _ int = x - IncompatibleAssign - - // UnaddressableFieldAssign occurs when trying to assign to a struct field - // in a map value. - // - // Example: - // func f() { - // m := make(map[string]struct{i int}) - // m["foo"].i = 42 - // } - UnaddressableFieldAssign - - /* decls > type (+ other type expression codes) */ - - // NotAType occurs when the identifier used as the underlying type in a type - // declaration or the right-hand side of a type alias does not denote a type. - // - // Example: - // var S = 2 - // - // type T S - NotAType - - // InvalidArrayLen occurs when an array length is not a constant value. - // - // Example: - // var n = 3 - // var _ = [n]int{} - InvalidArrayLen - - // BlankIfaceMethod occurs when a method name is '_'. - // - // Per the spec: - // "The name of each explicitly specified method must be unique and not - // blank." - // - // Example: - // type T interface { - // _(int) - // } - BlankIfaceMethod - - // IncomparableMapKey occurs when a map key type does not support the == and - // != operators. - // - // Per the spec: - // "The comparison operators == and != must be fully defined for operands of - // the key type; thus the key type must not be a function, map, or slice." - // - // Example: - // var x map[T]int - // - // type T []int - IncomparableMapKey - - // InvalidIfaceEmbed occurs when a non-interface type is embedded in an - // interface. - // - // Example: - // type T struct {} - // - // func (T) m() - // - // type I interface { - // T - // } - InvalidIfaceEmbed - - // InvalidPtrEmbed occurs when an embedded field is of the pointer form *T, - // and T itself is itself a pointer, an unsafe.Pointer, or an interface. 
- // - // Per the spec: - // "An embedded field must be specified as a type name T or as a pointer to - // a non-interface type name *T, and T itself may not be a pointer type." - // - // Example: - // type T *int - // - // type S struct { - // *T - // } - InvalidPtrEmbed - - /* decls > func and method */ - - // BadRecv occurs when a method declaration does not have exactly one - // receiver parameter. - // - // Example: - // func () _() {} - BadRecv - - // InvalidRecv occurs when a receiver type expression is not of the form T - // or *T, or T is a pointer type. - // - // Example: - // type T struct {} - // - // func (**T) m() {} - InvalidRecv - - // DuplicateFieldAndMethod occurs when an identifier appears as both a field - // and method name. - // - // Example: - // type T struct { - // m int - // } - // - // func (T) m() {} - DuplicateFieldAndMethod - - // DuplicateMethod occurs when two methods on the same receiver type have - // the same name. - // - // Example: - // type T struct {} - // func (T) m() {} - // func (T) m(i int) int { return i } - DuplicateMethod - - /* decls > special */ - - // InvalidBlank occurs when a blank identifier is used as a value or type. - // - // Per the spec: - // "The blank identifier may appear as an operand only on the left-hand side - // of an assignment." - // - // Example: - // var x = _ - InvalidBlank - - // InvalidIota occurs when the predeclared identifier iota is used outside - // of a constant declaration. - // - // Example: - // var x = iota - InvalidIota - - // MissingInitBody occurs when an init function is missing its body. - // - // Example: - // func init() - MissingInitBody - - // InvalidInitSig occurs when an init function declares parameters or - // results. - // - // Example: - // func init() int { return 1 } - InvalidInitSig - - // InvalidInitDecl occurs when init is declared as anything other than a - // function. - // - // Example: - // var init = 1 - InvalidInitDecl - - // InvalidMainDecl occurs when main is declared as anything other than a - // function, in a main package. - InvalidMainDecl - - /* exprs */ - - // TooManyValues occurs when a function returns too many values for the - // expression context in which it is used. - // - // Example: - // func ReturnTwo() (int, int) { - // return 1, 2 - // } - // - // var x = ReturnTwo() - TooManyValues - - // NotAnExpr occurs when a type expression is used where a value expression - // is expected. - // - // Example: - // type T struct {} - // - // func f() { - // T - // } - NotAnExpr - - /* exprs > const */ - - // TruncatedFloat occurs when a float constant is truncated to an integer - // value. - // - // Example: - // var _ int = 98.6 - TruncatedFloat - - // NumericOverflow occurs when a numeric constant overflows its target type. - // - // Example: - // var x int8 = 1000 - NumericOverflow - - /* exprs > operation */ - - // UndefinedOp occurs when an operator is not defined for the type(s) used - // in an operation. - // - // Example: - // var c = "a" - "b" - UndefinedOp - - // MismatchedTypes occurs when operand types are incompatible in a binary - // operation. - // - // Example: - // var a = "hello" - // var b = 1 - // var c = a - b - MismatchedTypes - - // DivByZero occurs when a division operation is provable at compile - // time to be a division by zero. - // - // Example: - // const divisor = 0 - // var x int = 1/divisor - DivByZero - - // NonNumericIncDec occurs when an increment or decrement operator is - // applied to a non-numeric value. 
- // - // Example: - // func f() { - // var c = "c" - // c++ - // } - NonNumericIncDec - - /* exprs > ptr */ - - // UnaddressableOperand occurs when the & operator is applied to an - // unaddressable expression. - // - // Example: - // var x = &1 - UnaddressableOperand - - // InvalidIndirection occurs when a non-pointer value is indirected via the - // '*' operator. - // - // Example: - // var x int - // var y = *x - InvalidIndirection - - /* exprs > [] */ - - // NonIndexableOperand occurs when an index operation is applied to a value - // that cannot be indexed. - // - // Example: - // var x = 1 - // var y = x[1] - NonIndexableOperand - - // InvalidIndex occurs when an index argument is not of integer type, - // negative, or out-of-bounds. - // - // Example: - // var s = [...]int{1,2,3} - // var x = s[5] - // - // Example: - // var s = []int{1,2,3} - // var _ = s[-1] - // - // Example: - // var s = []int{1,2,3} - // var i string - // var _ = s[i] - InvalidIndex - - // SwappedSliceIndices occurs when constant indices in a slice expression - // are decreasing in value. - // - // Example: - // var _ = []int{1,2,3}[2:1] - SwappedSliceIndices - - /* operators > slice */ - - // NonSliceableOperand occurs when a slice operation is applied to a value - // whose type is not sliceable, or is unaddressable. - // - // Example: - // var x = [...]int{1, 2, 3}[:1] - // - // Example: - // var x = 1 - // var y = 1[:1] - NonSliceableOperand - - // InvalidSliceExpr occurs when a three-index slice expression (a[x:y:z]) is - // applied to a string. - // - // Example: - // var s = "hello" - // var x = s[1:2:3] - InvalidSliceExpr - - /* exprs > shift */ - - // InvalidShiftCount occurs when the right-hand side of a shift operation is - // either non-integer, negative, or too large. - // - // Example: - // var ( - // x string - // y int = 1 << x - // ) - InvalidShiftCount - - // InvalidShiftOperand occurs when the shifted operand is not an integer. - // - // Example: - // var s = "hello" - // var x = s << 2 - InvalidShiftOperand - - /* exprs > chan */ - - // InvalidReceive occurs when there is a channel receive from a value that - // is either not a channel, or is a send-only channel. - // - // Example: - // func f() { - // var x = 1 - // <-x - // } - InvalidReceive - - // InvalidSend occurs when there is a channel send to a value that is not a - // channel, or is a receive-only channel. - // - // Example: - // func f() { - // var x = 1 - // x <- "hello!" - // } - InvalidSend - - /* exprs > literal */ - - // DuplicateLitKey occurs when an index is duplicated in a slice, array, or - // map literal. - // - // Example: - // var _ = []int{0:1, 0:2} - // - // Example: - // var _ = map[string]int{"a": 1, "a": 2} - DuplicateLitKey - - // MissingLitKey occurs when a map literal is missing a key expression. - // - // Example: - // var _ = map[string]int{1} - MissingLitKey - - // InvalidLitIndex occurs when the key in a key-value element of a slice or - // array literal is not an integer constant. - // - // Example: - // var i = 0 - // var x = []string{i: "world"} - InvalidLitIndex - - // OversizeArrayLit occurs when an array literal exceeds its length. - // - // Example: - // var _ = [2]int{1,2,3} - OversizeArrayLit - - // MixedStructLit occurs when a struct literal contains a mix of positional - // and named elements. - // - // Example: - // var _ = struct{i, j int}{i: 1, 2} - MixedStructLit - - // InvalidStructLit occurs when a positional struct literal has an incorrect - // number of values. 
- // - // Example: - // var _ = struct{i, j int}{1,2,3} - InvalidStructLit - - // MissingLitField occurs when a struct literal refers to a field that does - // not exist on the struct type. - // - // Example: - // var _ = struct{i int}{j: 2} - MissingLitField - - // DuplicateLitField occurs when a struct literal contains duplicated - // fields. - // - // Example: - // var _ = struct{i int}{i: 1, i: 2} - DuplicateLitField - - // UnexportedLitField occurs when a positional struct literal implicitly - // assigns an unexported field of an imported type. - UnexportedLitField - - // InvalidLitField occurs when a field name is not a valid identifier. - // - // Example: - // var _ = struct{i int}{1: 1} - InvalidLitField - - // UntypedLit occurs when a composite literal omits a required type - // identifier. - // - // Example: - // type outer struct{ - // inner struct { i int } - // } - // - // var _ = outer{inner: {1}} - UntypedLit - - // InvalidLit occurs when a composite literal expression does not match its - // type. - // - // Example: - // type P *struct{ - // x int - // } - // var _ = P {} - InvalidLit - - /* exprs > selector */ - - // AmbiguousSelector occurs when a selector is ambiguous. - // - // Example: - // type E1 struct { i int } - // type E2 struct { i int } - // type T struct { E1; E2 } - // - // var x T - // var _ = x.i - AmbiguousSelector - - // UndeclaredImportedName occurs when a package-qualified identifier is - // undeclared by the imported package. - // - // Example: - // import "go/types" - // - // var _ = types.NotAnActualIdentifier - UndeclaredImportedName - - // UnexportedName occurs when a selector refers to an unexported identifier - // of an imported package. - // - // Example: - // import "reflect" - // - // type _ reflect.flag - UnexportedName - - // UndeclaredName occurs when an identifier is not declared in the current - // scope. - // - // Example: - // var x T - UndeclaredName - - // MissingFieldOrMethod occurs when a selector references a field or method - // that does not exist. - // - // Example: - // type T struct {} - // - // var x = T{}.f - MissingFieldOrMethod - - /* exprs > ... */ - - // BadDotDotDotSyntax occurs when a "..." occurs in a context where it is - // not valid. - // - // Example: - // var _ = map[int][...]int{0: {}} - BadDotDotDotSyntax - - // NonVariadicDotDotDot occurs when a "..." is used on the final argument to - // a non-variadic function. - // - // Example: - // func printArgs(s []string) { - // for _, a := range s { - // println(a) - // } - // } - // - // func f() { - // s := []string{"a", "b", "c"} - // printArgs(s...) - // } - NonVariadicDotDotDot - - // MisplacedDotDotDot occurs when a "..." is used somewhere other than the - // final argument to a function call. - // - // Example: - // func printArgs(args ...int) { - // for _, a := range args { - // println(a) - // } - // } - // - // func f() { - // a := []int{1,2,3} - // printArgs(0, a...) - // } - MisplacedDotDotDot - - // InvalidDotDotDotOperand occurs when a "..." operator is applied to a - // single-valued operand. - // - // Example: - // func printArgs(args ...int) { - // for _, a := range args { - // println(a) - // } - // } - // - // func f() { - // a := 1 - // printArgs(a...) - // } - // - // Example: - // func args() (int, int) { - // return 1, 2 - // } - // - // func printArgs(args ...int) { - // for _, a := range args { - // println(a) - // } - // } - // - // func g() { - // printArgs(args()...) 
- // } - InvalidDotDotDotOperand - - // InvalidDotDotDot occurs when a "..." is used in a non-variadic built-in - // function. - // - // Example: - // var s = []int{1, 2, 3} - // var l = len(s...) - InvalidDotDotDot - - /* exprs > built-in */ - - // UncalledBuiltin occurs when a built-in function is used as a - // function-valued expression, instead of being called. - // - // Per the spec: - // "The built-in functions do not have standard Go types, so they can only - // appear in call expressions; they cannot be used as function values." - // - // Example: - // var _ = copy - UncalledBuiltin - - // InvalidAppend occurs when append is called with a first argument that is - // not a slice. - // - // Example: - // var _ = append(1, 2) - InvalidAppend - - // InvalidCap occurs when an argument to the cap built-in function is not of - // supported type. - // - // See https://golang.org/ref/spec#Lengthand_capacity for information on - // which underlying types are supported as arguments to cap and len. - // - // Example: - // var s = 2 - // var x = cap(s) - InvalidCap - - // InvalidClose occurs when close(...) is called with an argument that is - // not of channel type, or that is a receive-only channel. - // - // Example: - // func f() { - // var x int - // close(x) - // } - InvalidClose - - // InvalidCopy occurs when the arguments are not of slice type or do not - // have compatible type. - // - // See https://golang.org/ref/spec#Appendingand_copying_slices for more - // information on the type requirements for the copy built-in. - // - // Example: - // func f() { - // var x []int - // y := []int64{1,2,3} - // copy(x, y) - // } - InvalidCopy - - // InvalidComplex occurs when the complex built-in function is called with - // arguments with incompatible types. - // - // Example: - // var _ = complex(float32(1), float64(2)) - InvalidComplex - - // InvalidDelete occurs when the delete built-in function is called with a - // first argument that is not a map. - // - // Example: - // func f() { - // m := "hello" - // delete(m, "e") - // } - InvalidDelete - - // InvalidImag occurs when the imag built-in function is called with an - // argument that does not have complex type. - // - // Example: - // var _ = imag(int(1)) - InvalidImag - - // InvalidLen occurs when an argument to the len built-in function is not of - // supported type. - // - // See https://golang.org/ref/spec#Lengthand_capacity for information on - // which underlying types are supported as arguments to cap and len. - // - // Example: - // var s = 2 - // var x = len(s) - InvalidLen - - // SwappedMakeArgs occurs when make is called with three arguments, and its - // length argument is larger than its capacity argument. - // - // Example: - // var x = make([]int, 3, 2) - SwappedMakeArgs - - // InvalidMake occurs when make is called with an unsupported type argument. - // - // See https://golang.org/ref/spec#Makingslices_maps_and_channels for - // information on the types that may be created using make. - // - // Example: - // var x = make(int) - InvalidMake - - // InvalidReal occurs when the real built-in function is called with an - // argument that does not have complex type. - // - // Example: - // var _ = real(int(1)) - InvalidReal - - /* exprs > assertion */ - - // InvalidAssert occurs when a type assertion is applied to a - // value that is not of interface type. 
- // - // Example: - // var x = 1 - // var _ = x.(float64) - InvalidAssert - - // ImpossibleAssert occurs for a type assertion x.(T) when the value x of - // interface cannot have dynamic type T, due to a missing or mismatching - // method on T. - // - // Example: - // type T int - // - // func (t *T) m() int { return int(*t) } - // - // type I interface { m() int } - // - // var x I - // var _ = x.(T) - ImpossibleAssert - - /* exprs > conversion */ - - // InvalidConversion occurs when the argument type cannot be converted to the - // target. - // - // See https://golang.org/ref/spec#Conversions for the rules of - // convertibility. - // - // Example: - // var x float64 - // var _ = string(x) - InvalidConversion - - // InvalidUntypedConversion occurs when an there is no valid implicit - // conversion from an untyped value satisfying the type constraints of the - // context in which it is used. - // - // Example: - // var _ = 1 + "" - InvalidUntypedConversion - - /* offsetof */ - - // BadOffsetofSyntax occurs when unsafe.Offsetof is called with an argument - // that is not a selector expression. - // - // Example: - // import "unsafe" - // - // var x int - // var _ = unsafe.Offsetof(x) - BadOffsetofSyntax - - // InvalidOffsetof occurs when unsafe.Offsetof is called with a method - // selector, rather than a field selector, or when the field is embedded via - // a pointer. - // - // Per the spec: - // - // "If f is an embedded field, it must be reachable without pointer - // indirections through fields of the struct. " - // - // Example: - // import "unsafe" - // - // type T struct { f int } - // type S struct { *T } - // var s S - // var _ = unsafe.Offsetof(s.f) - // - // Example: - // import "unsafe" - // - // type S struct{} - // - // func (S) m() {} - // - // var s S - // var _ = unsafe.Offsetof(s.m) - InvalidOffsetof - - /* control flow > scope */ - - // UnusedExpr occurs when a side-effect free expression is used as a - // statement. Such a statement has no effect. - // - // Example: - // func f(i int) { - // i*i - // } - UnusedExpr - - // UnusedVar occurs when a variable is declared but unused. - // - // Example: - // func f() { - // x := 1 - // } - UnusedVar - - // MissingReturn occurs when a function with results is missing a return - // statement. - // - // Example: - // func f() int {} - MissingReturn - - // WrongResultCount occurs when a return statement returns an incorrect - // number of values. - // - // Example: - // func ReturnOne() int { - // return 1, 2 - // } - WrongResultCount - - // OutOfScopeResult occurs when the name of a value implicitly returned by - // an empty return statement is shadowed in a nested scope. - // - // Example: - // func factor(n int) (i int) { - // for i := 2; i < n; i++ { - // if n%i == 0 { - // return - // } - // } - // return 0 - // } - OutOfScopeResult - - /* control flow > if */ - - // InvalidCond occurs when an if condition is not a boolean expression. - // - // Example: - // func checkReturn(i int) { - // if i { - // panic("non-zero return") - // } - // } - InvalidCond - - /* control flow > for */ - - // InvalidPostDecl occurs when there is a declaration in a for-loop post - // statement. - // - // Example: - // func f() { - // for i := 0; i < 10; j := 0 {} - // } - InvalidPostDecl - - // InvalidChanRange occurs when a send-only channel used in a range - // expression. 
- // - // Example: - // func sum(c chan<- int) { - // s := 0 - // for i := range c { - // s += i - // } - // } - InvalidChanRange - - // InvalidIterVar occurs when two iteration variables are used while ranging - // over a channel. - // - // Example: - // func f(c chan int) { - // for k, v := range c { - // println(k, v) - // } - // } - InvalidIterVar - - // InvalidRangeExpr occurs when the type of a range expression is not array, - // slice, string, map, or channel. - // - // Example: - // func f(i int) { - // for j := range i { - // println(j) - // } - // } - InvalidRangeExpr - - /* control flow > switch */ - - // MisplacedBreak occurs when a break statement is not within a for, switch, - // or select statement of the innermost function definition. - // - // Example: - // func f() { - // break - // } - MisplacedBreak - - // MisplacedContinue occurs when a continue statement is not within a for - // loop of the innermost function definition. - // - // Example: - // func sumeven(n int) int { - // proceed := func() { - // continue - // } - // sum := 0 - // for i := 1; i <= n; i++ { - // if i % 2 != 0 { - // proceed() - // } - // sum += i - // } - // return sum - // } - MisplacedContinue - - // MisplacedFallthrough occurs when a fallthrough statement is not within an - // expression switch. - // - // Example: - // func typename(i interface{}) string { - // switch i.(type) { - // case int64: - // fallthrough - // case int: - // return "int" - // } - // return "unsupported" - // } - MisplacedFallthrough - - // DuplicateCase occurs when a type or expression switch has duplicate - // cases. - // - // Example: - // func printInt(i int) { - // switch i { - // case 1: - // println("one") - // case 1: - // println("One") - // } - // } - DuplicateCase - - // DuplicateDefault occurs when a type or expression switch has multiple - // default clauses. - // - // Example: - // func printInt(i int) { - // switch i { - // case 1: - // println("one") - // default: - // println("One") - // default: - // println("1") - // } - // } - DuplicateDefault - - // BadTypeKeyword occurs when a .(type) expression is used anywhere other - // than a type switch. - // - // Example: - // type I interface { - // m() - // } - // var t I - // var _ = t.(type) - BadTypeKeyword - - // InvalidTypeSwitch occurs when .(type) is used on an expression that is - // not of interface type. - // - // Example: - // func f(i int) { - // switch x := i.(type) {} - // } - InvalidTypeSwitch - - // InvalidExprSwitch occurs when a switch expression is not comparable. - // - // Example: - // func _() { - // var a struct{ _ func() } - // switch a /* ERROR cannot switch on a */ { - // } - // } - InvalidExprSwitch - - /* control flow > select */ - - // InvalidSelectCase occurs when a select case is not a channel send or - // receive. - // - // Example: - // func checkChan(c <-chan int) bool { - // select { - // case c: - // return true - // default: - // return false - // } - // } - InvalidSelectCase - - /* control flow > labels and jumps */ - - // UndeclaredLabel occurs when an undeclared label is jumped to. - // - // Example: - // func f() { - // goto L - // } - UndeclaredLabel - - // DuplicateLabel occurs when a label is declared more than once. - // - // Example: - // func f() int { - // L: - // L: - // return 1 - // } - DuplicateLabel - - // MisplacedLabel occurs when a break or continue label is not on a for, - // switch, or select statement. 
- // - // Example: - // func f() { - // L: - // a := []int{1,2,3} - // for _, e := range a { - // if e > 10 { - // break L - // } - // println(a) - // } - // } - MisplacedLabel - - // UnusedLabel occurs when a label is declared but not used. - // - // Example: - // func f() { - // L: - // } - UnusedLabel - - // JumpOverDecl occurs when a label jumps over a variable declaration. - // - // Example: - // func f() int { - // goto L - // x := 2 - // L: - // x++ - // return x - // } - JumpOverDecl - - // JumpIntoBlock occurs when a forward jump goes to a label inside a nested - // block. - // - // Example: - // func f(x int) { - // goto L - // if x > 0 { - // L: - // print("inside block") - // } - // } - JumpIntoBlock - - /* control flow > calls */ - - // InvalidMethodExpr occurs when a pointer method is called but the argument - // is not addressable. - // - // Example: - // type T struct {} - // - // func (*T) m() int { return 1 } - // - // var _ = T.m(T{}) - InvalidMethodExpr - - // WrongArgCount occurs when too few or too many arguments are passed by a - // function call. - // - // Example: - // func f(i int) {} - // var x = f() - WrongArgCount - - // InvalidCall occurs when an expression is called that is not of function - // type. - // - // Example: - // var x = "x" - // var y = x() - InvalidCall - - /* control flow > suspended */ - - // UnusedResults occurs when a restricted expression-only built-in function - // is suspended via go or defer. Such a suspension discards the results of - // these side-effect free built-in functions, and therefore is ineffectual. - // - // Example: - // func f(a []int) int { - // defer len(a) - // return i - // } - UnusedResults - - // InvalidDefer occurs when a deferred expression is not a function call, - // for example if the expression is a type conversion. - // - // Example: - // func f(i int) int { - // defer int32(i) - // return i - // } - InvalidDefer - - // InvalidGo occurs when a go expression is not a function call, for example - // if the expression is a type conversion. - // - // Example: - // func f(i int) int { - // go int32(i) - // return i - // } - InvalidGo - - // All codes below were added in Go 1.17. - - /* decl */ - - // BadDecl occurs when a declaration has invalid syntax. - BadDecl - - // RepeatedDecl occurs when an identifier occurs more than once on the left - // hand side of a short variable declaration. - // - // Example: - // func _() { - // x, y, y := 1, 2, 3 - // } - RepeatedDecl - - /* unsafe */ - - // InvalidUnsafeAdd occurs when unsafe.Add is called with a - // length argument that is not of integer type. - // - // Example: - // import "unsafe" - // - // var p unsafe.Pointer - // var _ = unsafe.Add(p, float64(1)) - InvalidUnsafeAdd - - // InvalidUnsafeSlice occurs when unsafe.Slice is called with a - // pointer argument that is not of pointer type or a length argument - // that is not of integer type, negative, or out of bounds. - // - // Example: - // import "unsafe" - // - // var x int - // var _ = unsafe.Slice(x, 1) - // - // Example: - // import "unsafe" - // - // var x int - // var _ = unsafe.Slice(&x, float64(1)) - // - // Example: - // import "unsafe" - // - // var x int - // var _ = unsafe.Slice(&x, -1) - // - // Example: - // import "unsafe" - // - // var x int - // var _ = unsafe.Slice(&x, uint64(1) << 63) - InvalidUnsafeSlice - - // All codes below were added in Go 1.18. - - /* features */ - - // UnsupportedFeature occurs when a language feature is used that is not - // supported at this Go version. 
- UnsupportedFeature - - /* type params */ - - // NotAGenericType occurs when a non-generic type is used where a generic - // type is expected: in type or function instantiation. - // - // Example: - // type T int - // - // var _ T[int] - NotAGenericType - - // WrongTypeArgCount occurs when a type or function is instantiated with an - // incorrent number of type arguments, including when a generic type or - // function is used without instantiation. - // - // Errors inolving failed type inference are assigned other error codes. - // - // Example: - // type T[p any] int - // - // var _ T[int, string] - // - // Example: - // func f[T any]() {} - // - // var x = f - WrongTypeArgCount - - // CannotInferTypeArgs occurs when type or function type argument inference - // fails to infer all type arguments. - // - // Example: - // func f[T any]() {} - // - // func _() { - // f() - // } - // - // Example: - // type N[P, Q any] struct{} - // - // var _ N[int] - CannotInferTypeArgs - - // InvalidTypeArg occurs when a type argument does not satisfy its - // corresponding type parameter constraints. - // - // Example: - // type T[P ~int] struct{} - // - // var _ T[string] - InvalidTypeArg // arguments? InferenceFailed - - // InvalidInstanceCycle occurs when an invalid cycle is detected - // within the instantiation graph. - // - // Example: - // func f[T any]() { f[*T]() } - InvalidInstanceCycle - - // InvalidUnion occurs when an embedded union or approximation element is - // not valid. - // - // Example: - // type _ interface { - // ~int | interface{ m() } - // } - InvalidUnion - - // MisplacedConstraintIface occurs when a constraint-type interface is used - // outside of constraint position. - // - // Example: - // type I interface { ~int } - // - // var _ I - MisplacedConstraintIface - - // InvalidMethodTypeParams occurs when methods have type parameters. - // - // It cannot be encountered with an AST parsed using go/parser. - InvalidMethodTypeParams - - // MisplacedTypeParam occurs when a type parameter is used in a place where - // it is not permitted. - // - // Example: - // type T[P any] P - // - // Example: - // type T[P any] struct{ *P } - MisplacedTypeParam - - // InvalidUnsafeSliceData occurs when unsafe.SliceData is called with - // an argument that is not of slice type. It also occurs if it is used - // in a package compiled for a language version before go1.20. - // - // Example: - // import "unsafe" - // - // var x int - // var _ = unsafe.SliceData(x) - InvalidUnsafeSliceData - - // InvalidUnsafeString occurs when unsafe.String is called with - // a length argument that is not of integer type, negative, or - // out of bounds. It also occurs if it is used in a package - // compiled for a language version before go1.20. - // - // Example: - // import "unsafe" - // - // var b [10]byte - // var _ = unsafe.String(&b[0], -1) - InvalidUnsafeString - - // InvalidUnsafeStringData occurs if it is used in a package - // compiled for a language version before go1.20. - _ // not used anymore - -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/typesinternal/errorcode_string.go b/go/extractor/vendor/golang.org/x/tools/internal/typesinternal/errorcode_string.go deleted file mode 100644 index 15ecf7c5ded9..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/typesinternal/errorcode_string.go +++ /dev/null @@ -1,179 +0,0 @@ -// Code generated by "stringer -type=ErrorCode"; DO NOT EDIT. 
- -package typesinternal - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. - var x [1]struct{} - _ = x[InvalidSyntaxTree - -1] - _ = x[Test-1] - _ = x[BlankPkgName-2] - _ = x[MismatchedPkgName-3] - _ = x[InvalidPkgUse-4] - _ = x[BadImportPath-5] - _ = x[BrokenImport-6] - _ = x[ImportCRenamed-7] - _ = x[UnusedImport-8] - _ = x[InvalidInitCycle-9] - _ = x[DuplicateDecl-10] - _ = x[InvalidDeclCycle-11] - _ = x[InvalidTypeCycle-12] - _ = x[InvalidConstInit-13] - _ = x[InvalidConstVal-14] - _ = x[InvalidConstType-15] - _ = x[UntypedNilUse-16] - _ = x[WrongAssignCount-17] - _ = x[UnassignableOperand-18] - _ = x[NoNewVar-19] - _ = x[MultiValAssignOp-20] - _ = x[InvalidIfaceAssign-21] - _ = x[InvalidChanAssign-22] - _ = x[IncompatibleAssign-23] - _ = x[UnaddressableFieldAssign-24] - _ = x[NotAType-25] - _ = x[InvalidArrayLen-26] - _ = x[BlankIfaceMethod-27] - _ = x[IncomparableMapKey-28] - _ = x[InvalidIfaceEmbed-29] - _ = x[InvalidPtrEmbed-30] - _ = x[BadRecv-31] - _ = x[InvalidRecv-32] - _ = x[DuplicateFieldAndMethod-33] - _ = x[DuplicateMethod-34] - _ = x[InvalidBlank-35] - _ = x[InvalidIota-36] - _ = x[MissingInitBody-37] - _ = x[InvalidInitSig-38] - _ = x[InvalidInitDecl-39] - _ = x[InvalidMainDecl-40] - _ = x[TooManyValues-41] - _ = x[NotAnExpr-42] - _ = x[TruncatedFloat-43] - _ = x[NumericOverflow-44] - _ = x[UndefinedOp-45] - _ = x[MismatchedTypes-46] - _ = x[DivByZero-47] - _ = x[NonNumericIncDec-48] - _ = x[UnaddressableOperand-49] - _ = x[InvalidIndirection-50] - _ = x[NonIndexableOperand-51] - _ = x[InvalidIndex-52] - _ = x[SwappedSliceIndices-53] - _ = x[NonSliceableOperand-54] - _ = x[InvalidSliceExpr-55] - _ = x[InvalidShiftCount-56] - _ = x[InvalidShiftOperand-57] - _ = x[InvalidReceive-58] - _ = x[InvalidSend-59] - _ = x[DuplicateLitKey-60] - _ = x[MissingLitKey-61] - _ = x[InvalidLitIndex-62] - _ = x[OversizeArrayLit-63] - _ = x[MixedStructLit-64] - _ = x[InvalidStructLit-65] - _ = x[MissingLitField-66] - _ = x[DuplicateLitField-67] - _ = x[UnexportedLitField-68] - _ = x[InvalidLitField-69] - _ = x[UntypedLit-70] - _ = x[InvalidLit-71] - _ = x[AmbiguousSelector-72] - _ = x[UndeclaredImportedName-73] - _ = x[UnexportedName-74] - _ = x[UndeclaredName-75] - _ = x[MissingFieldOrMethod-76] - _ = x[BadDotDotDotSyntax-77] - _ = x[NonVariadicDotDotDot-78] - _ = x[MisplacedDotDotDot-79] - _ = x[InvalidDotDotDotOperand-80] - _ = x[InvalidDotDotDot-81] - _ = x[UncalledBuiltin-82] - _ = x[InvalidAppend-83] - _ = x[InvalidCap-84] - _ = x[InvalidClose-85] - _ = x[InvalidCopy-86] - _ = x[InvalidComplex-87] - _ = x[InvalidDelete-88] - _ = x[InvalidImag-89] - _ = x[InvalidLen-90] - _ = x[SwappedMakeArgs-91] - _ = x[InvalidMake-92] - _ = x[InvalidReal-93] - _ = x[InvalidAssert-94] - _ = x[ImpossibleAssert-95] - _ = x[InvalidConversion-96] - _ = x[InvalidUntypedConversion-97] - _ = x[BadOffsetofSyntax-98] - _ = x[InvalidOffsetof-99] - _ = x[UnusedExpr-100] - _ = x[UnusedVar-101] - _ = x[MissingReturn-102] - _ = x[WrongResultCount-103] - _ = x[OutOfScopeResult-104] - _ = x[InvalidCond-105] - _ = x[InvalidPostDecl-106] - _ = x[InvalidChanRange-107] - _ = x[InvalidIterVar-108] - _ = x[InvalidRangeExpr-109] - _ = x[MisplacedBreak-110] - _ = x[MisplacedContinue-111] - _ = x[MisplacedFallthrough-112] - _ = x[DuplicateCase-113] - _ = x[DuplicateDefault-114] - _ = x[BadTypeKeyword-115] - _ = x[InvalidTypeSwitch-116] - _ = x[InvalidExprSwitch-117] - _ = 
x[InvalidSelectCase-118] - _ = x[UndeclaredLabel-119] - _ = x[DuplicateLabel-120] - _ = x[MisplacedLabel-121] - _ = x[UnusedLabel-122] - _ = x[JumpOverDecl-123] - _ = x[JumpIntoBlock-124] - _ = x[InvalidMethodExpr-125] - _ = x[WrongArgCount-126] - _ = x[InvalidCall-127] - _ = x[UnusedResults-128] - _ = x[InvalidDefer-129] - _ = x[InvalidGo-130] - _ = x[BadDecl-131] - _ = x[RepeatedDecl-132] - _ = x[InvalidUnsafeAdd-133] - _ = x[InvalidUnsafeSlice-134] - _ = x[UnsupportedFeature-135] - _ = x[NotAGenericType-136] - _ = x[WrongTypeArgCount-137] - _ = x[CannotInferTypeArgs-138] - _ = x[InvalidTypeArg-139] - _ = x[InvalidInstanceCycle-140] - _ = x[InvalidUnion-141] - _ = x[MisplacedConstraintIface-142] - _ = x[InvalidMethodTypeParams-143] - _ = x[MisplacedTypeParam-144] - _ = x[InvalidUnsafeSliceData-145] - _ = x[InvalidUnsafeString-146] -} - -const ( - _ErrorCode_name_0 = "InvalidSyntaxTree" - _ErrorCode_name_1 = "TestBlankPkgNameMismatchedPkgNameInvalidPkgUseBadImportPathBrokenImportImportCRenamedUnusedImportInvalidInitCycleDuplicateDeclInvalidDeclCycleInvalidTypeCycleInvalidConstInitInvalidConstValInvalidConstTypeUntypedNilUseWrongAssignCountUnassignableOperandNoNewVarMultiValAssignOpInvalidIfaceAssignInvalidChanAssignIncompatibleAssignUnaddressableFieldAssignNotATypeInvalidArrayLenBlankIfaceMethodIncomparableMapKeyInvalidIfaceEmbedInvalidPtrEmbedBadRecvInvalidRecvDuplicateFieldAndMethodDuplicateMethodInvalidBlankInvalidIotaMissingInitBodyInvalidInitSigInvalidInitDeclInvalidMainDeclTooManyValuesNotAnExprTruncatedFloatNumericOverflowUndefinedOpMismatchedTypesDivByZeroNonNumericIncDecUnaddressableOperandInvalidIndirectionNonIndexableOperandInvalidIndexSwappedSliceIndicesNonSliceableOperandInvalidSliceExprInvalidShiftCountInvalidShiftOperandInvalidReceiveInvalidSendDuplicateLitKeyMissingLitKeyInvalidLitIndexOversizeArrayLitMixedStructLitInvalidStructLitMissingLitFieldDuplicateLitFieldUnexportedLitFieldInvalidLitFieldUntypedLitInvalidLitAmbiguousSelectorUndeclaredImportedNameUnexportedNameUndeclaredNameMissingFieldOrMethodBadDotDotDotSyntaxNonVariadicDotDotDotMisplacedDotDotDotInvalidDotDotDotOperandInvalidDotDotDotUncalledBuiltinInvalidAppendInvalidCapInvalidCloseInvalidCopyInvalidComplexInvalidDeleteInvalidImagInvalidLenSwappedMakeArgsInvalidMakeInvalidRealInvalidAssertImpossibleAssertInvalidConversionInvalidUntypedConversionBadOffsetofSyntaxInvalidOffsetofUnusedExprUnusedVarMissingReturnWrongResultCountOutOfScopeResultInvalidCondInvalidPostDeclInvalidChanRangeInvalidIterVarInvalidRangeExprMisplacedBreakMisplacedContinueMisplacedFallthroughDuplicateCaseDuplicateDefaultBadTypeKeywordInvalidTypeSwitchInvalidExprSwitchInvalidSelectCaseUndeclaredLabelDuplicateLabelMisplacedLabelUnusedLabelJumpOverDeclJumpIntoBlockInvalidMethodExprWrongArgCountInvalidCallUnusedResultsInvalidDeferInvalidGoBadDeclRepeatedDeclInvalidUnsafeAddInvalidUnsafeSliceUnsupportedFeatureNotAGenericTypeWrongTypeArgCountCannotInferTypeArgsInvalidTypeArgInvalidInstanceCycleInvalidUnionMisplacedConstraintIfaceInvalidMethodTypeParamsMisplacedTypeParamInvalidUnsafeSliceDataInvalidUnsafeString" -) - -var ( - _ErrorCode_index_1 = [...]uint16{0, 4, 16, 33, 46, 59, 71, 85, 97, 113, 126, 142, 158, 174, 189, 205, 218, 234, 253, 261, 277, 295, 312, 330, 354, 362, 377, 393, 411, 428, 443, 450, 461, 484, 499, 511, 522, 537, 551, 566, 581, 594, 603, 617, 632, 643, 658, 667, 683, 703, 721, 740, 752, 771, 790, 806, 823, 842, 856, 867, 882, 895, 910, 926, 940, 956, 971, 988, 1006, 1021, 1031, 1041, 1058, 1080, 1094, 1108, 1128, 1146, 1166, 1184, 
1207, 1223, 1238, 1251, 1261, 1273, 1284, 1298, 1311, 1322, 1332, 1347, 1358, 1369, 1382, 1398, 1415, 1439, 1456, 1471, 1481, 1490, 1503, 1519, 1535, 1546, 1561, 1577, 1591, 1607, 1621, 1638, 1658, 1671, 1687, 1701, 1718, 1735, 1752, 1767, 1781, 1795, 1806, 1818, 1831, 1848, 1861, 1872, 1885, 1897, 1906, 1913, 1925, 1941, 1959, 1977, 1992, 2009, 2028, 2042, 2062, 2074, 2098, 2121, 2139, 2161, 2180} -) - -func (i ErrorCode) String() string { - switch { - case i == -1: - return _ErrorCode_name_0 - case 1 <= i && i <= 146: - i -= 1 - return _ErrorCode_name_1[_ErrorCode_index_1[i]:_ErrorCode_index_1[i+1]] - default: - return "ErrorCode(" + strconv.FormatInt(int64(i), 10) + ")" - } -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/typesinternal/types.go b/go/extractor/vendor/golang.org/x/tools/internal/typesinternal/types.go deleted file mode 100644 index ce7d4351b220..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/typesinternal/types.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package typesinternal provides access to internal go/types APIs that are not -// yet exported. -package typesinternal - -import ( - "go/token" - "go/types" - "reflect" - "unsafe" -) - -func SetUsesCgo(conf *types.Config) bool { - v := reflect.ValueOf(conf).Elem() - - f := v.FieldByName("go115UsesCgo") - if !f.IsValid() { - f = v.FieldByName("UsesCgo") - if !f.IsValid() { - return false - } - } - - addr := unsafe.Pointer(f.UnsafeAddr()) - *(*bool)(addr) = true - - return true -} - -// ReadGo116ErrorData extracts additional information from types.Error values -// generated by Go version 1.16 and later: the error code, start position, and -// end position. If all positions are valid, start <= err.Pos <= end. -// -// If the data could not be read, the final result parameter will be false. -func ReadGo116ErrorData(err types.Error) (code ErrorCode, start, end token.Pos, ok bool) { - var data [3]int - // By coincidence all of these fields are ints, which simplifies things. - v := reflect.ValueOf(err) - for i, name := range []string{"go116code", "go116start", "go116end"} { - f := v.FieldByName(name) - if !f.IsValid() { - return 0, 0, 0, false - } - data[i] = int(f.Int()) - } - return ErrorCode(data[0]), token.Pos(data[1]), token.Pos(data[2]), true -} - -var SetGoVersion = func(conf *types.Config, version string) bool { return false } diff --git a/go/extractor/vendor/golang.org/x/tools/internal/typesinternal/types_118.go b/go/extractor/vendor/golang.org/x/tools/internal/typesinternal/types_118.go deleted file mode 100644 index a42b072a67d3..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/typesinternal/types_118.go +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build go1.18 -// +build go1.18 - -package typesinternal - -import ( - "go/types" -) - -func init() { - SetGoVersion = func(conf *types.Config, version string) bool { - conf.GoVersion = version - return true - } -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/versions/BUILD.bazel b/go/extractor/vendor/golang.org/x/tools/internal/versions/BUILD.bazel deleted file mode 100644 index 85d428debf54..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/versions/BUILD.bazel +++ /dev/null @@ -1,17 +0,0 @@ -# generated running `bazel run //go/gazelle`, do not edit - -load("@rules_go//go:def.bzl", "go_library") - -go_library( - name = "versions", - srcs = [ - "gover.go", - "types.go", - "types_go121.go", - "types_go122.go", - "versions.go", - ], - importmap = "github.com/github/codeql-go/extractor/vendor/golang.org/x/tools/internal/versions", - importpath = "golang.org/x/tools/internal/versions", - visibility = ["//go/extractor/vendor/golang.org/x/tools:__subpackages__"], -) diff --git a/go/extractor/vendor/golang.org/x/tools/internal/versions/gover.go b/go/extractor/vendor/golang.org/x/tools/internal/versions/gover.go deleted file mode 100644 index bbabcd22e948..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/versions/gover.go +++ /dev/null @@ -1,172 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// This is a fork of internal/gover for use by x/tools until -// go1.21 and earlier are no longer supported by x/tools. - -package versions - -import "strings" - -// A gover is a parsed Go gover: major[.Minor[.Patch]][kind[pre]] -// The numbers are the original decimal strings to avoid integer overflows -// and since there is very little actual math. (Probably overflow doesn't matter in practice, -// but at the time this code was written, there was an existing test that used -// go1.99999999999, which does not fit in an int on 32-bit platforms. -// The "big decimal" representation avoids the problem entirely.) -type gover struct { - major string // decimal - minor string // decimal or "" - patch string // decimal or "" - kind string // "", "alpha", "beta", "rc" - pre string // decimal or "" -} - -// compare returns -1, 0, or +1 depending on whether -// x < y, x == y, or x > y, interpreted as toolchain versions. -// The versions x and y must not begin with a "go" prefix: just "1.21" not "go1.21". -// Malformed versions compare less than well-formed versions and equal to each other. -// The language version "1.21" compares less than the release candidate and eventual releases "1.21rc1" and "1.21.0". -func compare(x, y string) int { - vx := parse(x) - vy := parse(y) - - if c := cmpInt(vx.major, vy.major); c != 0 { - return c - } - if c := cmpInt(vx.minor, vy.minor); c != 0 { - return c - } - if c := cmpInt(vx.patch, vy.patch); c != 0 { - return c - } - if c := strings.Compare(vx.kind, vy.kind); c != 0 { // "" < alpha < beta < rc - return c - } - if c := cmpInt(vx.pre, vy.pre); c != 0 { - return c - } - return 0 -} - -// lang returns the Go language version. For example, lang("1.2.3") == "1.2". -func lang(x string) string { - v := parse(x) - if v.minor == "" || v.major == "1" && v.minor == "0" { - return v.major - } - return v.major + "." + v.minor -} - -// isValid reports whether the version x is valid. -func isValid(x string) bool { - return parse(x) != gover{} -} - -// parse parses the Go version string x into a version. 
-// It returns the zero version if x is malformed. -func parse(x string) gover { - var v gover - - // Parse major version. - var ok bool - v.major, x, ok = cutInt(x) - if !ok { - return gover{} - } - if x == "" { - // Interpret "1" as "1.0.0". - v.minor = "0" - v.patch = "0" - return v - } - - // Parse . before minor version. - if x[0] != '.' { - return gover{} - } - - // Parse minor version. - v.minor, x, ok = cutInt(x[1:]) - if !ok { - return gover{} - } - if x == "" { - // Patch missing is same as "0" for older versions. - // Starting in Go 1.21, patch missing is different from explicit .0. - if cmpInt(v.minor, "21") < 0 { - v.patch = "0" - } - return v - } - - // Parse patch if present. - if x[0] == '.' { - v.patch, x, ok = cutInt(x[1:]) - if !ok || x != "" { - // Note that we are disallowing prereleases (alpha, beta, rc) for patch releases here (x != ""). - // Allowing them would be a bit confusing because we already have: - // 1.21 < 1.21rc1 - // But a prerelease of a patch would have the opposite effect: - // 1.21.3rc1 < 1.21.3 - // We've never needed them before, so let's not start now. - return gover{} - } - return v - } - - // Parse prerelease. - i := 0 - for i < len(x) && (x[i] < '0' || '9' < x[i]) { - if x[i] < 'a' || 'z' < x[i] { - return gover{} - } - i++ - } - if i == 0 { - return gover{} - } - v.kind, x = x[:i], x[i:] - if x == "" { - return v - } - v.pre, x, ok = cutInt(x) - if !ok || x != "" { - return gover{} - } - - return v -} - -// cutInt scans the leading decimal number at the start of x to an integer -// and returns that value and the rest of the string. -func cutInt(x string) (n, rest string, ok bool) { - i := 0 - for i < len(x) && '0' <= x[i] && x[i] <= '9' { - i++ - } - if i == 0 || x[0] == '0' && i != 1 { // no digits or unnecessary leading zero - return "", "", false - } - return x[:i], x[i:], true -} - -// cmpInt returns cmp.Compare(x, y) interpreting x and y as decimal numbers. -// (Copied from golang.org/x/mod/semver's compareInt.) -func cmpInt(x, y string) int { - if x == y { - return 0 - } - if len(x) < len(y) { - return -1 - } - if len(x) > len(y) { - return +1 - } - if x < y { - return -1 - } else { - return +1 - } -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/versions/types.go b/go/extractor/vendor/golang.org/x/tools/internal/versions/types.go deleted file mode 100644 index 562eef21fa20..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/versions/types.go +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package versions - -import ( - "go/types" -) - -// GoVersion returns the Go version of the type package. -// It returns zero if no version can be determined. -func GoVersion(pkg *types.Package) string { - // TODO(taking): x/tools can call GoVersion() [from 1.21] after 1.25. - if pkg, ok := any(pkg).(interface{ GoVersion() string }); ok { - return pkg.GoVersion() - } - return "" -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/versions/types_go121.go b/go/extractor/vendor/golang.org/x/tools/internal/versions/types_go121.go deleted file mode 100644 index a7b79207aeeb..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/versions/types_go121.go +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build !go1.22 -// +build !go1.22 - -package versions - -import ( - "go/ast" - "go/types" -) - -// FileVersions always reports the a file's Go version as the -// zero version at this Go version. -func FileVersions(info *types.Info, file *ast.File) string { return "" } - -// InitFileVersions is a noop at this Go version. -func InitFileVersions(*types.Info) {} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/versions/types_go122.go b/go/extractor/vendor/golang.org/x/tools/internal/versions/types_go122.go deleted file mode 100644 index 7b9ba89a8220..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/versions/types_go122.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.22 -// +build go1.22 - -package versions - -import ( - "go/ast" - "go/types" -) - -// FileVersions maps a file to the file's semantic Go version. -// The reported version is the zero version if a version cannot be determined. -func FileVersions(info *types.Info, file *ast.File) string { - return info.FileVersions[file] -} - -// InitFileVersions initializes info to record Go versions for Go files. -func InitFileVersions(info *types.Info) { - info.FileVersions = make(map[*ast.File]string) -} diff --git a/go/extractor/vendor/golang.org/x/tools/internal/versions/versions.go b/go/extractor/vendor/golang.org/x/tools/internal/versions/versions.go deleted file mode 100644 index e16f6c33a523..000000000000 --- a/go/extractor/vendor/golang.org/x/tools/internal/versions/versions.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package versions - -// Note: If we use build tags to use go/versions when go >=1.22, -// we run into go.dev/issue/53737. Under some operations users would see an -// import of "go/versions" even if they would not compile the file. -// For example, during `go get -u ./...` (go.dev/issue/64490) we do not try to include -// For this reason, this library just a clone of go/versions for the moment. - -// Lang returns the Go language version for version x. -// If x is not a valid version, Lang returns the empty string. -// For example: -// -// Lang("go1.21rc2") = "go1.21" -// Lang("go1.21.2") = "go1.21" -// Lang("go1.21") = "go1.21" -// Lang("go1") = "go1" -// Lang("bad") = "" -// Lang("1.21") = "" -func Lang(x string) string { - v := lang(stripGo(x)) - if v == "" { - return "" - } - return x[:2+len(v)] // "go"+v without allocation -} - -// Compare returns -1, 0, or +1 depending on whether -// x < y, x == y, or x > y, interpreted as Go versions. -// The versions x and y must begin with a "go" prefix: "go1.21" not "1.21". -// Invalid versions, including the empty string, compare less than -// valid versions and equal to each other. -// The language version "go1.21" compares less than the -// release candidate and eventual releases "go1.21rc1" and "go1.21.0". -// Custom toolchain suffixes are ignored during comparison: -// "go1.21.0" and "go1.21.0-bigcorp" are equal. -func Compare(x, y string) int { return compare(stripGo(x), stripGo(y)) } - -// IsValid reports whether the version x is valid. -func IsValid(x string) bool { return isValid(stripGo(x)) } - -// stripGo converts from a "go1.21" version to a "1.21" version. 
-// If v does not start with "go", stripGo returns the empty string (a known invalid version).
-func stripGo(v string) string {
-	if len(v) < 2 || v[:2] != "go" {
-		return ""
-	}
-	return v[2:]
-}
diff --git a/go/extractor/vendor/modules.txt b/go/extractor/vendor/modules.txt
deleted file mode 100644
index 5687615f62c3..000000000000
--- a/go/extractor/vendor/modules.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-# golang.org/x/mod v0.15.0
-## explicit; go 1.18
-golang.org/x/mod/internal/lazyregexp
-golang.org/x/mod/modfile
-golang.org/x/mod/module
-golang.org/x/mod/semver
-# golang.org/x/tools v0.18.0
-## explicit; go 1.18
-golang.org/x/tools/go/gcexportdata
-golang.org/x/tools/go/internal/packagesdriver
-golang.org/x/tools/go/packages
-golang.org/x/tools/go/types/objectpath
-golang.org/x/tools/internal/event
-golang.org/x/tools/internal/event/core
-golang.org/x/tools/internal/event/keys
-golang.org/x/tools/internal/event/label
-golang.org/x/tools/internal/event/tag
-golang.org/x/tools/internal/gcimporter
-golang.org/x/tools/internal/gocommand
-golang.org/x/tools/internal/packagesinternal
-golang.org/x/tools/internal/pkgbits
-golang.org/x/tools/internal/tokeninternal
-golang.org/x/tools/internal/typeparams
-golang.org/x/tools/internal/typesinternal
-golang.org/x/tools/internal/versions
diff --git a/go/gen.py b/go/gen.py
index e04dea0b5e98..eb74db7bddd9 100644
--- a/go/gen.py
+++ b/go/gen.py
@@ -41,15 +41,8 @@ def options():
 
 go, gazelle, go_gen_dbscheme = map(r.Rlocation, opts.executables)
 
-if opts.force:
-    print("clearing vendor directory")
-    shutil.rmtree(go_extractor_dir / "vendor")
-
 existing_build_files = set(go_extractor_dir.glob("*/**/BUILD.bazel"))
 
-print("updating vendor directory")
-subprocess.check_call([go, "-C", go_extractor_dir, "mod", "vendor"])
-
 if opts.force:
     print("clearing generated BUILD files")
     for build_file in existing_build_files:
@@ -64,8 +57,6 @@ def options():
 if not opts.force:
     # otherwise, subtract the files that existed at the start
     build_files_to_update -= existing_build_files
-    # but bring back the `vendor` ones, as the vendor update step always clears them
-    build_files_to_update.update(go_extractor_dir.glob("vendor/**/BUILD.bazel"))
 
 print("adding header to newly generated BUILD files")
 for build_file in build_files_to_update:
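
For context on what replaces the deleted vendor tree: with gazelle's `go_deps` module extension, the golang.org/x/mod and golang.org/x/tools dependencies listed in modules.txt above are resolved from go.mod instead of being vendored. The sketch below shows what such a MODULE.bazel declaration typically looks like; the `@gazelle` repo name, the `//go/extractor:go.mod` label, and the use_repo list are illustrative assumptions and are not taken from this patch's actual MODULE.bazel hunk.

# Sketch only -- not the MODULE.bazel change from this patch.
go_deps = use_extension("@gazelle//:extensions.bzl", "go_deps")
go_deps.from_file(go_mod = "//go/extractor:go.mod")
use_repo(
    go_deps,
    "org_golang_x_mod",
    "org_golang_x_tools",
)

With a setup along these lines, BUILD.bazel targets can presumably depend on labels such as @org_golang_x_mod//modfile rather than //go/extractor/vendor/golang.org/x/mod/modfile, which is what makes the vendored BUILD files deleted above unnecessary.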